[official-gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
71 /* Tree code classes. */
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
112 const char *const tree_code_class_strings[] =
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 /* Statistics-gathering stuff. */
132 static uint64_t tree_code_counts[MAX_TREE_CODES];
133 uint64_t tree_node_counts[(int) all_kinds];
134 uint64_t tree_node_sizes[(int) all_kinds];
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
167 struct GTY((for_user)) type_hash {
168 unsigned long hash;
169 tree type;
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
175 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 static hashval_t hash (type_hash *t) { return t->hash; }
178 static bool equal (type_hash *a, type_hash *b);
180 static int
181 keep_cache_entry (type_hash *&t)
183 return ggc_marked_p (t->type);
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
194 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
199 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 static hashval_t hash (tree t);
202 static bool equal (tree x, tree y);
205 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 /* Class and variable for making sure that there is a single POLY_INT_CST
208 for a given value. */
209 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
211 typedef std::pair<tree, const poly_wide_int *> compare_type;
212 static hashval_t hash (tree t);
213 static bool equal (tree x, const compare_type &y);
216 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218 /* Hash table for optimization flags and target option flags. Use the same
219 hash table for both sets of options. Nodes for building the current
220 optimization and target option nodes. The assumption is most of the time
221 the options created will already be in the hash table, so we avoid
222 allocating and freeing up a node repeatedly. */
223 static GTY (()) tree cl_optimization_node;
224 static GTY (()) tree cl_target_option_node;
226 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
228 static hashval_t hash (tree t);
229 static bool equal (tree x, tree y);
232 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234 /* General tree->tree mapping structure for use in hash tables. */
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
240 static GTY ((cache))
241 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
245 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
247 static bool
248 equal (tree_vec_map *a, tree_vec_map *b)
250 return a->base.from == b->base.from;
253 static int
254 keep_cache_entry (tree_vec_map *&m)
256 return ggc_marked_p (m->base.from);
260 static GTY ((cache))
261 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263 static void set_type_quals (tree, int);
264 static void print_type_hash_statistics (void);
265 static void print_debug_expr_statistics (void);
266 static void print_value_expr_statistics (void);
268 tree global_trees[TI_MAX];
269 tree integer_types[itk_none];
271 bool int_n_enabled_p[NUM_INT_N_ENTS];
272 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
274 bool tree_contains_struct[MAX_TREE_CODES][64];
276 /* Number of operands for each OpenMP clause. */
277 unsigned const char omp_clause_num_ops[] =
279 0, /* OMP_CLAUSE_ERROR */
280 1, /* OMP_CLAUSE_PRIVATE */
281 1, /* OMP_CLAUSE_SHARED */
282 1, /* OMP_CLAUSE_FIRSTPRIVATE */
283 2, /* OMP_CLAUSE_LASTPRIVATE */
284 5, /* OMP_CLAUSE_REDUCTION */
285 1, /* OMP_CLAUSE_COPYIN */
286 1, /* OMP_CLAUSE_COPYPRIVATE */
287 3, /* OMP_CLAUSE_LINEAR */
288 2, /* OMP_CLAUSE_ALIGNED */
289 1, /* OMP_CLAUSE_DEPEND */
290 1, /* OMP_CLAUSE_UNIFORM */
291 1, /* OMP_CLAUSE_TO_DECLARE */
292 1, /* OMP_CLAUSE_LINK */
293 2, /* OMP_CLAUSE_FROM */
294 2, /* OMP_CLAUSE_TO */
295 2, /* OMP_CLAUSE_MAP */
296 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
297 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
298 2, /* OMP_CLAUSE__CACHE_ */
299 2, /* OMP_CLAUSE_GANG */
300 1, /* OMP_CLAUSE_ASYNC */
301 1, /* OMP_CLAUSE_WAIT */
302 0, /* OMP_CLAUSE_AUTO */
303 0, /* OMP_CLAUSE_SEQ */
304 1, /* OMP_CLAUSE__LOOPTEMP_ */
305 1, /* OMP_CLAUSE_IF */
306 1, /* OMP_CLAUSE_NUM_THREADS */
307 1, /* OMP_CLAUSE_SCHEDULE */
308 0, /* OMP_CLAUSE_NOWAIT */
309 1, /* OMP_CLAUSE_ORDERED */
310 0, /* OMP_CLAUSE_DEFAULT */
311 3, /* OMP_CLAUSE_COLLAPSE */
312 0, /* OMP_CLAUSE_UNTIED */
313 1, /* OMP_CLAUSE_FINAL */
314 0, /* OMP_CLAUSE_MERGEABLE */
315 1, /* OMP_CLAUSE_DEVICE */
316 1, /* OMP_CLAUSE_DIST_SCHEDULE */
317 0, /* OMP_CLAUSE_INBRANCH */
318 0, /* OMP_CLAUSE_NOTINBRANCH */
319 1, /* OMP_CLAUSE_NUM_TEAMS */
320 1, /* OMP_CLAUSE_THREAD_LIMIT */
321 0, /* OMP_CLAUSE_PROC_BIND */
322 1, /* OMP_CLAUSE_SAFELEN */
323 1, /* OMP_CLAUSE_SIMDLEN */
324 0, /* OMP_CLAUSE_FOR */
325 0, /* OMP_CLAUSE_PARALLEL */
326 0, /* OMP_CLAUSE_SECTIONS */
327 0, /* OMP_CLAUSE_TASKGROUP */
328 1, /* OMP_CLAUSE_PRIORITY */
329 1, /* OMP_CLAUSE_GRAINSIZE */
330 1, /* OMP_CLAUSE_NUM_TASKS */
331 0, /* OMP_CLAUSE_NOGROUP */
332 0, /* OMP_CLAUSE_THREADS */
333 0, /* OMP_CLAUSE_SIMD */
334 1, /* OMP_CLAUSE_HINT */
335 0, /* OMP_CLAUSE_DEFAULTMAP */
336 1, /* OMP_CLAUSE__SIMDUID_ */
337 0, /* OMP_CLAUSE__SIMT_ */
338 0, /* OMP_CLAUSE_INDEPENDENT */
339 1, /* OMP_CLAUSE_WORKER */
340 1, /* OMP_CLAUSE_VECTOR */
341 1, /* OMP_CLAUSE_NUM_GANGS */
342 1, /* OMP_CLAUSE_NUM_WORKERS */
343 1, /* OMP_CLAUSE_VECTOR_LENGTH */
344 3, /* OMP_CLAUSE_TILE */
345 2, /* OMP_CLAUSE__GRIDDIM_ */
348 const char * const omp_clause_code_name[] =
350 "error_clause",
351 "private",
352 "shared",
353 "firstprivate",
354 "lastprivate",
355 "reduction",
356 "copyin",
357 "copyprivate",
358 "linear",
359 "aligned",
360 "depend",
361 "uniform",
362 "to",
363 "link",
364 "from",
365 "to",
366 "map",
367 "use_device_ptr",
368 "is_device_ptr",
369 "_cache_",
370 "gang",
371 "async",
372 "wait",
373 "auto",
374 "seq",
375 "_looptemp_",
376 "if",
377 "num_threads",
378 "schedule",
379 "nowait",
380 "ordered",
381 "default",
382 "collapse",
383 "untied",
384 "final",
385 "mergeable",
386 "device",
387 "dist_schedule",
388 "inbranch",
389 "notinbranch",
390 "num_teams",
391 "thread_limit",
392 "proc_bind",
393 "safelen",
394 "simdlen",
395 "for",
396 "parallel",
397 "sections",
398 "taskgroup",
399 "priority",
400 "grainsize",
401 "num_tasks",
402 "nogroup",
403 "threads",
404 "simd",
405 "hint",
406 "defaultmap",
407 "_simduid_",
408 "_simt_",
409 "independent",
410 "worker",
411 "vector",
412 "num_gangs",
413 "num_workers",
414 "vector_length",
415 "tile",
416 "_griddim_"
420 /* Return the tree node structure used by tree code CODE. */
422 static inline enum tree_node_structure_enum
423 tree_node_structure_for_code (enum tree_code code)
425 switch (TREE_CODE_CLASS (code))
427 case tcc_declaration:
429 switch (code)
431 case FIELD_DECL:
432 return TS_FIELD_DECL;
433 case PARM_DECL:
434 return TS_PARM_DECL;
435 case VAR_DECL:
436 return TS_VAR_DECL;
437 case LABEL_DECL:
438 return TS_LABEL_DECL;
439 case RESULT_DECL:
440 return TS_RESULT_DECL;
441 case DEBUG_EXPR_DECL:
442 return TS_DECL_WRTL;
443 case CONST_DECL:
444 return TS_CONST_DECL;
445 case TYPE_DECL:
446 return TS_TYPE_DECL;
447 case FUNCTION_DECL:
448 return TS_FUNCTION_DECL;
449 case TRANSLATION_UNIT_DECL:
450 return TS_TRANSLATION_UNIT_DECL;
451 default:
452 return TS_DECL_NON_COMMON;
455 case tcc_type:
456 return TS_TYPE_NON_COMMON;
457 case tcc_reference:
458 case tcc_comparison:
459 case tcc_unary:
460 case tcc_binary:
461 case tcc_expression:
462 case tcc_statement:
463 case tcc_vl_exp:
464 return TS_EXP;
465 default: /* tcc_constant and tcc_exceptional */
466 break;
468 switch (code)
470 /* tcc_constant cases. */
471 case VOID_CST: return TS_TYPED;
472 case INTEGER_CST: return TS_INT_CST;
473 case POLY_INT_CST: return TS_POLY_INT_CST;
474 case REAL_CST: return TS_REAL_CST;
475 case FIXED_CST: return TS_FIXED_CST;
476 case COMPLEX_CST: return TS_COMPLEX;
477 case VECTOR_CST: return TS_VECTOR;
478 case STRING_CST: return TS_STRING;
479 /* tcc_exceptional cases. */
480 case ERROR_MARK: return TS_COMMON;
481 case IDENTIFIER_NODE: return TS_IDENTIFIER;
482 case TREE_LIST: return TS_LIST;
483 case TREE_VEC: return TS_VEC;
484 case SSA_NAME: return TS_SSA_NAME;
485 case PLACEHOLDER_EXPR: return TS_COMMON;
486 case STATEMENT_LIST: return TS_STATEMENT_LIST;
487 case BLOCK: return TS_BLOCK;
488 case CONSTRUCTOR: return TS_CONSTRUCTOR;
489 case TREE_BINFO: return TS_BINFO;
490 case OMP_CLAUSE: return TS_OMP_CLAUSE;
491 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
492 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
494 default:
495 gcc_unreachable ();
500 /* Initialize tree_contains_struct to describe the hierarchy of tree
501 nodes. */
503 static void
504 initialize_tree_contains_struct (void)
506 unsigned i;
508 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
510 enum tree_code code;
511 enum tree_node_structure_enum ts_code;
513 code = (enum tree_code) i;
514 ts_code = tree_node_structure_for_code (code);
516 /* Mark the TS structure itself. */
517 tree_contains_struct[code][ts_code] = 1;
519 /* Mark all the structures that TS is derived from. */
520 switch (ts_code)
522 case TS_TYPED:
523 case TS_BLOCK:
524 case TS_OPTIMIZATION:
525 case TS_TARGET_OPTION:
526 MARK_TS_BASE (code);
527 break;
529 case TS_COMMON:
530 case TS_INT_CST:
531 case TS_POLY_INT_CST:
532 case TS_REAL_CST:
533 case TS_FIXED_CST:
534 case TS_VECTOR:
535 case TS_STRING:
536 case TS_COMPLEX:
537 case TS_SSA_NAME:
538 case TS_CONSTRUCTOR:
539 case TS_EXP:
540 case TS_STATEMENT_LIST:
541 MARK_TS_TYPED (code);
542 break;
544 case TS_IDENTIFIER:
545 case TS_DECL_MINIMAL:
546 case TS_TYPE_COMMON:
547 case TS_LIST:
548 case TS_VEC:
549 case TS_BINFO:
550 case TS_OMP_CLAUSE:
551 MARK_TS_COMMON (code);
552 break;
554 case TS_TYPE_WITH_LANG_SPECIFIC:
555 MARK_TS_TYPE_COMMON (code);
556 break;
558 case TS_TYPE_NON_COMMON:
559 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
560 break;
562 case TS_DECL_COMMON:
563 MARK_TS_DECL_MINIMAL (code);
564 break;
566 case TS_DECL_WRTL:
567 case TS_CONST_DECL:
568 MARK_TS_DECL_COMMON (code);
569 break;
571 case TS_DECL_NON_COMMON:
572 MARK_TS_DECL_WITH_VIS (code);
573 break;
575 case TS_DECL_WITH_VIS:
576 case TS_PARM_DECL:
577 case TS_LABEL_DECL:
578 case TS_RESULT_DECL:
579 MARK_TS_DECL_WRTL (code);
580 break;
582 case TS_FIELD_DECL:
583 MARK_TS_DECL_COMMON (code);
584 break;
586 case TS_VAR_DECL:
587 MARK_TS_DECL_WITH_VIS (code);
588 break;
590 case TS_TYPE_DECL:
591 case TS_FUNCTION_DECL:
592 MARK_TS_DECL_NON_COMMON (code);
593 break;
595 case TS_TRANSLATION_UNIT_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 default:
600 gcc_unreachable ();
604 /* Basic consistency checks for attributes used in fold. */
605 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
606 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
607 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
608 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
609 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
610 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
611 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
612 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
613 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
614 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
615 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
616 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
617 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
618 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
619 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
620 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
621 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
622 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
623 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
627 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
629 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
631 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
632 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
633 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
634 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
635 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
636 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
637 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
638 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
639 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
640 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
641 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
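/* A minimal sketch (illustrative only, not part of the original file) of
   how the tree_contains_struct matrix filled in above is consumed:
   accessors check CODE_CONTAINS_STRUCT before touching fields of a given
   TS_* structure.  decl_has_rtl_slot_p is a hypothetical helper.  */
#if 0
static bool
decl_has_rtl_slot_p (enum tree_code code)
{
  /* True for VAR_DECL, PARM_DECL, FUNCTION_DECL and friends; false for
     FIELD_DECL, which derives from TS_DECL_COMMON but not TS_DECL_WRTL.  */
  return CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL);
}
#endif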
648 /* Init tree.c. */
650 void
651 init_ttree (void)
653 /* Initialize the hash table of types. */
654 type_hash_table
655 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
657 debug_expr_for_decl
658 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
660 value_expr_for_decl
661 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
663 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
665 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
667 int_cst_node = make_int_cst (1, 1);
669 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
671 cl_optimization_node = make_node (OPTIMIZATION_NODE);
672 cl_target_option_node = make_node (TARGET_OPTION_NODE);
674 /* Initialize the tree_contains_struct array. */
675 initialize_tree_contains_struct ();
676 lang_hooks.init_ts ();
680 /* The name of the object as the assembler will see it (but before any
681 translations made by ASM_OUTPUT_LABELREF). Often this is the same
682 as DECL_NAME. It is an IDENTIFIER_NODE. */
683 tree
684 decl_assembler_name (tree decl)
686 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
687 lang_hooks.set_decl_assembler_name (decl);
688 return DECL_ASSEMBLER_NAME_RAW (decl);
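/* A minimal sketch (illustrative only, not part of the original file):
   callers usually reach the function above through DECL_ASSEMBLER_NAME,
   which computes the name lazily, and use DECL_ASSEMBLER_NAME_SET_P when
   they only want to inspect a name that already exists.  maybe_asm_name
   is a hypothetical helper.  */
#if 0
static const char *
maybe_asm_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    return NULL;  /* Don't force the lazy computation.  */
  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
}
#endif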
691 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
692 (either of which may be NULL). Inform the FE if this changes the
693 name. */
695 void
696 overwrite_decl_assembler_name (tree decl, tree name)
698 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
699 lang_hooks.overwrite_decl_assembler_name (decl, name);
702 /* When the target supports COMDAT groups, this indicates which group the
703 DECL is associated with. This can be either an IDENTIFIER_NODE or a
704 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
705 tree
706 decl_comdat_group (const_tree node)
708 struct symtab_node *snode = symtab_node::get (node);
709 if (!snode)
710 return NULL;
711 return snode->get_comdat_group ();
714 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
715 tree
716 decl_comdat_group_id (const_tree node)
718 struct symtab_node *snode = symtab_node::get (node);
719 if (!snode)
720 return NULL;
721 return snode->get_comdat_group_id ();
724 /* When the target supports named sections, return the section name of
725 NODE as a string, or NULL if NODE is in no section. */
726 const char *
727 decl_section_name (const_tree node)
729 struct symtab_node *snode = symtab_node::get (node);
730 if (!snode)
731 return NULL;
732 return snode->get_section ();
735 /* Set the section name of NODE to the string VALUE, or clear the
736 section when VALUE is NULL. */
737 void
738 set_decl_section_name (tree node, const char *value)
740 struct symtab_node *snode;
742 if (value == NULL)
744 snode = symtab_node::get (node);
745 if (!snode)
746 return;
748 else if (VAR_P (node))
749 snode = varpool_node::get_create (node);
750 else
751 snode = cgraph_node::get_create (node);
752 snode->set_section (value);
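/* A minimal sketch (illustrative only, not part of the original file) of
   the symmetry between decl_section_name and set_decl_section_name: the
   section is stored on the symtab node, so a decl without one simply has
   no section.  copy_section_name is a hypothetical helper.  */
#if 0
static void
copy_section_name (tree from, tree to)
{
  if (const char *sec = decl_section_name (from))
    set_decl_section_name (to, sec);
}
#endif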
755 /* Return TLS model of a variable NODE. */
756 enum tls_model
757 decl_tls_model (const_tree node)
759 struct varpool_node *snode = varpool_node::get (node);
760 if (!snode)
761 return TLS_MODEL_NONE;
762 return snode->tls_model;
765 /* Set TLS model of variable NODE to MODEL. */
766 void
767 set_decl_tls_model (tree node, enum tls_model model)
769 struct varpool_node *vnode;
771 if (model == TLS_MODEL_NONE)
773 vnode = varpool_node::get (node);
774 if (!vnode)
775 return;
777 else
778 vnode = varpool_node::get_create (node);
779 vnode->tls_model = model;
782 /* Compute the number of bytes occupied by a tree with code CODE.
783 This function cannot be used for nodes that have variable sizes,
784 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
785 size_t
786 tree_code_size (enum tree_code code)
788 switch (TREE_CODE_CLASS (code))
790 case tcc_declaration: /* A decl node */
791 switch (code)
793 case FIELD_DECL: return sizeof (tree_field_decl);
794 case PARM_DECL: return sizeof (tree_parm_decl);
795 case VAR_DECL: return sizeof (tree_var_decl);
796 case LABEL_DECL: return sizeof (tree_label_decl);
797 case RESULT_DECL: return sizeof (tree_result_decl);
798 case CONST_DECL: return sizeof (tree_const_decl);
799 case TYPE_DECL: return sizeof (tree_type_decl);
800 case FUNCTION_DECL: return sizeof (tree_function_decl);
801 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
802 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
803 case NAMESPACE_DECL:
804 case IMPORTED_DECL:
805 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
806 default:
807 gcc_checking_assert (code >= NUM_TREE_CODES);
808 return lang_hooks.tree_size (code);
811 case tcc_type: /* a type node */
812 switch (code)
814 case OFFSET_TYPE:
815 case ENUMERAL_TYPE:
816 case BOOLEAN_TYPE:
817 case INTEGER_TYPE:
818 case REAL_TYPE:
819 case POINTER_TYPE:
820 case REFERENCE_TYPE:
821 case NULLPTR_TYPE:
822 case FIXED_POINT_TYPE:
823 case COMPLEX_TYPE:
824 case VECTOR_TYPE:
825 case ARRAY_TYPE:
826 case RECORD_TYPE:
827 case UNION_TYPE:
828 case QUAL_UNION_TYPE:
829 case VOID_TYPE:
830 case POINTER_BOUNDS_TYPE:
831 case FUNCTION_TYPE:
832 case METHOD_TYPE:
833 case LANG_TYPE: return sizeof (tree_type_non_common);
834 default:
835 gcc_checking_assert (code >= NUM_TREE_CODES);
836 return lang_hooks.tree_size (code);
839 case tcc_reference: /* a reference */
840 case tcc_expression: /* an expression */
841 case tcc_statement: /* an expression with side effects */
842 case tcc_comparison: /* a comparison expression */
843 case tcc_unary: /* a unary arithmetic expression */
844 case tcc_binary: /* a binary arithmetic expression */
845 return (sizeof (struct tree_exp)
846 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
848 case tcc_constant: /* a constant */
849 switch (code)
851 case VOID_CST: return sizeof (tree_typed);
852 case INTEGER_CST: gcc_unreachable ();
853 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
854 case REAL_CST: return sizeof (tree_real_cst);
855 case FIXED_CST: return sizeof (tree_fixed_cst);
856 case COMPLEX_CST: return sizeof (tree_complex);
857 case VECTOR_CST: gcc_unreachable ();
858 case STRING_CST: gcc_unreachable ();
859 default:
860 gcc_checking_assert (code >= NUM_TREE_CODES);
861 return lang_hooks.tree_size (code);
864 case tcc_exceptional: /* something random, like an identifier. */
865 switch (code)
867 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
868 case TREE_LIST: return sizeof (tree_list);
870 case ERROR_MARK:
871 case PLACEHOLDER_EXPR: return sizeof (tree_common);
873 case TREE_VEC: gcc_unreachable ();
874 case OMP_CLAUSE: gcc_unreachable ();
876 case SSA_NAME: return sizeof (tree_ssa_name);
878 case STATEMENT_LIST: return sizeof (tree_statement_list);
879 case BLOCK: return sizeof (struct tree_block);
880 case CONSTRUCTOR: return sizeof (tree_constructor);
881 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
882 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
884 default:
885 gcc_checking_assert (code >= NUM_TREE_CODES);
886 return lang_hooks.tree_size (code);
889 default:
890 gcc_unreachable ();
894 /* Compute the number of bytes occupied by NODE. This routine only
895 looks at TREE_CODE, except for those nodes that have variable sizes. */
896 size_t
897 tree_size (const_tree node)
899 const enum tree_code code = TREE_CODE (node);
900 switch (code)
902 case INTEGER_CST:
903 return (sizeof (struct tree_int_cst)
904 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
906 case TREE_BINFO:
907 return (offsetof (struct tree_binfo, base_binfos)
908 + vec<tree, va_gc>
909 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
911 case TREE_VEC:
912 return (sizeof (struct tree_vec)
913 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
915 case VECTOR_CST:
916 return (sizeof (struct tree_vector)
917 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
919 case STRING_CST:
920 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
922 case OMP_CLAUSE:
923 return (sizeof (struct tree_omp_clause)
924 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
925 * sizeof (tree));
927 default:
928 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
929 return (sizeof (struct tree_exp)
930 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
931 else
932 return tree_code_size (code);
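/* A minimal sketch (illustrative only, not part of the original file):
   for variable-sized nodes such as INTEGER_CST only tree_size is
   meaningful; tree_code_size would abort.  check_int_cst_size is a
   hypothetical helper that restates the INTEGER_CST case above.  */
#if 0
static void
check_int_cst_size (const_tree t)
{
  gcc_assert (TREE_CODE (t) == INTEGER_CST);
  size_t expected = (sizeof (struct tree_int_cst)
		     + (TREE_INT_CST_EXT_NUNITS (t) - 1)
		       * sizeof (HOST_WIDE_INT));
  gcc_assert (tree_size (t) == expected);
}
#endif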
936 /* Return tree node kind based on tree CODE. */
938 static tree_node_kind
939 get_stats_node_kind (enum tree_code code)
941 enum tree_code_class type = TREE_CODE_CLASS (code);
943 switch (type)
945 case tcc_declaration: /* A decl node */
946 return d_kind;
947 case tcc_type: /* a type node */
948 return t_kind;
949 case tcc_statement: /* an expression with side effects */
950 return s_kind;
951 case tcc_reference: /* a reference */
952 return r_kind;
953 case tcc_expression: /* an expression */
954 case tcc_comparison: /* a comparison expression */
955 case tcc_unary: /* a unary arithmetic expression */
956 case tcc_binary: /* a binary arithmetic expression */
957 return e_kind;
958 case tcc_constant: /* a constant */
959 return c_kind;
960 case tcc_exceptional: /* something random, like an identifier. */
961 switch (code)
963 case IDENTIFIER_NODE:
964 return id_kind;
965 case TREE_VEC:
966 return vec_kind;
967 case TREE_BINFO:
968 return binfo_kind;
969 case SSA_NAME:
970 return ssa_name_kind;
971 case BLOCK:
972 return b_kind;
973 case CONSTRUCTOR:
974 return constr_kind;
975 case OMP_CLAUSE:
976 return omp_clause_kind;
977 default:
978 return x_kind;
980 break;
981 case tcc_vl_exp:
982 return e_kind;
983 default:
984 gcc_unreachable ();
988 /* Record interesting allocation statistics for a tree node with CODE
989 and LENGTH. */
991 static void
992 record_node_allocation_statistics (enum tree_code code, size_t length)
994 if (!GATHER_STATISTICS)
995 return;
997 tree_node_kind kind = get_stats_node_kind (code);
999 tree_code_counts[(int) code]++;
1000 tree_node_counts[(int) kind]++;
1001 tree_node_sizes[(int) kind] += length;
1004 /* Allocate and return a new UID from the DECL_UID namespace. */
1006 int
1007 allocate_decl_uid (void)
1009 return next_decl_uid++;
1012 /* Return a newly allocated node of code CODE. For decl and type
1013 nodes, some other fields are initialized. The rest of the node is
1014 initialized to zero. This function cannot be used for TREE_VEC,
1015 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1016 tree_code_size.
1018 Achoo! I got a code in the node. */
1020 tree
1021 make_node (enum tree_code code MEM_STAT_DECL)
1023 tree t;
1024 enum tree_code_class type = TREE_CODE_CLASS (code);
1025 size_t length = tree_code_size (code);
1027 record_node_allocation_statistics (code, length);
1029 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1030 TREE_SET_CODE (t, code);
1032 switch (type)
1034 case tcc_statement:
1035 if (code != DEBUG_BEGIN_STMT)
1036 TREE_SIDE_EFFECTS (t) = 1;
1037 break;
1039 case tcc_declaration:
1040 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1042 if (code == FUNCTION_DECL)
1044 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1045 SET_DECL_MODE (t, FUNCTION_MODE);
1047 else
1048 SET_DECL_ALIGN (t, 1);
1050 DECL_SOURCE_LOCATION (t) = input_location;
1051 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1052 DECL_UID (t) = --next_debug_decl_uid;
1053 else
1055 DECL_UID (t) = allocate_decl_uid ();
1056 SET_DECL_PT_UID (t, -1);
1058 if (TREE_CODE (t) == LABEL_DECL)
1059 LABEL_DECL_UID (t) = -1;
1061 break;
1063 case tcc_type:
1064 TYPE_UID (t) = next_type_uid++;
1065 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1066 TYPE_USER_ALIGN (t) = 0;
1067 TYPE_MAIN_VARIANT (t) = t;
1068 TYPE_CANONICAL (t) = t;
1070 /* Default to no attributes for type, but let target change that. */
1071 TYPE_ATTRIBUTES (t) = NULL_TREE;
1072 targetm.set_default_type_attributes (t);
1074 /* We have not yet computed the alias set for this type. */
1075 TYPE_ALIAS_SET (t) = -1;
1076 break;
1078 case tcc_constant:
1079 TREE_CONSTANT (t) = 1;
1080 break;
1082 case tcc_expression:
1083 switch (code)
1085 case INIT_EXPR:
1086 case MODIFY_EXPR:
1087 case VA_ARG_EXPR:
1088 case PREDECREMENT_EXPR:
1089 case PREINCREMENT_EXPR:
1090 case POSTDECREMENT_EXPR:
1091 case POSTINCREMENT_EXPR:
1092 /* All of these have side-effects, no matter what their
1093 operands are. */
1094 TREE_SIDE_EFFECTS (t) = 1;
1095 break;
1097 default:
1098 break;
1100 break;
1102 case tcc_exceptional:
1103 switch (code)
1105 case TARGET_OPTION_NODE:
1106 TREE_TARGET_OPTION(t)
1107 = ggc_cleared_alloc<struct cl_target_option> ();
1108 break;
1110 case OPTIMIZATION_NODE:
1111 TREE_OPTIMIZATION (t)
1112 = ggc_cleared_alloc<struct cl_optimization> ();
1113 break;
1115 default:
1116 break;
1118 break;
1120 default:
1121 /* Other classes need no special treatment. */
1122 break;
1125 return t;
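/* A minimal sketch (illustrative only, not part of the original file) of
   typical make_node use: create a bare node and fill in the fields the
   caller cares about.  make_node has already assigned a fresh TYPE_UID,
   the default alignment, and made the type its own main variant and
   canonical type.  make_empty_record is a hypothetical helper.  */
#if 0
static tree
make_empty_record (void)
{
  tree t = make_node (RECORD_TYPE);
  TYPE_NAME (t) = get_identifier ("empty");
  /* The caller would chain FIELD_DECLs onto TYPE_FIELDS and lay the
     type out afterwards.  */
  return t;
}
#endif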
1128 /* Free tree node. */
1130 void
1131 free_node (tree node)
1133 enum tree_code code = TREE_CODE (node);
1134 if (GATHER_STATISTICS)
1136 enum tree_node_kind kind = get_stats_node_kind (code);
1138 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1139 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1140 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1142 tree_code_counts[(int) TREE_CODE (node)]--;
1143 tree_node_counts[(int) kind]--;
1144 tree_node_sizes[(int) kind] -= tree_size (node);
1146 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1147 vec_free (CONSTRUCTOR_ELTS (node));
1148 else if (code == BLOCK)
1149 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1150 else if (code == TREE_BINFO)
1151 vec_free (BINFO_BASE_ACCESSES (node));
1152 ggc_free (node);
1155 /* Return a new node with the same contents as NODE except that its
1156 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1158 tree
1159 copy_node (tree node MEM_STAT_DECL)
1161 tree t;
1162 enum tree_code code = TREE_CODE (node);
1163 size_t length;
1165 gcc_assert (code != STATEMENT_LIST);
1167 length = tree_size (node);
1168 record_node_allocation_statistics (code, length);
1169 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1170 memcpy (t, node, length);
1172 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1173 TREE_CHAIN (t) = 0;
1174 TREE_ASM_WRITTEN (t) = 0;
1175 TREE_VISITED (t) = 0;
1177 if (TREE_CODE_CLASS (code) == tcc_declaration)
1179 if (code == DEBUG_EXPR_DECL)
1180 DECL_UID (t) = --next_debug_decl_uid;
1181 else
1183 DECL_UID (t) = allocate_decl_uid ();
1184 if (DECL_PT_UID_SET_P (node))
1185 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1187 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1188 && DECL_HAS_VALUE_EXPR_P (node))
1190 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1191 DECL_HAS_VALUE_EXPR_P (t) = 1;
1193 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1194 if (VAR_P (node))
1196 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1197 t->decl_with_vis.symtab_node = NULL;
1199 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1201 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1202 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1204 if (TREE_CODE (node) == FUNCTION_DECL)
1206 DECL_STRUCT_FUNCTION (t) = NULL;
1207 t->decl_with_vis.symtab_node = NULL;
1210 else if (TREE_CODE_CLASS (code) == tcc_type)
1212 TYPE_UID (t) = next_type_uid++;
1213 /* The following is so that the debug code for
1214 the copy is different from the original type.
1215 The two statements usually duplicate each other
1216 (because they clear fields of the same union),
1217 but the optimizer should catch that. */
1218 TYPE_SYMTAB_ADDRESS (t) = 0;
1219 TYPE_SYMTAB_DIE (t) = 0;
1221 /* Do not copy the values cache. */
1222 if (TYPE_CACHED_VALUES_P (t))
1224 TYPE_CACHED_VALUES_P (t) = 0;
1225 TYPE_CACHED_VALUES (t) = NULL_TREE;
1228 else if (code == TARGET_OPTION_NODE)
1230 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1231 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1232 sizeof (struct cl_target_option));
1234 else if (code == OPTIMIZATION_NODE)
1236 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1237 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1238 sizeof (struct cl_optimization));
1241 return t;
1244 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1245 For example, this can copy a list made of TREE_LIST nodes. */
1247 tree
1248 copy_list (tree list)
1250 tree head;
1251 tree prev, next;
1253 if (list == 0)
1254 return 0;
1256 head = prev = copy_node (list);
1257 next = TREE_CHAIN (list);
1258 while (next)
1260 TREE_CHAIN (prev) = copy_node (next);
1261 prev = TREE_CHAIN (prev);
1262 next = TREE_CHAIN (next);
1264 return head;
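/* A minimal sketch (illustrative only, not part of the original file):
   copy_list returns a fresh chain whose TREE_PURPOSE/TREE_VALUE still
   point at the original nodes, so the copy can be extended without
   disturbing the original.  prepend_copy is a hypothetical helper.  */
#if 0
static tree
prepend_copy (tree orig_list, tree purpose, tree value)
{
  return tree_cons (purpose, value, copy_list (orig_list));
}
#endif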
1268 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1269 INTEGER_CST with value CST and type TYPE. */
1271 static unsigned int
1272 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1274 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1275 /* We need extra HWIs if CST is an unsigned integer with its
1276 upper bit set. */
1277 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1278 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1279 return cst.get_len ();
1282 /* Return a new INTEGER_CST with value CST and type TYPE. */
1284 static tree
1285 build_new_int_cst (tree type, const wide_int &cst)
1287 unsigned int len = cst.get_len ();
1288 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1289 tree nt = make_int_cst (len, ext_len);
1291 if (len < ext_len)
1293 --ext_len;
1294 TREE_INT_CST_ELT (nt, ext_len)
1295 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1296 for (unsigned int i = len; i < ext_len; ++i)
1297 TREE_INT_CST_ELT (nt, i) = -1;
1299 else if (TYPE_UNSIGNED (type)
1300 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1302 len--;
1303 TREE_INT_CST_ELT (nt, len)
1304 = zext_hwi (cst.elt (len),
1305 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1308 for (unsigned int i = 0; i < len; i++)
1309 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1310 TREE_TYPE (nt) = type;
1311 return nt;
1314 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1316 static tree
1317 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1318 CXX_MEM_STAT_INFO)
1320 size_t length = sizeof (struct tree_poly_int_cst);
1321 record_node_allocation_statistics (POLY_INT_CST, length);
1323 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1325 TREE_SET_CODE (t, POLY_INT_CST);
1326 TREE_CONSTANT (t) = 1;
1327 TREE_TYPE (t) = type;
1328 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1329 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1330 return t;
1333 /* Create a constant tree that contains CST sign-extended to TYPE. */
1335 tree
1336 build_int_cst (tree type, poly_int64 cst)
1338 /* Support legacy code. */
1339 if (!type)
1340 type = integer_type_node;
1342 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1345 /* Create a constant tree that contains CST zero-extended to TYPE. */
1347 tree
1348 build_int_cstu (tree type, poly_uint64 cst)
1350 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1353 /* Create a constant tree that contains CST sign-extended to TYPE. */
1355 tree
1356 build_int_cst_type (tree type, poly_int64 cst)
1358 gcc_assert (type);
1359 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1362 /* Construct a tree of type TYPE with the value given by CST. The signedness
1363 of CST is assumed to be the same as the signedness of TYPE. */
1365 tree
1366 double_int_to_tree (tree type, double_int cst)
1368 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1371 /* We force the wide_int CST to the range of the type TYPE by sign or
1372 zero extending it. OVERFLOWABLE indicates if we are interested in
1373 overflow of the value, when >0 we are only interested in signed
1374 overflow, for <0 we are interested in any overflow. OVERFLOWED
1375 indicates whether overflow has already occurred. We force CST's
1376 value to be within the range of TYPE (by setting to 0 or 1 all the
1377 bits outside the type's range). We set TREE_OVERFLOW if
1378 OVERFLOWED is nonzero,
1380 or OVERFLOWABLE is >0 and signed overflow occurs
1381 or OVERFLOWABLE is <0 and any overflow occurs
1382 We return a new tree node for the extended wide_int. The node
1383 is shared if no overflow flags are set. */
1386 tree
1387 force_fit_type (tree type, const poly_wide_int_ref &cst,
1388 int overflowable, bool overflowed)
1390 signop sign = TYPE_SIGN (type);
1392 /* If we need to set overflow flags, return a new unshared node. */
1393 if (overflowed || !wi::fits_to_tree_p (cst, type))
1395 if (overflowed
1396 || overflowable < 0
1397 || (overflowable > 0 && sign == SIGNED))
1399 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1400 sign);
1401 tree t;
1402 if (tmp.is_constant ())
1403 t = build_new_int_cst (type, tmp.coeffs[0]);
1404 else
1406 tree coeffs[NUM_POLY_INT_COEFFS];
1407 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1409 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1410 TREE_OVERFLOW (coeffs[i]) = 1;
1412 t = build_new_poly_int_cst (type, coeffs);
1414 TREE_OVERFLOW (t) = 1;
1415 return t;
1419 /* Else build a shared node. */
1420 return wide_int_to_tree (type, cst);
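/* A minimal sketch (illustrative only, not part of the original file) of
   the contract above: the value is truncated to TYPE's precision and
   TREE_OVERFLOW is set only under the requested conditions.
   signed_char_cst_checked is a hypothetical helper; for values that do
   not fit in signed char it returns an unshared node with TREE_OVERFLOW
   set, because OVERFLOWABLE > 0 and the type is signed.  */
#if 0
static tree
signed_char_cst_checked (HOST_WIDE_INT val)
{
  wide_int w = wi::shwi (val, HOST_BITS_PER_WIDE_INT);
  return force_fit_type (signed_char_type_node, w, 1, false);
}
#endif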
1423 /* These are the hash table functions for the hash table of INTEGER_CST
1424 nodes of a sizetype. */
1426 /* Return the hash code X, an INTEGER_CST. */
1428 hashval_t
1429 int_cst_hasher::hash (tree x)
1431 const_tree const t = x;
1432 hashval_t code = TYPE_UID (TREE_TYPE (t));
1433 int i;
1435 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1436 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1438 return code;
1441 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1442 is the same as that given by *Y, also an INTEGER_CST tree node. */
1444 bool
1445 int_cst_hasher::equal (tree x, tree y)
1447 const_tree const xt = x;
1448 const_tree const yt = y;
1450 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1451 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1452 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1453 return false;
1455 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1456 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1457 return false;
1459 return true;
1462 /* Create an INT_CST node of TYPE and value CST.
1463 The returned node is always shared. For small integers we use a
1464 per-type vector cache, for larger ones we use a single hash table.
1465 The value is extended from its precision according to the sign of
1466 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1467 the upper bits and ensures that hashing and value equality based
1468 upon the underlying HOST_WIDE_INTs works without masking. */
1470 static tree
1471 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1473 tree t;
1474 int ix = -1;
1475 int limit = 0;
1477 gcc_assert (type);
1478 unsigned int prec = TYPE_PRECISION (type);
1479 signop sgn = TYPE_SIGN (type);
1481 /* Verify that everything is canonical. */
1482 int l = pcst.get_len ();
1483 if (l > 1)
1485 if (pcst.elt (l - 1) == 0)
1486 gcc_checking_assert (pcst.elt (l - 2) < 0);
1487 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1488 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1491 wide_int cst = wide_int::from (pcst, prec, sgn);
1492 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1494 if (ext_len == 1)
1496 /* We just need to store a single HOST_WIDE_INT. */
1497 HOST_WIDE_INT hwi;
1498 if (TYPE_UNSIGNED (type))
1499 hwi = cst.to_uhwi ();
1500 else
1501 hwi = cst.to_shwi ();
1503 switch (TREE_CODE (type))
1505 case NULLPTR_TYPE:
1506 gcc_assert (hwi == 0);
1507 /* Fallthru. */
1509 case POINTER_TYPE:
1510 case REFERENCE_TYPE:
1511 case POINTER_BOUNDS_TYPE:
1512 /* Cache NULL pointer and zero bounds. */
1513 if (hwi == 0)
1515 limit = 1;
1516 ix = 0;
1518 break;
1520 case BOOLEAN_TYPE:
1521 /* Cache false or true. */
1522 limit = 2;
1523 if (IN_RANGE (hwi, 0, 1))
1524 ix = hwi;
1525 break;
1527 case INTEGER_TYPE:
1528 case OFFSET_TYPE:
1529 if (TYPE_SIGN (type) == UNSIGNED)
1531 /* Cache [0, N). */
1532 limit = INTEGER_SHARE_LIMIT;
1533 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1534 ix = hwi;
1536 else
1538 /* Cache [-1, N). */
1539 limit = INTEGER_SHARE_LIMIT + 1;
1540 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1541 ix = hwi + 1;
1543 break;
1545 case ENUMERAL_TYPE:
1546 break;
1548 default:
1549 gcc_unreachable ();
1552 if (ix >= 0)
1554 /* Look for it in the type's vector of small shared ints. */
1555 if (!TYPE_CACHED_VALUES_P (type))
1557 TYPE_CACHED_VALUES_P (type) = 1;
1558 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1561 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1562 if (t)
1563 /* Make sure no one is clobbering the shared constant. */
1564 gcc_checking_assert (TREE_TYPE (t) == type
1565 && TREE_INT_CST_NUNITS (t) == 1
1566 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1567 && TREE_INT_CST_EXT_NUNITS (t) == 1
1568 && TREE_INT_CST_ELT (t, 0) == hwi);
1569 else
1571 /* Create a new shared int. */
1572 t = build_new_int_cst (type, cst);
1573 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1576 else
1578 /* Use the cache of larger shared ints, using int_cst_node as
1579 a temporary. */
1581 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1582 TREE_TYPE (int_cst_node) = type;
1584 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1585 t = *slot;
1586 if (!t)
1588 /* Insert this one into the hash table. */
1589 t = int_cst_node;
1590 *slot = t;
1591 /* Make a new node for next time round. */
1592 int_cst_node = make_int_cst (1, 1);
1596 else
1598 /* The value either hashes properly or we drop it on the floor
1599 for the gc to take care of. There will not be enough of them
1600 to worry about. */
1602 tree nt = build_new_int_cst (type, cst);
1603 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1604 t = *slot;
1605 if (!t)
1607 /* Insert this one into the hash table. */
1608 t = nt;
1609 *slot = t;
1611 else
1612 ggc_free (nt);
1615 return t;
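/* A minimal sketch (illustrative only, not part of the original file) of
   the sharing guarantee documented above: small constants of the same
   type come back as the identical node, so pointer comparison suffices.
   check_small_int_sharing is a hypothetical helper.  */
#if 0
static void
check_small_int_sharing (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  gcc_assert (a == b);  /* The same cached INTEGER_CST, not merely equal.  */
}
#endif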
1618 hashval_t
1619 poly_int_cst_hasher::hash (tree t)
1621 inchash::hash hstate;
1623 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1624 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1625 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1627 return hstate.end ();
1630 bool
1631 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1633 if (TREE_TYPE (x) != y.first)
1634 return false;
1635 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1636 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1637 return false;
1638 return true;
1641 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1642 The elements must also have type TYPE. */
1644 tree
1645 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1647 unsigned int prec = TYPE_PRECISION (type);
1648 gcc_assert (prec <= values.coeffs[0].get_precision ());
1649 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1651 inchash::hash h;
1652 h.add_int (TYPE_UID (type));
1653 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1654 h.add_wide_int (c.coeffs[i]);
1655 poly_int_cst_hasher::compare_type comp (type, &c);
1656 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1657 INSERT);
1658 if (*slot == NULL_TREE)
1660 tree coeffs[NUM_POLY_INT_COEFFS];
1661 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1662 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1663 *slot = build_new_poly_int_cst (type, coeffs);
1665 return *slot;
1668 /* Create a constant tree with value VALUE in type TYPE. */
1670 tree
1671 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1673 if (value.is_constant ())
1674 return wide_int_to_tree_1 (type, value.coeffs[0]);
1675 return build_poly_int_cst (type, value);
1678 void
1679 cache_integer_cst (tree t)
1681 tree type = TREE_TYPE (t);
1682 int ix = -1;
1683 int limit = 0;
1684 int prec = TYPE_PRECISION (type);
1686 gcc_assert (!TREE_OVERFLOW (t));
1688 switch (TREE_CODE (type))
1690 case NULLPTR_TYPE:
1691 gcc_assert (integer_zerop (t));
1692 /* Fallthru. */
1694 case POINTER_TYPE:
1695 case REFERENCE_TYPE:
1696 /* Cache NULL pointer. */
1697 if (integer_zerop (t))
1699 limit = 1;
1700 ix = 0;
1702 break;
1704 case BOOLEAN_TYPE:
1705 /* Cache false or true. */
1706 limit = 2;
1707 if (wi::ltu_p (wi::to_wide (t), 2))
1708 ix = TREE_INT_CST_ELT (t, 0);
1709 break;
1711 case INTEGER_TYPE:
1712 case OFFSET_TYPE:
1713 if (TYPE_UNSIGNED (type))
1715 /* Cache 0..N */
1716 limit = INTEGER_SHARE_LIMIT;
1718 /* This is a little hokey, but if the prec is smaller than
1719 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1720 obvious test will not get the correct answer. */
1721 if (prec < HOST_BITS_PER_WIDE_INT)
1723 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1724 ix = tree_to_uhwi (t);
1726 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1727 ix = tree_to_uhwi (t);
1729 else
1731 /* Cache -1..N */
1732 limit = INTEGER_SHARE_LIMIT + 1;
1734 if (integer_minus_onep (t))
1735 ix = 0;
1736 else if (!wi::neg_p (wi::to_wide (t)))
1738 if (prec < HOST_BITS_PER_WIDE_INT)
1740 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1741 ix = tree_to_shwi (t) + 1;
1743 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1744 ix = tree_to_shwi (t) + 1;
1747 break;
1749 case ENUMERAL_TYPE:
1750 break;
1752 default:
1753 gcc_unreachable ();
1756 if (ix >= 0)
1758 /* Look for it in the type's vector of small shared ints. */
1759 if (!TYPE_CACHED_VALUES_P (type))
1761 TYPE_CACHED_VALUES_P (type) = 1;
1762 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1765 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1766 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1768 else
1770 /* Use the cache of larger shared ints. */
1771 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1772 /* If there is already an entry for the number, verify it's the
1773 same. */
1774 if (*slot)
1775 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1776 else
1777 /* Otherwise insert this one into the hash table. */
1778 *slot = t;
1783 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1784 and the rest are zeros. */
1786 tree
1787 build_low_bits_mask (tree type, unsigned bits)
1789 gcc_assert (bits <= TYPE_PRECISION (type));
1791 return wide_int_to_tree (type, wi::mask (bits, false,
1792 TYPE_PRECISION (type)));
1795 /* Checks that X is an integer constant that can be expressed in (unsigned)
1796 HOST_WIDE_INT without loss of precision. */
1798 bool
1799 cst_and_fits_in_hwi (const_tree x)
1801 return (TREE_CODE (x) == INTEGER_CST
1802 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
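/* A minimal sketch (illustrative only, not part of the original file)
   exercising the two helpers above: the low 8 bits of unsigned int give
   the constant 0xff, which trivially fits in a HOST_WIDE_INT.
   low_bits_mask_example is a hypothetical helper.  */
#if 0
static void
low_bits_mask_example (void)
{
  tree mask = build_low_bits_mask (unsigned_type_node, 8);
  gcc_assert (cst_and_fits_in_hwi (mask) && tree_to_uhwi (mask) == 0xff);
}
#endif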
1805 /* Build a newly constructed VECTOR_CST with the given values of
1806 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1808 tree
1809 make_vector (unsigned log2_npatterns,
1810 unsigned int nelts_per_pattern MEM_STAT_DECL)
1812 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1813 tree t;
1814 unsigned npatterns = 1 << log2_npatterns;
1815 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1816 unsigned length = (sizeof (struct tree_vector)
1817 + (encoded_nelts - 1) * sizeof (tree));
1819 record_node_allocation_statistics (VECTOR_CST, length);
1821 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1823 TREE_SET_CODE (t, VECTOR_CST);
1824 TREE_CONSTANT (t) = 1;
1825 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1826 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1828 return t;
1831 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1832 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1834 tree
1835 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1837 unsigned HOST_WIDE_INT idx, nelts;
1838 tree value;
1840 /* We can't construct a VECTOR_CST for a variable number of elements. */
1841 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1842 tree_vector_builder vec (type, nelts, 1);
1843 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1845 if (TREE_CODE (value) == VECTOR_CST)
1847 /* If NELTS is constant then this must be too. */
1848 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1849 for (unsigned i = 0; i < sub_nelts; ++i)
1850 vec.quick_push (VECTOR_CST_ELT (value, i));
1852 else
1853 vec.quick_push (value);
1855 while (vec.length () < nelts)
1856 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1858 return vec.build ();
1861 /* Build a vector of type VECTYPE where all the elements are SCs. */
1862 tree
1863 build_vector_from_val (tree vectype, tree sc)
1865 unsigned HOST_WIDE_INT i, nunits;
1867 if (sc == error_mark_node)
1868 return sc;
1870 /* Verify that the vector type is suitable for SC. Note that there
1871 is some inconsistency in the type-system with respect to restrict
1872 qualifications of pointers. Vector types always have a main-variant
1873 element type and the qualification is applied to the vector-type.
1874 So TREE_TYPE (vector-type) does not return a properly qualified
1875 vector element-type. */
1876 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1877 TREE_TYPE (vectype)));
1879 if (CONSTANT_CLASS_P (sc))
1881 tree_vector_builder v (vectype, 1, 1);
1882 v.quick_push (sc);
1883 return v.build ();
1885 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1886 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1887 else
1889 vec<constructor_elt, va_gc> *v;
1890 vec_alloc (v, nunits);
1891 for (i = 0; i < nunits; ++i)
1892 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1893 return build_constructor (vectype, v);
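/* A minimal sketch (illustrative only, not part of the original file),
   assuming VECTYPE is an integer vector type: splatting a constant
   element yields a VECTOR_CST with a single one-element pattern, while a
   variable element falls back to a CONSTRUCTOR or VEC_DUPLICATE_EXPR.
   splat_42 is a hypothetical helper.  */
#if 0
static tree
splat_42 (tree vectype)
{
  return build_vector_from_val (vectype,
				build_int_cst (TREE_TYPE (vectype), 42));
}
#endif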
1897 /* Build a vector series of type TYPE in which element I has the value
1898 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1899 and a VEC_SERIES_EXPR otherwise. */
1901 tree
1902 build_vec_series (tree type, tree base, tree step)
1904 if (integer_zerop (step))
1905 return build_vector_from_val (type, base);
1906 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1908 tree_vector_builder builder (type, 1, 3);
1909 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1910 wi::to_wide (base) + wi::to_wide (step));
1911 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1912 wi::to_wide (elt1) + wi::to_wide (step));
1913 builder.quick_push (base);
1914 builder.quick_push (elt1);
1915 builder.quick_push (elt2);
1916 return builder.build ();
1918 return build2 (VEC_SERIES_EXPR, type, base, step);
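/* A minimal sketch (illustrative only, not part of the original file),
   assuming INT_VEC_TYPE is an integer vector type: a constant series
   such as { 0, 1, 2, ... } is encoded by its first three elements, from
   which the rest follow with a constant step.  iota_vector is a
   hypothetical helper.  */
#if 0
static tree
iota_vector (tree int_vec_type)
{
  tree elt_type = TREE_TYPE (int_vec_type);
  return build_vec_series (int_vec_type,
			   build_int_cst (elt_type, 0),
			   build_int_cst (elt_type, 1));
}
#endif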
1921 /* Return a vector with the same number of units and number of bits
1922 as VEC_TYPE, but in which the elements are a linear series of unsigned
1923 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1925 tree
1926 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1928 tree index_vec_type = vec_type;
1929 tree index_elt_type = TREE_TYPE (vec_type);
1930 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1931 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1933 index_elt_type = build_nonstandard_integer_type
1934 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1935 index_vec_type = build_vector_type (index_elt_type, nunits);
1938 tree_vector_builder v (index_vec_type, 1, 3);
1939 for (unsigned int i = 0; i < 3; ++i)
1940 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1941 return v.build ();
1944 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1945 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1947 void
1948 recompute_constructor_flags (tree c)
1950 unsigned int i;
1951 tree val;
1952 bool constant_p = true;
1953 bool side_effects_p = false;
1954 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1956 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1958 /* Mostly ctors will have elts that don't have side-effects, so
1959 the usual case is to scan all the elements. Hence a single
1960 loop for both const and side effects, rather than one loop
1961 each (with early outs). */
1962 if (!TREE_CONSTANT (val))
1963 constant_p = false;
1964 if (TREE_SIDE_EFFECTS (val))
1965 side_effects_p = true;
1968 TREE_SIDE_EFFECTS (c) = side_effects_p;
1969 TREE_CONSTANT (c) = constant_p;
1972 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1973 CONSTRUCTOR C. */
1975 void
1976 verify_constructor_flags (tree c)
1978 unsigned int i;
1979 tree val;
1980 bool constant_p = TREE_CONSTANT (c);
1981 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1982 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1984 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1986 if (constant_p && !TREE_CONSTANT (val))
1987 internal_error ("non-constant element in constant CONSTRUCTOR");
1988 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1989 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1993 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1994 are in the vec pointed to by VALS. */
1995 tree
1996 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1998 tree c = make_node (CONSTRUCTOR);
2000 TREE_TYPE (c) = type;
2001 CONSTRUCTOR_ELTS (c) = vals;
2003 recompute_constructor_flags (c);
2005 return c;
2008 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2009 INDEX and VALUE. */
2010 tree
2011 build_constructor_single (tree type, tree index, tree value)
2013 vec<constructor_elt, va_gc> *v;
2014 constructor_elt elt = {index, value};
2016 vec_alloc (v, 1);
2017 v->quick_push (elt);
2019 return build_constructor (type, v);
2023 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2024 are in a list pointed to by VALS. */
2025 tree
2026 build_constructor_from_list (tree type, tree vals)
2028 tree t;
2029 vec<constructor_elt, va_gc> *v = NULL;
2031 if (vals)
2033 vec_alloc (v, list_length (vals));
2034 for (t = vals; t; t = TREE_CHAIN (t))
2035 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2038 return build_constructor (type, v);
2041 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2042 of elements, provided as index/value pairs. */
2044 tree
2045 build_constructor_va (tree type, int nelts, ...)
2047 vec<constructor_elt, va_gc> *v = NULL;
2048 va_list p;
2050 va_start (p, nelts);
2051 vec_alloc (v, nelts);
2052 while (nelts--)
2054 tree index = va_arg (p, tree);
2055 tree value = va_arg (p, tree);
2056 CONSTRUCTOR_APPEND_ELT (v, index, value);
2058 va_end (p);
2059 return build_constructor (type, v);
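/* Illustrative sketch, not part of tree.c: one way a caller might use
   build_constructor_va to initialize a two-element array.  ARRAY_TYPE_, V0
   and V1 are hypothetical arguments; the index/value pairs are consumed
   from the va_list in order, exactly as if {size_int (0), V0} and
   {size_int (1), V1} had been appended by hand before calling
   build_constructor.  */
static tree
example_two_element_ctor (tree array_type_, tree v0, tree v1)
{
  return build_constructor_va (array_type_, 2,
			       size_int (0), v0,
			       size_int (1), v1);
}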
2062 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2064 tree
2065 build_fixed (tree type, FIXED_VALUE_TYPE f)
2067 tree v;
2068 FIXED_VALUE_TYPE *fp;
2070 v = make_node (FIXED_CST);
2071 fp = ggc_alloc<fixed_value> ();
2072 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2074 TREE_TYPE (v) = type;
2075 TREE_FIXED_CST_PTR (v) = fp;
2076 return v;
2079 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2081 tree
2082 build_real (tree type, REAL_VALUE_TYPE d)
2084 tree v;
2085 REAL_VALUE_TYPE *dp;
2086 int overflow = 0;
2088 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2089 Consider doing it via real_convert now. */
2091 v = make_node (REAL_CST);
2092 dp = ggc_alloc<real_value> ();
2093 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2095 TREE_TYPE (v) = type;
2096 TREE_REAL_CST_PTR (v) = dp;
2097 TREE_OVERFLOW (v) = overflow;
2098 return v;
2101 /* Like build_real, but first truncate D to the type. */
2103 tree
2104 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2106 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2109 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
2110 node I, converted to the mode of type TYPE if TYPE is given. */
2112 REAL_VALUE_TYPE
2113 real_value_from_int_cst (const_tree type, const_tree i)
2115 REAL_VALUE_TYPE d;
2117 /* Clear all bits of the real value type so that we can later do
2118 bitwise comparisons to see if two values are the same. */
2119 memset (&d, 0, sizeof d);
2121 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2122 TYPE_SIGN (TREE_TYPE (i)));
2123 return d;
2126 /* Given a tree representing an integer constant I, return a tree
2127 representing the same value as a floating-point constant of type TYPE. */
2129 tree
2130 build_real_from_int_cst (tree type, const_tree i)
2132 tree v;
2133 int overflow = TREE_OVERFLOW (i);
2135 v = build_real (type, real_value_from_int_cst (type, i));
2137 TREE_OVERFLOW (v) |= overflow;
2138 return v;
2141 /* Return a newly constructed STRING_CST node whose value is
2142 the LEN characters at STR.
2143 Note that for a C string literal, LEN should include the trailing NUL.
2144 The TREE_TYPE is not initialized. */
2146 tree
2147 build_string (int len, const char *str)
2149 tree s;
2150 size_t length;
2152 /* Do not waste bytes provided by padding of struct tree_string. */
2153 length = len + offsetof (struct tree_string, str) + 1;
2155 record_node_allocation_statistics (STRING_CST, length);
2157 s = (tree) ggc_internal_alloc (length);
2159 memset (s, 0, sizeof (struct tree_typed));
2160 TREE_SET_CODE (s, STRING_CST);
2161 TREE_CONSTANT (s) = 1;
2162 TREE_STRING_LENGTH (s) = len;
2163 memcpy (s->string.str, str, len);
2164 s->string.str[len] = '\0';
2166 return s;
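/* Illustrative sketch, not part of tree.c: for a C string literal the caller
   normally passes strlen + 1 so that LEN counts the trailing NUL, as the
   comment above says; build_string NUL-terminates its private copy either
   way and leaves TREE_TYPE for the caller to fill in.  MSG is a
   hypothetical local.  */
static tree
example_string_cst (void)
{
  const char *msg = "hello";
  return build_string (strlen (msg) + 1, msg);
}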
2169 /* Return a newly constructed COMPLEX_CST node whose value is
2170 specified by the real and imaginary parts REAL and IMAG.
2171 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2172 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2174 tree
2175 build_complex (tree type, tree real, tree imag)
2177 tree t = make_node (COMPLEX_CST);
2179 TREE_REALPART (t) = real;
2180 TREE_IMAGPART (t) = imag;
2181 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2182 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2183 return t;
2186 /* Build a complex (inf +- 0i), such as for the result of cproj.
2187 TYPE is the complex tree type of the result. If NEG is true, the
2188 imaginary zero is negative. */
2190 tree
2191 build_complex_inf (tree type, bool neg)
2193 REAL_VALUE_TYPE rinf, rzero = dconst0;
2195 real_inf (&rinf);
2196 rzero.sign = neg;
2197 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2198 build_real (TREE_TYPE (type), rzero));
2201 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2202 element is set to 1. In particular, this is 1 + i for complex types. */
2204 tree
2205 build_each_one_cst (tree type)
2207 if (TREE_CODE (type) == COMPLEX_TYPE)
2209 tree scalar = build_one_cst (TREE_TYPE (type));
2210 return build_complex (type, scalar, scalar);
2212 else
2213 return build_one_cst (type);
2216 /* Return a constant of arithmetic type TYPE which is the
2217 multiplicative identity of the set TYPE. */
2219 tree
2220 build_one_cst (tree type)
2222 switch (TREE_CODE (type))
2224 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2225 case POINTER_TYPE: case REFERENCE_TYPE:
2226 case OFFSET_TYPE:
2227 return build_int_cst (type, 1);
2229 case REAL_TYPE:
2230 return build_real (type, dconst1);
2232 case FIXED_POINT_TYPE:
2233 /* We can only generate 1 for accum types. */
2234 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2235 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2237 case VECTOR_TYPE:
2239 tree scalar = build_one_cst (TREE_TYPE (type));
2241 return build_vector_from_val (type, scalar);
2244 case COMPLEX_TYPE:
2245 return build_complex (type,
2246 build_one_cst (TREE_TYPE (type)),
2247 build_zero_cst (TREE_TYPE (type)));
2249 default:
2250 gcc_unreachable ();
2254 /* Return an integer of type TYPE containing all 1's in as much precision as
2255 it contains, or a complex or vector whose subparts are such integers. */
2257 tree
2258 build_all_ones_cst (tree type)
2260 if (TREE_CODE (type) == COMPLEX_TYPE)
2262 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2263 return build_complex (type, scalar, scalar);
2265 else
2266 return build_minus_one_cst (type);
2269 /* Return a constant of arithmetic type TYPE which is the
2270 opposite of the multiplicative identity of the set TYPE. */
2272 tree
2273 build_minus_one_cst (tree type)
2275 switch (TREE_CODE (type))
2277 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2278 case POINTER_TYPE: case REFERENCE_TYPE:
2279 case OFFSET_TYPE:
2280 return build_int_cst (type, -1);
2282 case REAL_TYPE:
2283 return build_real (type, dconstm1);
2285 case FIXED_POINT_TYPE:
2286 /* We can only generate 1 for accum types. */
2287 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2288 return build_fixed (type,
2289 fixed_from_double_int (double_int_minus_one,
2290 SCALAR_TYPE_MODE (type)));
2292 case VECTOR_TYPE:
2294 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2296 return build_vector_from_val (type, scalar);
2299 case COMPLEX_TYPE:
2300 return build_complex (type,
2301 build_minus_one_cst (TREE_TYPE (type)),
2302 build_zero_cst (TREE_TYPE (type)));
2304 default:
2305 gcc_unreachable ();
2309 /* Build 0 constant of type TYPE. This is used by constructor folding
2310 and thus the constant should be represented in memory by
2311 zero(es). */
2313 tree
2314 build_zero_cst (tree type)
2316 switch (TREE_CODE (type))
2318 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2319 case POINTER_TYPE: case REFERENCE_TYPE:
2320 case OFFSET_TYPE: case NULLPTR_TYPE:
2321 return build_int_cst (type, 0);
2323 case REAL_TYPE:
2324 return build_real (type, dconst0);
2326 case FIXED_POINT_TYPE:
2327 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2329 case VECTOR_TYPE:
2331 tree scalar = build_zero_cst (TREE_TYPE (type));
2333 return build_vector_from_val (type, scalar);
2336 case COMPLEX_TYPE:
2338 tree zero = build_zero_cst (TREE_TYPE (type));
2340 return build_complex (type, zero, zero);
2343 default:
2344 if (!AGGREGATE_TYPE_P (type))
2345 return fold_convert (type, integer_zero_node);
2346 return build_constructor (type, NULL);
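/* Illustrative sketch, not part of tree.c: the build_*_cst helpers above all
   dispatch on TREE_CODE (TYPE), so callers can stay type-agnostic; for a
   COMPLEX_TYPE the "one" below is 1 + 0i and for a VECTOR_TYPE it is a
   duplicated scalar.  TYPE is a hypothetical arithmetic or vector type.  */
static tree
example_zero_plus_one (tree type)
{
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  return fold_build2 (PLUS_EXPR, type, zero, one);
}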
2351 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2353 tree
2354 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2356 tree t;
2357 size_t length = (offsetof (struct tree_binfo, base_binfos)
2358 + vec<tree, va_gc>::embedded_size (base_binfos));
2360 record_node_allocation_statistics (TREE_BINFO, length);
2362 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2364 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2366 TREE_SET_CODE (t, TREE_BINFO);
2368 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2370 return t;
2373 /* Create a CASE_LABEL_EXPR tree node and return it. */
2375 tree
2376 build_case_label (tree low_value, tree high_value, tree label_decl)
2378 tree t = make_node (CASE_LABEL_EXPR);
2380 TREE_TYPE (t) = void_type_node;
2381 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2383 CASE_LOW (t) = low_value;
2384 CASE_HIGH (t) = high_value;
2385 CASE_LABEL (t) = label_decl;
2386 CASE_CHAIN (t) = NULL_TREE;
2388 return t;
2391 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2392 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2393 The latter determines the length of the HOST_WIDE_INT vector. */
2395 tree
2396 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2398 tree t;
2399 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2400 + sizeof (struct tree_int_cst));
2402 gcc_assert (len);
2403 record_node_allocation_statistics (INTEGER_CST, length);
2405 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2407 TREE_SET_CODE (t, INTEGER_CST);
2408 TREE_INT_CST_NUNITS (t) = len;
2409 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2410 /* to_offset can only be applied to trees that are offset_int-sized
2411 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2412 must be exactly the precision of offset_int and so LEN is correct. */
2413 if (ext_len <= OFFSET_INT_ELTS)
2414 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2415 else
2416 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2418 TREE_CONSTANT (t) = 1;
2420 return t;
2423 /* Build a newly constructed TREE_VEC node of length LEN. */
2425 tree
2426 make_tree_vec (int len MEM_STAT_DECL)
2428 tree t;
2429 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2431 record_node_allocation_statistics (TREE_VEC, length);
2433 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2435 TREE_SET_CODE (t, TREE_VEC);
2436 TREE_VEC_LENGTH (t) = len;
2438 return t;
2441 /* Grow a TREE_VEC node to new length LEN. */
2443 tree
2444 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2446 gcc_assert (TREE_CODE (v) == TREE_VEC);
2448 int oldlen = TREE_VEC_LENGTH (v);
2449 gcc_assert (len > oldlen);
2451 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2452 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2454 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2456 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2458 TREE_VEC_LENGTH (v) = len;
2460 return v;
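/* Illustrative sketch, not part of tree.c: the size computations above
   follow the usual trailing-array pattern -- sizeof (struct tree_vec)
   already accounts for one embedded tree slot, so only LEN - 1 further
   slots are added.  The same arithmetic on a hypothetical plain C struct:  */
struct example_vec { int length; void *slot[1]; };
#define EXAMPLE_VEC_BYTES(N) \
  (sizeof (struct example_vec) + ((N) - 1) * sizeof (void *))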
2463 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2464 fixed, and scalar, complex or vector. */
2467 zerop (const_tree expr)
2469 return (integer_zerop (expr)
2470 || real_zerop (expr)
2471 || fixed_zerop (expr));
2474 /* Return 1 if EXPR is the integer constant zero or a complex constant
2475 of zero. */
2478 integer_zerop (const_tree expr)
2480 switch (TREE_CODE (expr))
2482 case INTEGER_CST:
2483 return wi::to_wide (expr) == 0;
2484 case COMPLEX_CST:
2485 return (integer_zerop (TREE_REALPART (expr))
2486 && integer_zerop (TREE_IMAGPART (expr)));
2487 case VECTOR_CST:
2488 return (VECTOR_CST_NPATTERNS (expr) == 1
2489 && VECTOR_CST_DUPLICATE_P (expr)
2490 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2491 default:
2492 return false;
2496 /* Return 1 if EXPR is the integer constant one or the corresponding
2497 complex constant. */
2500 integer_onep (const_tree expr)
2502 switch (TREE_CODE (expr))
2504 case INTEGER_CST:
2505 return wi::eq_p (wi::to_widest (expr), 1);
2506 case COMPLEX_CST:
2507 return (integer_onep (TREE_REALPART (expr))
2508 && integer_zerop (TREE_IMAGPART (expr)));
2509 case VECTOR_CST:
2510 return (VECTOR_CST_NPATTERNS (expr) == 1
2511 && VECTOR_CST_DUPLICATE_P (expr)
2512 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2513 default:
2514 return false;
2518 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2519 return 1 if every piece is the integer constant one. */
2522 integer_each_onep (const_tree expr)
2524 if (TREE_CODE (expr) == COMPLEX_CST)
2525 return (integer_onep (TREE_REALPART (expr))
2526 && integer_onep (TREE_IMAGPART (expr)));
2527 else
2528 return integer_onep (expr);
2531 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2532 it contains, or a complex or vector whose subparts are such integers. */
2535 integer_all_onesp (const_tree expr)
2537 if (TREE_CODE (expr) == COMPLEX_CST
2538 && integer_all_onesp (TREE_REALPART (expr))
2539 && integer_all_onesp (TREE_IMAGPART (expr)))
2540 return 1;
2542 else if (TREE_CODE (expr) == VECTOR_CST)
2543 return (VECTOR_CST_NPATTERNS (expr) == 1
2544 && VECTOR_CST_DUPLICATE_P (expr)
2545 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2547 else if (TREE_CODE (expr) != INTEGER_CST)
2548 return 0;
2550 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2551 == wi::to_wide (expr));
2554 /* Return 1 if EXPR is the integer constant minus one. */
2557 integer_minus_onep (const_tree expr)
2559 if (TREE_CODE (expr) == COMPLEX_CST)
2560 return (integer_all_onesp (TREE_REALPART (expr))
2561 && integer_zerop (TREE_IMAGPART (expr)));
2562 else
2563 return integer_all_onesp (expr);
2566 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2567 one bit on). */
2570 integer_pow2p (const_tree expr)
2572 if (TREE_CODE (expr) == COMPLEX_CST
2573 && integer_pow2p (TREE_REALPART (expr))
2574 && integer_zerop (TREE_IMAGPART (expr)))
2575 return 1;
2577 if (TREE_CODE (expr) != INTEGER_CST)
2578 return 0;
2580 return wi::popcount (wi::to_wide (expr)) == 1;
2583 /* Return 1 if EXPR is an integer constant other than zero or a
2584 complex constant other than zero. */
2587 integer_nonzerop (const_tree expr)
2589 return ((TREE_CODE (expr) == INTEGER_CST
2590 && wi::to_wide (expr) != 0)
2591 || (TREE_CODE (expr) == COMPLEX_CST
2592 && (integer_nonzerop (TREE_REALPART (expr))
2593 || integer_nonzerop (TREE_IMAGPART (expr)))));
2596 /* Return 1 if EXPR is the integer constant one. For vector,
2597 return 1 if every piece is the integer constant minus one
2598 (representing the value TRUE). */
2601 integer_truep (const_tree expr)
2603 if (TREE_CODE (expr) == VECTOR_CST)
2604 return integer_all_onesp (expr);
2605 return integer_onep (expr);
2608 /* Return 1 if EXPR is the fixed-point constant zero. */
2611 fixed_zerop (const_tree expr)
2613 return (TREE_CODE (expr) == FIXED_CST
2614 && TREE_FIXED_CST (expr).data.is_zero ());
2617 /* Return the power of two represented by a tree node known to be a
2618 power of two. */
2621 tree_log2 (const_tree expr)
2623 if (TREE_CODE (expr) == COMPLEX_CST)
2624 return tree_log2 (TREE_REALPART (expr));
2626 return wi::exact_log2 (wi::to_wide (expr));
2629 /* Similar, but return the largest integer Y such that 2 ** Y is less
2630 than or equal to EXPR. */
2633 tree_floor_log2 (const_tree expr)
2635 if (TREE_CODE (expr) == COMPLEX_CST)
2636 return tree_log2 (TREE_REALPART (expr));
2638 return wi::floor_log2 (wi::to_wide (expr));
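/* Illustrative sketch, not part of tree.c: the two helpers above agree on
   exact powers of two and differ otherwise -- for 8 both return 3, while for
   12 tree_log2 has no exact answer (it returns -1) but tree_floor_log2
   returns 3.  */
static int
example_log2_of_eight (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  gcc_checking_assert (integer_pow2p (eight));
  return tree_log2 (eight);	/* 3 */
}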
2641 /* Return number of known trailing zero bits in EXPR, or, if the value of
2642 EXPR is known to be zero, the precision of its type. */
2644 unsigned int
2645 tree_ctz (const_tree expr)
2647 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2648 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2649 return 0;
2651 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2652 switch (TREE_CODE (expr))
2654 case INTEGER_CST:
2655 ret1 = wi::ctz (wi::to_wide (expr));
2656 return MIN (ret1, prec);
2657 case SSA_NAME:
2658 ret1 = wi::ctz (get_nonzero_bits (expr));
2659 return MIN (ret1, prec);
2660 case PLUS_EXPR:
2661 case MINUS_EXPR:
2662 case BIT_IOR_EXPR:
2663 case BIT_XOR_EXPR:
2664 case MIN_EXPR:
2665 case MAX_EXPR:
2666 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2667 if (ret1 == 0)
2668 return ret1;
2669 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2670 return MIN (ret1, ret2);
2671 case POINTER_PLUS_EXPR:
2672 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2673 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2674 /* Second operand is sizetype, which could be in theory
2675 wider than pointer's precision. Make sure we never
2676 return more than prec. */
2677 ret2 = MIN (ret2, prec);
2678 return MIN (ret1, ret2);
2679 case BIT_AND_EXPR:
2680 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2681 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2682 return MAX (ret1, ret2);
2683 case MULT_EXPR:
2684 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2685 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2686 return MIN (ret1 + ret2, prec);
2687 case LSHIFT_EXPR:
2688 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2689 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2690 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2692 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2693 return MIN (ret1 + ret2, prec);
2695 return ret1;
2696 case RSHIFT_EXPR:
2697 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2698 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2700 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2701 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2702 if (ret1 > ret2)
2703 return ret1 - ret2;
2705 return 0;
2706 case TRUNC_DIV_EXPR:
2707 case CEIL_DIV_EXPR:
2708 case FLOOR_DIV_EXPR:
2709 case ROUND_DIV_EXPR:
2710 case EXACT_DIV_EXPR:
2711 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2712 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2714 int l = tree_log2 (TREE_OPERAND (expr, 1));
2715 if (l >= 0)
2717 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2718 ret2 = l;
2719 if (ret1 > ret2)
2720 return ret1 - ret2;
2723 return 0;
2724 CASE_CONVERT:
2725 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2726 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2727 ret1 = prec;
2728 return MIN (ret1, prec);
2729 case SAVE_EXPR:
2730 return tree_ctz (TREE_OPERAND (expr, 0));
2731 case COND_EXPR:
2732 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2733 if (ret1 == 0)
2734 return 0;
2735 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2736 return MIN (ret1, ret2);
2737 case COMPOUND_EXPR:
2738 return tree_ctz (TREE_OPERAND (expr, 1));
2739 case ADDR_EXPR:
2740 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2741 if (ret1 > BITS_PER_UNIT)
2743 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2744 return MIN (ret1, prec);
2746 return 0;
2747 default:
2748 return 0;
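/* Illustrative sketch, not part of tree.c: the recursion above leans on
   simple trailing-zero identities such as ctz (a + b) >= MIN (ctz (a),
   ctz (b)) and ctz (a * b) >= ctz (a) + ctz (b), capped at the precision of
   the type.  The same facts checked on plain host integers:  */
static void
example_ctz_identities (void)
{
  unsigned HOST_WIDE_INT a = 8, b = 24;	/* ctz 3 and ctz 3.  */
  gcc_checking_assert (ctz_hwi (a + b) >= MIN (ctz_hwi (a), ctz_hwi (b)));
  gcc_checking_assert (ctz_hwi (a * b) >= ctz_hwi (a) + ctz_hwi (b));
}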
2752 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2753 decimal float constants, so don't return 1 for them. */
2756 real_zerop (const_tree expr)
2758 switch (TREE_CODE (expr))
2760 case REAL_CST:
2761 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2762 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2763 case COMPLEX_CST:
2764 return real_zerop (TREE_REALPART (expr))
2765 && real_zerop (TREE_IMAGPART (expr));
2766 case VECTOR_CST:
2768 /* Don't simply check for a duplicate because the predicate
2769 accepts both +0.0 and -0.0. */
2770 unsigned count = vector_cst_encoded_nelts (expr);
2771 for (unsigned int i = 0; i < count; ++i)
2772 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2773 return false;
2774 return true;
2776 default:
2777 return false;
2781 /* Return 1 if EXPR is the real constant one in real or complex form.
2782 Trailing zeroes matter for decimal float constants, so don't return
2783 1 for them. */
2786 real_onep (const_tree expr)
2788 switch (TREE_CODE (expr))
2790 case REAL_CST:
2791 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2792 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2793 case COMPLEX_CST:
2794 return real_onep (TREE_REALPART (expr))
2795 && real_zerop (TREE_IMAGPART (expr));
2796 case VECTOR_CST:
2797 return (VECTOR_CST_NPATTERNS (expr) == 1
2798 && VECTOR_CST_DUPLICATE_P (expr)
2799 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2800 default:
2801 return false;
2805 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2806 matter for decimal float constants, so don't return 1 for them. */
2809 real_minus_onep (const_tree expr)
2811 switch (TREE_CODE (expr))
2813 case REAL_CST:
2814 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2815 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2816 case COMPLEX_CST:
2817 return real_minus_onep (TREE_REALPART (expr))
2818 && real_zerop (TREE_IMAGPART (expr));
2819 case VECTOR_CST:
2820 return (VECTOR_CST_NPATTERNS (expr) == 1
2821 && VECTOR_CST_DUPLICATE_P (expr)
2822 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2823 default:
2824 return false;
2828 /* Nonzero if EXP is a constant or a cast of a constant. */
2831 really_constant_p (const_tree exp)
2833 /* This is not quite the same as STRIP_NOPS. It does more. */
2834 while (CONVERT_EXPR_P (exp)
2835 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2836 exp = TREE_OPERAND (exp, 0);
2837 return TREE_CONSTANT (exp);
2840 /* Return true if T holds a polynomial pointer difference, storing it in
2841 *VALUE if so. A true return means that T's precision is no greater
2842 than 64 bits, which is the largest address space we support, so *VALUE
2843 never loses precision. However, the signedness of the result does
2844 not necessarily match the signedness of T: sometimes an unsigned type
2845 like sizetype is used to encode a value that is actually negative. */
2847 bool
2848 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2850 if (!t)
2851 return false;
2852 if (TREE_CODE (t) == INTEGER_CST)
2854 if (!cst_and_fits_in_hwi (t))
2855 return false;
2856 *value = int_cst_value (t);
2857 return true;
2859 if (POLY_INT_CST_P (t))
2861 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2862 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2863 return false;
2864 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2865 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2866 return true;
2868 return false;
2871 poly_int64
2872 tree_to_poly_int64 (const_tree t)
2874 gcc_assert (tree_fits_poly_int64_p (t));
2875 if (POLY_INT_CST_P (t))
2876 return poly_int_cst_value (t).force_shwi ();
2877 return TREE_INT_CST_LOW (t);
2880 poly_uint64
2881 tree_to_poly_uint64 (const_tree t)
2883 gcc_assert (tree_fits_poly_uint64_p (t));
2884 if (POLY_INT_CST_P (t))
2885 return poly_int_cst_value (t).force_uhwi ();
2886 return TREE_INT_CST_LOW (t);
2889 /* Return first list element whose TREE_VALUE is ELEM.
2890 Return 0 if ELEM is not in LIST. */
2892 tree
2893 value_member (tree elem, tree list)
2895 while (list)
2897 if (elem == TREE_VALUE (list))
2898 return list;
2899 list = TREE_CHAIN (list);
2901 return NULL_TREE;
2904 /* Return first list element whose TREE_PURPOSE is ELEM.
2905 Return 0 if ELEM is not in LIST. */
2907 tree
2908 purpose_member (const_tree elem, tree list)
2910 while (list)
2912 if (elem == TREE_PURPOSE (list))
2913 return list;
2914 list = TREE_CHAIN (list);
2916 return NULL_TREE;
2919 /* Return true if ELEM is in V. */
2921 bool
2922 vec_member (const_tree elem, vec<tree, va_gc> *v)
2924 unsigned ix;
2925 tree t;
2926 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2927 if (elem == t)
2928 return true;
2929 return false;
2932 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2933 NULL_TREE. */
2935 tree
2936 chain_index (int idx, tree chain)
2938 for (; chain && idx > 0; --idx)
2939 chain = TREE_CHAIN (chain);
2940 return chain;
2943 /* Return nonzero if ELEM is part of the chain CHAIN. */
2946 chain_member (const_tree elem, const_tree chain)
2948 while (chain)
2950 if (elem == chain)
2951 return 1;
2952 chain = DECL_CHAIN (chain);
2955 return 0;
2958 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2959 We expect a null pointer to mark the end of the chain.
2960 This is the Lisp primitive `length'. */
2963 list_length (const_tree t)
2965 const_tree p = t;
2966 #ifdef ENABLE_TREE_CHECKING
2967 const_tree q = t;
2968 #endif
2969 int len = 0;
2971 while (p)
2973 p = TREE_CHAIN (p);
2974 #ifdef ENABLE_TREE_CHECKING
2975 if (len % 2)
2976 q = TREE_CHAIN (q);
2977 gcc_assert (p != q);
2978 #endif
2979 len++;
2982 return len;
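/* Illustrative sketch, not part of tree.c: the ENABLE_TREE_CHECKING block
   above is a two-speed cycle check -- Q advances only every other iteration,
   half as fast as P, so on a circular chain the two pointers must eventually
   coincide and the assert fires instead of looping forever.  The same idea
   on a hypothetical plain C list:  */
struct example_node { struct example_node *next; };
static int
example_list_length (const struct example_node *p)
{
  const struct example_node *q = p;
  int len = 0;
  while (p)
    {
      p = p->next;
      if (len % 2)
	q = q->next;
      gcc_assert (p != q);
      len++;
    }
  return len;
}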
2985 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2986 UNION_TYPE TYPE, or NULL_TREE if none. */
2988 tree
2989 first_field (const_tree type)
2991 tree t = TYPE_FIELDS (type);
2992 while (t && TREE_CODE (t) != FIELD_DECL)
2993 t = TREE_CHAIN (t);
2994 return t;
2997 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2998 by modifying the last node in chain 1 to point to chain 2.
2999 This is the Lisp primitive `nconc'. */
3001 tree
3002 chainon (tree op1, tree op2)
3004 tree t1;
3006 if (!op1)
3007 return op2;
3008 if (!op2)
3009 return op1;
3011 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3012 continue;
3013 TREE_CHAIN (t1) = op2;
3015 #ifdef ENABLE_TREE_CHECKING
3017 tree t2;
3018 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3019 gcc_assert (t2 != t1);
3021 #endif
3023 return op1;
3026 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3028 tree
3029 tree_last (tree chain)
3031 tree next;
3032 if (chain)
3033 while ((next = TREE_CHAIN (chain)))
3034 chain = next;
3035 return chain;
3038 /* Reverse the order of elements in the chain T,
3039 and return the new head of the chain (old last element). */
3041 tree
3042 nreverse (tree t)
3044 tree prev = 0, decl, next;
3045 for (decl = t; decl; decl = next)
3047 /* We shouldn't be using this function to reverse BLOCK chains; we
3048 have blocks_nreverse for that. */
3049 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3050 next = TREE_CHAIN (decl);
3051 TREE_CHAIN (decl) = prev;
3052 prev = decl;
3054 return prev;
3057 /* Return a newly created TREE_LIST node whose
3058 purpose and value fields are PARM and VALUE. */
3060 tree
3061 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3063 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3064 TREE_PURPOSE (t) = parm;
3065 TREE_VALUE (t) = value;
3066 return t;
3069 /* Build a chain of TREE_LIST nodes from a vector. */
3071 tree
3072 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3074 tree ret = NULL_TREE;
3075 tree *pp = &ret;
3076 unsigned int i;
3077 tree t;
3078 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3080 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3081 pp = &TREE_CHAIN (*pp);
3083 return ret;
3086 /* Return a newly created TREE_LIST node whose
3087 purpose and value fields are PURPOSE and VALUE
3088 and whose TREE_CHAIN is CHAIN. */
3090 tree
3091 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3093 tree node;
3095 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3096 memset (node, 0, sizeof (struct tree_common));
3098 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3100 TREE_SET_CODE (node, TREE_LIST);
3101 TREE_CHAIN (node) = chain;
3102 TREE_PURPOSE (node) = purpose;
3103 TREE_VALUE (node) = value;
3104 return node;
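/* Illustrative sketch, not part of tree.c: a common idiom is to cons new
   elements onto the front of a TREE_LIST while walking forward and then fix
   the order once at the end with nreverse, which avoids the quadratic cost
   of repeated chainon calls.  A, B and C are hypothetical trees.  */
static tree
example_build_list (tree a, tree b, tree c)
{
  tree list = NULL_TREE;
  list = tree_cons (NULL_TREE, a, list);
  list = tree_cons (NULL_TREE, b, list);
  list = tree_cons (NULL_TREE, c, list);
  return nreverse (list);	/* TREE_VALUEs now read a, b, c.  */
}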
3107 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3108 trees. */
3110 vec<tree, va_gc> *
3111 ctor_to_vec (tree ctor)
3113 vec<tree, va_gc> *vec;
3114 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3115 unsigned int ix;
3116 tree val;
3118 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3119 vec->quick_push (val);
3121 return vec;
3124 /* Return the size nominally occupied by an object of type TYPE
3125 when it resides in memory. The value is measured in units of bytes,
3126 and its data type is that normally used for type sizes
3127 (which is the first type created by make_signed_type or
3128 make_unsigned_type). */
3130 tree
3131 size_in_bytes_loc (location_t loc, const_tree type)
3133 tree t;
3135 if (type == error_mark_node)
3136 return integer_zero_node;
3138 type = TYPE_MAIN_VARIANT (type);
3139 t = TYPE_SIZE_UNIT (type);
3141 if (t == 0)
3143 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3144 return size_zero_node;
3147 return t;
3150 /* Return the size of TYPE (in bytes) as a wide integer
3151 or return -1 if the size can vary or is larger than an integer. */
3153 HOST_WIDE_INT
3154 int_size_in_bytes (const_tree type)
3156 tree t;
3158 if (type == error_mark_node)
3159 return 0;
3161 type = TYPE_MAIN_VARIANT (type);
3162 t = TYPE_SIZE_UNIT (type);
3164 if (t && tree_fits_uhwi_p (t))
3165 return TREE_INT_CST_LOW (t);
3166 else
3167 return -1;
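/* Illustrative sketch, not part of tree.c: int_size_in_bytes collapses the
   "variable or unknown size" cases into -1, so callers normally test for
   that before trusting the value.  TYPE and BUF_SIZE are hypothetical.  */
static bool
example_fits_in_buffer (const_tree type, HOST_WIDE_INT buf_size)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  return size != -1 && size <= buf_size;
}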
3170 /* Return the maximum size of TYPE (in bytes) as a wide integer
3171 or return -1 if the size can vary or is larger than an integer. */
3173 HOST_WIDE_INT
3174 max_int_size_in_bytes (const_tree type)
3176 HOST_WIDE_INT size = -1;
3177 tree size_tree;
3179 /* If this is an array type, check for a possible MAX_SIZE attached. */
3181 if (TREE_CODE (type) == ARRAY_TYPE)
3183 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3185 if (size_tree && tree_fits_uhwi_p (size_tree))
3186 size = tree_to_uhwi (size_tree);
3189 /* If we still haven't been able to get a size, see if the language
3190 can compute a maximum size. */
3192 if (size == -1)
3194 size_tree = lang_hooks.types.max_size (type);
3196 if (size_tree && tree_fits_uhwi_p (size_tree))
3197 size = tree_to_uhwi (size_tree);
3200 return size;
3203 /* Return the bit position of FIELD, in bits from the start of the record.
3204 This is a tree of type bitsizetype. */
3206 tree
3207 bit_position (const_tree field)
3209 return bit_from_pos (DECL_FIELD_OFFSET (field),
3210 DECL_FIELD_BIT_OFFSET (field));
3213 /* Return the byte position of FIELD, in bytes from the start of the record.
3214 This is a tree of type sizetype. */
3216 tree
3217 byte_position (const_tree field)
3219 return byte_from_pos (DECL_FIELD_OFFSET (field),
3220 DECL_FIELD_BIT_OFFSET (field));
3223 /* Likewise, but return as an integer. It must be representable in
3224 that way (since it could be a signed value, we don't have the
3225 option of returning -1 like int_size_in_bytes can). */
3227 HOST_WIDE_INT
3228 int_byte_position (const_tree field)
3230 return tree_to_shwi (byte_position (field));
3233 /* Return the strictest alignment, in bits, that T is known to have. */
3235 unsigned int
3236 expr_align (const_tree t)
3238 unsigned int align0, align1;
3240 switch (TREE_CODE (t))
3242 CASE_CONVERT: case NON_LVALUE_EXPR:
3243 /* If we have conversions, we know that the alignment of the
3244 object must meet each of the alignments of the types. */
3245 align0 = expr_align (TREE_OPERAND (t, 0));
3246 align1 = TYPE_ALIGN (TREE_TYPE (t));
3247 return MAX (align0, align1);
3249 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3250 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3251 case CLEANUP_POINT_EXPR:
3252 /* These don't change the alignment of an object. */
3253 return expr_align (TREE_OPERAND (t, 0));
3255 case COND_EXPR:
3256 /* The best we can do is say that the alignment is the least aligned
3257 of the two arms. */
3258 align0 = expr_align (TREE_OPERAND (t, 1));
3259 align1 = expr_align (TREE_OPERAND (t, 2));
3260 return MIN (align0, align1);
3262 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3263 meaningfully, it's always 1. */
3264 case LABEL_DECL: case CONST_DECL:
3265 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3266 case FUNCTION_DECL:
3267 gcc_assert (DECL_ALIGN (t) != 0);
3268 return DECL_ALIGN (t);
3270 default:
3271 break;
3274 /* Otherwise take the alignment from that of the type. */
3275 return TYPE_ALIGN (TREE_TYPE (t));
3278 /* Return, as a tree node, the number of elements for TYPE (which is an
3279 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3281 tree
3282 array_type_nelts (const_tree type)
3284 tree index_type, min, max;
3286 /* If they did it with unspecified bounds, then we should have already
3287 given an error about it before we got here. */
3288 if (! TYPE_DOMAIN (type))
3289 return error_mark_node;
3291 index_type = TYPE_DOMAIN (type);
3292 min = TYPE_MIN_VALUE (index_type);
3293 max = TYPE_MAX_VALUE (index_type);
3295 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3296 if (!max)
3297 return error_mark_node;
3299 return (integer_zerop (min)
3300 ? max
3301 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
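/* Illustrative sketch, not part of tree.c: because the helper above returns
   the element count minus one (9 for a ten-element array), callers that want
   the count itself add one back, after guarding against the error_mark_node
   cases.  */
static tree
example_array_count (const_tree array_type_)
{
  tree nelts_minus_one = array_type_nelts (array_type_);
  if (nelts_minus_one == error_mark_node)
    return nelts_minus_one;
  return fold_build2 (PLUS_EXPR, TREE_TYPE (nelts_minus_one),
		      nelts_minus_one,
		      build_one_cst (TREE_TYPE (nelts_minus_one)));
}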
3304 /* If arg is static -- a reference to an object in static storage -- then
3305 return the object. This is not the same as the C meaning of `static'.
3306 If arg isn't static, return NULL. */
3308 tree
3309 staticp (tree arg)
3311 switch (TREE_CODE (arg))
3313 case FUNCTION_DECL:
3314 /* Nested functions are static, even though taking their address will
3315 involve a trampoline as we unnest the nested function and create
3316 the trampoline on the tree level. */
3317 return arg;
3319 case VAR_DECL:
3320 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3321 && ! DECL_THREAD_LOCAL_P (arg)
3322 && ! DECL_DLLIMPORT_P (arg)
3323 ? arg : NULL);
3325 case CONST_DECL:
3326 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3327 ? arg : NULL);
3329 case CONSTRUCTOR:
3330 return TREE_STATIC (arg) ? arg : NULL;
3332 case LABEL_DECL:
3333 case STRING_CST:
3334 return arg;
3336 case COMPONENT_REF:
3337 /* If the thing being referenced is not a field, then it is
3338 something language specific. */
3339 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3341 /* If we are referencing a bitfield, we can't evaluate an
3342 ADDR_EXPR at compile time and so it isn't a constant. */
3343 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3344 return NULL;
3346 return staticp (TREE_OPERAND (arg, 0));
3348 case BIT_FIELD_REF:
3349 return NULL;
3351 case INDIRECT_REF:
3352 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3354 case ARRAY_REF:
3355 case ARRAY_RANGE_REF:
3356 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3357 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3358 return staticp (TREE_OPERAND (arg, 0));
3359 else
3360 return NULL;
3362 case COMPOUND_LITERAL_EXPR:
3363 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3365 default:
3366 return NULL;
3373 /* Return whether OP is a DECL whose address is function-invariant. */
3375 bool
3376 decl_address_invariant_p (const_tree op)
3378 /* The conditions below are slightly less strict than the ones in
3379 staticp. */
3381 switch (TREE_CODE (op))
3383 case PARM_DECL:
3384 case RESULT_DECL:
3385 case LABEL_DECL:
3386 case FUNCTION_DECL:
3387 return true;
3389 case VAR_DECL:
3390 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3391 || DECL_THREAD_LOCAL_P (op)
3392 || DECL_CONTEXT (op) == current_function_decl
3393 || decl_function_context (op) == current_function_decl)
3394 return true;
3395 break;
3397 case CONST_DECL:
3398 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3399 || decl_function_context (op) == current_function_decl)
3400 return true;
3401 break;
3403 default:
3404 break;
3407 return false;
3410 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3412 bool
3413 decl_address_ip_invariant_p (const_tree op)
3415 /* The conditions below are slightly less strict than the ones in
3416 staticp. */
3418 switch (TREE_CODE (op))
3420 case LABEL_DECL:
3421 case FUNCTION_DECL:
3422 case STRING_CST:
3423 return true;
3425 case VAR_DECL:
3426 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3427 && !DECL_DLLIMPORT_P (op))
3428 || DECL_THREAD_LOCAL_P (op))
3429 return true;
3430 break;
3432 case CONST_DECL:
3433 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3434 return true;
3435 break;
3437 default:
3438 break;
3441 return false;
3445 /* Return true if T is function-invariant (internal function, does
3446 not handle arithmetic; that's handled in skip_simple_arithmetic and
3447 tree_invariant_p). */
3449 static bool
3450 tree_invariant_p_1 (tree t)
3452 tree op;
3454 if (TREE_CONSTANT (t)
3455 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3456 return true;
3458 switch (TREE_CODE (t))
3460 case SAVE_EXPR:
3461 return true;
3463 case ADDR_EXPR:
3464 op = TREE_OPERAND (t, 0);
3465 while (handled_component_p (op))
3467 switch (TREE_CODE (op))
3469 case ARRAY_REF:
3470 case ARRAY_RANGE_REF:
3471 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3472 || TREE_OPERAND (op, 2) != NULL_TREE
3473 || TREE_OPERAND (op, 3) != NULL_TREE)
3474 return false;
3475 break;
3477 case COMPONENT_REF:
3478 if (TREE_OPERAND (op, 2) != NULL_TREE)
3479 return false;
3480 break;
3482 default:;
3484 op = TREE_OPERAND (op, 0);
3487 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3489 default:
3490 break;
3493 return false;
3496 /* Return true if T is function-invariant. */
3498 bool
3499 tree_invariant_p (tree t)
3501 tree inner = skip_simple_arithmetic (t);
3502 return tree_invariant_p_1 (inner);
3505 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3506 Do this to any expression which may be used in more than one place,
3507 but must be evaluated only once.
3509 Normally, expand_expr would reevaluate the expression each time.
3510 Calling save_expr produces something that is evaluated and recorded
3511 the first time expand_expr is called on it. Subsequent calls to
3512 expand_expr just reuse the recorded value.
3514 The call to expand_expr that generates code that actually computes
3515 the value is the first call *at compile time*. Subsequent calls
3516 *at compile time* generate code to use the saved value.
3517 This produces correct result provided that *at run time* control
3518 always flows through the insns made by the first expand_expr
3519 before reaching the other places where the save_expr was evaluated.
3520 You, the caller of save_expr, must make sure this is so.
3522 Constants, and certain read-only nodes, are returned with no
3523 SAVE_EXPR because that is safe. Expressions containing placeholders
3524 are not touched; see tree.def for an explanation of what these
3525 are used for. */
3527 tree
3528 save_expr (tree expr)
3530 tree inner;
3532 /* If the tree evaluates to a constant, then we don't want to hide that
3533 fact (i.e. this allows further folding, and direct checks for constants).
3534 However, a read-only object that has side effects cannot be bypassed.
3535 Since it is no problem to reevaluate literals, we just return the
3536 literal node. */
3537 inner = skip_simple_arithmetic (expr);
3538 if (TREE_CODE (inner) == ERROR_MARK)
3539 return inner;
3541 if (tree_invariant_p_1 (inner))
3542 return expr;
3544 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3545 it means that the size or offset of some field of an object depends on
3546 the value within another field.
3548 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3549 and some variable since it would then need to be both evaluated once and
3550 evaluated more than once. Front-ends must ensure this case cannot
3551 happen by surrounding any such subexpressions in their own SAVE_EXPR
3552 and forcing evaluation at the proper time. */
3553 if (contains_placeholder_p (inner))
3554 return expr;
3556 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3558 /* This expression might be placed ahead of a jump to ensure that the
3559 value was computed on both sides of the jump. So make sure it isn't
3560 eliminated as dead. */
3561 TREE_SIDE_EFFECTS (expr) = 1;
3562 return expr;
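/* Illustrative sketch, not part of tree.c: a caller that needs EXPR in two
   places but must evaluate it only once wraps it first and reuses the
   result; constants and other invariants simply come back unwrapped.  EXPR
   is a hypothetical argument.  */
static tree
example_square (tree expr)
{
  tree saved = save_expr (expr);
  return fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}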
3565 /* Look inside EXPR into any simple arithmetic operations. Return the
3566 outermost non-arithmetic or non-invariant node. */
3568 tree
3569 skip_simple_arithmetic (tree expr)
3571 /* We don't care about whether this can be used as an lvalue in this
3572 context. */
3573 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3574 expr = TREE_OPERAND (expr, 0);
3576 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3577 a constant, it will be more efficient to not make another SAVE_EXPR since
3578 it will allow better simplification and GCSE will be able to merge the
3579 computations if they actually occur. */
3580 while (true)
3582 if (UNARY_CLASS_P (expr))
3583 expr = TREE_OPERAND (expr, 0);
3584 else if (BINARY_CLASS_P (expr))
3586 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3587 expr = TREE_OPERAND (expr, 0);
3588 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3589 expr = TREE_OPERAND (expr, 1);
3590 else
3591 break;
3593 else
3594 break;
3597 return expr;
3600 /* Look inside EXPR into simple arithmetic operations involving constants.
3601 Return the outermost non-arithmetic or non-constant node. */
3603 tree
3604 skip_simple_constant_arithmetic (tree expr)
3606 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3607 expr = TREE_OPERAND (expr, 0);
3609 while (true)
3611 if (UNARY_CLASS_P (expr))
3612 expr = TREE_OPERAND (expr, 0);
3613 else if (BINARY_CLASS_P (expr))
3615 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3616 expr = TREE_OPERAND (expr, 0);
3617 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3618 expr = TREE_OPERAND (expr, 1);
3619 else
3620 break;
3622 else
3623 break;
3626 return expr;
3629 /* Return which tree structure is used by T. */
3631 enum tree_node_structure_enum
3632 tree_node_structure (const_tree t)
3634 const enum tree_code code = TREE_CODE (t);
3635 return tree_node_structure_for_code (code);
3638 /* Set various status flags when building a CALL_EXPR object T. */
3640 static void
3641 process_call_operands (tree t)
3643 bool side_effects = TREE_SIDE_EFFECTS (t);
3644 bool read_only = false;
3645 int i = call_expr_flags (t);
3647 /* Calls have side-effects, except those to const or pure functions. */
3648 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3649 side_effects = true;
3650 /* Propagate TREE_READONLY of arguments for const functions. */
3651 if (i & ECF_CONST)
3652 read_only = true;
3654 if (!side_effects || read_only)
3655 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3657 tree op = TREE_OPERAND (t, i);
3658 if (op && TREE_SIDE_EFFECTS (op))
3659 side_effects = true;
3660 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3661 read_only = false;
3664 TREE_SIDE_EFFECTS (t) = side_effects;
3665 TREE_READONLY (t) = read_only;
3668 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3669 size or offset that depends on a field within a record. */
3671 bool
3672 contains_placeholder_p (const_tree exp)
3674 enum tree_code code;
3676 if (!exp)
3677 return 0;
3679 code = TREE_CODE (exp);
3680 if (code == PLACEHOLDER_EXPR)
3681 return 1;
3683 switch (TREE_CODE_CLASS (code))
3685 case tcc_reference:
3686 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3687 position computations since they will be converted into a
3688 WITH_RECORD_EXPR involving the reference, which we assume
3689 here will be valid. */
3690 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3692 case tcc_exceptional:
3693 if (code == TREE_LIST)
3694 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3695 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3696 break;
3698 case tcc_unary:
3699 case tcc_binary:
3700 case tcc_comparison:
3701 case tcc_expression:
3702 switch (code)
3704 case COMPOUND_EXPR:
3705 /* Ignoring the first operand isn't quite right, but works best. */
3706 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3708 case COND_EXPR:
3709 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3710 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3711 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3713 case SAVE_EXPR:
3714 /* The save_expr function never wraps anything containing
3715 a PLACEHOLDER_EXPR. */
3716 return 0;
3718 default:
3719 break;
3722 switch (TREE_CODE_LENGTH (code))
3724 case 1:
3725 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3726 case 2:
3727 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3728 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3729 default:
3730 return 0;
3733 case tcc_vl_exp:
3734 switch (code)
3736 case CALL_EXPR:
3738 const_tree arg;
3739 const_call_expr_arg_iterator iter;
3740 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3741 if (CONTAINS_PLACEHOLDER_P (arg))
3742 return 1;
3743 return 0;
3745 default:
3746 return 0;
3749 default:
3750 return 0;
3752 return 0;
3755 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3756 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3757 field positions. */
3759 static bool
3760 type_contains_placeholder_1 (const_tree type)
3762 /* If the size contains a placeholder or the parent type (component type in
3763 the case of arrays) type involves a placeholder, this type does. */
3764 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3765 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3766 || (!POINTER_TYPE_P (type)
3767 && TREE_TYPE (type)
3768 && type_contains_placeholder_p (TREE_TYPE (type))))
3769 return true;
3771 /* Now do type-specific checks. Note that the last part of the check above
3772 greatly limits what we have to do below. */
3773 switch (TREE_CODE (type))
3775 case VOID_TYPE:
3776 case POINTER_BOUNDS_TYPE:
3777 case COMPLEX_TYPE:
3778 case ENUMERAL_TYPE:
3779 case BOOLEAN_TYPE:
3780 case POINTER_TYPE:
3781 case OFFSET_TYPE:
3782 case REFERENCE_TYPE:
3783 case METHOD_TYPE:
3784 case FUNCTION_TYPE:
3785 case VECTOR_TYPE:
3786 case NULLPTR_TYPE:
3787 return false;
3789 case INTEGER_TYPE:
3790 case REAL_TYPE:
3791 case FIXED_POINT_TYPE:
3792 /* Here we just check the bounds. */
3793 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3794 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3796 case ARRAY_TYPE:
3797 /* We have already checked the component type above, so just check
3798 the domain type. Flexible array members have a null domain. */
3799 return TYPE_DOMAIN (type) ?
3800 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3802 case RECORD_TYPE:
3803 case UNION_TYPE:
3804 case QUAL_UNION_TYPE:
3806 tree field;
3808 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3809 if (TREE_CODE (field) == FIELD_DECL
3810 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3811 || (TREE_CODE (type) == QUAL_UNION_TYPE
3812 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3813 || type_contains_placeholder_p (TREE_TYPE (field))))
3814 return true;
3816 return false;
3819 default:
3820 gcc_unreachable ();
3824 /* Wrapper around above function used to cache its result. */
3826 bool
3827 type_contains_placeholder_p (tree type)
3829 bool result;
3831 /* If the contains_placeholder_bits field has been initialized,
3832 then we know the answer. */
3833 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3834 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3836 /* Indicate that we've seen this type node, and the answer is false.
3837 This is what we want to return if we run into recursion via fields. */
3838 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3840 /* Compute the real value. */
3841 result = type_contains_placeholder_1 (type);
3843 /* Store the real value. */
3844 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3846 return result;
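/* Illustrative sketch, not part of tree.c: the cache above packs a tristate
   into TYPE_CONTAINS_PLACEHOLDER_INTERNAL -- 0 for "not computed yet", 1 for
   "no", 2 for "yes" -- which is why it stores RESULT + 1 and returns the
   stored value minus one.  The same pattern on a hypothetical plain C
   struct:  */
struct example_cached { unsigned int answer_plus_one : 2; };
static bool
example_cached_query (struct example_cached *c, bool (*compute) (void))
{
  if (c->answer_plus_one > 0)
    return c->answer_plus_one - 1;
  /* Record a provisional "no" first, mirroring the recursion guard above.  */
  c->answer_plus_one = 1;
  bool result = compute ();
  c->answer_plus_one = result + 1;
  return result;
}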
3849 /* Push tree EXP onto vector QUEUE if it is not already present. */
3851 static void
3852 push_without_duplicates (tree exp, vec<tree> *queue)
3854 unsigned int i;
3855 tree iter;
3857 FOR_EACH_VEC_ELT (*queue, i, iter)
3858 if (simple_cst_equal (iter, exp) == 1)
3859 break;
3861 if (!iter)
3862 queue->safe_push (exp);
3865 /* Given a tree EXP, find all occurrences of references to fields
3866 in a PLACEHOLDER_EXPR and place them in vector REFS without
3867 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3868 we assume here that EXP contains only arithmetic expressions
3869 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3870 argument list. */
3872 void
3873 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3875 enum tree_code code = TREE_CODE (exp);
3876 tree inner;
3877 int i;
3879 /* We handle TREE_LIST and COMPONENT_REF separately. */
3880 if (code == TREE_LIST)
3882 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3883 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3885 else if (code == COMPONENT_REF)
3887 for (inner = TREE_OPERAND (exp, 0);
3888 REFERENCE_CLASS_P (inner);
3889 inner = TREE_OPERAND (inner, 0))
3892 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3893 push_without_duplicates (exp, refs);
3894 else
3895 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3897 else
3898 switch (TREE_CODE_CLASS (code))
3900 case tcc_constant:
3901 break;
3903 case tcc_declaration:
3904 /* Variables allocated to static storage can stay. */
3905 if (!TREE_STATIC (exp))
3906 push_without_duplicates (exp, refs);
3907 break;
3909 case tcc_expression:
3910 /* This is the pattern built in ada/make_aligning_type. */
3911 if (code == ADDR_EXPR
3912 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3914 push_without_duplicates (exp, refs);
3915 break;
3918 /* Fall through. */
3920 case tcc_exceptional:
3921 case tcc_unary:
3922 case tcc_binary:
3923 case tcc_comparison:
3924 case tcc_reference:
3925 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3926 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3927 break;
3929 case tcc_vl_exp:
3930 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3931 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3932 break;
3934 default:
3935 gcc_unreachable ();
3939 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3940 return a tree with all occurrences of references to F in a
3941 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3942 CONST_DECLs. Note that we assume here that EXP contains only
3943 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3944 occurring only in their argument list. */
3946 tree
3947 substitute_in_expr (tree exp, tree f, tree r)
3949 enum tree_code code = TREE_CODE (exp);
3950 tree op0, op1, op2, op3;
3951 tree new_tree;
3953 /* We handle TREE_LIST and COMPONENT_REF separately. */
3954 if (code == TREE_LIST)
3956 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3957 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3958 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3959 return exp;
3961 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3963 else if (code == COMPONENT_REF)
3965 tree inner;
3967 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3968 and it is the right field, replace it with R. */
3969 for (inner = TREE_OPERAND (exp, 0);
3970 REFERENCE_CLASS_P (inner);
3971 inner = TREE_OPERAND (inner, 0))
3974 /* The field. */
3975 op1 = TREE_OPERAND (exp, 1);
3977 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3978 return r;
3980 /* If this expression hasn't been completed yet, leave it alone. */
3981 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3982 return exp;
3984 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3985 if (op0 == TREE_OPERAND (exp, 0))
3986 return exp;
3988 new_tree
3989 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3991 else
3992 switch (TREE_CODE_CLASS (code))
3994 case tcc_constant:
3995 return exp;
3997 case tcc_declaration:
3998 if (exp == f)
3999 return r;
4000 else
4001 return exp;
4003 case tcc_expression:
4004 if (exp == f)
4005 return r;
4007 /* Fall through. */
4009 case tcc_exceptional:
4010 case tcc_unary:
4011 case tcc_binary:
4012 case tcc_comparison:
4013 case tcc_reference:
4014 switch (TREE_CODE_LENGTH (code))
4016 case 0:
4017 return exp;
4019 case 1:
4020 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4021 if (op0 == TREE_OPERAND (exp, 0))
4022 return exp;
4024 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4025 break;
4027 case 2:
4028 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4029 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4031 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4032 return exp;
4034 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4035 break;
4037 case 3:
4038 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4039 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4040 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4042 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4043 && op2 == TREE_OPERAND (exp, 2))
4044 return exp;
4046 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4047 break;
4049 case 4:
4050 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4051 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4052 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4053 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4055 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4056 && op2 == TREE_OPERAND (exp, 2)
4057 && op3 == TREE_OPERAND (exp, 3))
4058 return exp;
4060 new_tree
4061 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4062 break;
4064 default:
4065 gcc_unreachable ();
4067 break;
4069 case tcc_vl_exp:
4071 int i;
4073 new_tree = NULL_TREE;
4075 /* If we are trying to replace F with a constant or with another
4076 instance of one of the arguments of the call, inline back
4077 functions which do nothing else than computing a value from
4078 the arguments they are passed. This makes it possible to
4079 fold partially or entirely the replacement expression. */
4080 if (code == CALL_EXPR)
4082 bool maybe_inline = false;
4083 if (CONSTANT_CLASS_P (r))
4084 maybe_inline = true;
4085 else
4086 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4087 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4089 maybe_inline = true;
4090 break;
4092 if (maybe_inline)
4094 tree t = maybe_inline_call_in_expr (exp);
4095 if (t)
4096 return SUBSTITUTE_IN_EXPR (t, f, r);
4100 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4102 tree op = TREE_OPERAND (exp, i);
4103 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4104 if (new_op != op)
4106 if (!new_tree)
4107 new_tree = copy_node (exp);
4108 TREE_OPERAND (new_tree, i) = new_op;
4112 if (new_tree)
4114 new_tree = fold (new_tree);
4115 if (TREE_CODE (new_tree) == CALL_EXPR)
4116 process_call_operands (new_tree);
4118 else
4119 return exp;
4121 break;
4123 default:
4124 gcc_unreachable ();
4127 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4129 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4130 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4132 return new_tree;
4135 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4136 for it within OBJ, a tree that is an object or a chain of references. */
4138 tree
4139 substitute_placeholder_in_expr (tree exp, tree obj)
4141 enum tree_code code = TREE_CODE (exp);
4142 tree op0, op1, op2, op3;
4143 tree new_tree;
4145 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4146 in the chain of OBJ. */
4147 if (code == PLACEHOLDER_EXPR)
4149 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4150 tree elt;
4152 for (elt = obj; elt != 0;
4153 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4154 || TREE_CODE (elt) == COND_EXPR)
4155 ? TREE_OPERAND (elt, 1)
4156 : (REFERENCE_CLASS_P (elt)
4157 || UNARY_CLASS_P (elt)
4158 || BINARY_CLASS_P (elt)
4159 || VL_EXP_CLASS_P (elt)
4160 || EXPRESSION_CLASS_P (elt))
4161 ? TREE_OPERAND (elt, 0) : 0))
4162 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4163 return elt;
4165 for (elt = obj; elt != 0;
4166 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4167 || TREE_CODE (elt) == COND_EXPR)
4168 ? TREE_OPERAND (elt, 1)
4169 : (REFERENCE_CLASS_P (elt)
4170 || UNARY_CLASS_P (elt)
4171 || BINARY_CLASS_P (elt)
4172 || VL_EXP_CLASS_P (elt)
4173 || EXPRESSION_CLASS_P (elt))
4174 ? TREE_OPERAND (elt, 0) : 0))
4175 if (POINTER_TYPE_P (TREE_TYPE (elt))
4176 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4177 == need_type))
4178 return fold_build1 (INDIRECT_REF, need_type, elt);
4180 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4181 survives until RTL generation, there will be an error. */
4182 return exp;
4185 /* TREE_LIST is special because we need to look at TREE_VALUE
4186 and TREE_CHAIN, not TREE_OPERANDS. */
4187 else if (code == TREE_LIST)
4189 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4190 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4191 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4192 return exp;
4194 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4196 else
4197 switch (TREE_CODE_CLASS (code))
4199 case tcc_constant:
4200 case tcc_declaration:
4201 return exp;
4203 case tcc_exceptional:
4204 case tcc_unary:
4205 case tcc_binary:
4206 case tcc_comparison:
4207 case tcc_expression:
4208 case tcc_reference:
4209 case tcc_statement:
4210 switch (TREE_CODE_LENGTH (code))
4212 case 0:
4213 return exp;
4215 case 1:
4216 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4217 if (op0 == TREE_OPERAND (exp, 0))
4218 return exp;
4220 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4221 break;
4223 case 2:
4224 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4225 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4227 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4228 return exp;
4230 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4231 break;
4233 case 3:
4234 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4235 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4236 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4238 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4239 && op2 == TREE_OPERAND (exp, 2))
4240 return exp;
4242 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4243 break;
4245 case 4:
4246 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4247 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4248 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4249 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4251 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4252 && op2 == TREE_OPERAND (exp, 2)
4253 && op3 == TREE_OPERAND (exp, 3))
4254 return exp;
4256 new_tree
4257 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4258 break;
4260 default:
4261 gcc_unreachable ();
4263 break;
4265 case tcc_vl_exp:
4267 int i;
4269 new_tree = NULL_TREE;
4271 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4273 tree op = TREE_OPERAND (exp, i);
4274 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4275 if (new_op != op)
4277 if (!new_tree)
4278 new_tree = copy_node (exp);
4279 TREE_OPERAND (new_tree, i) = new_op;
4283 if (new_tree)
4285 new_tree = fold (new_tree);
4286 if (TREE_CODE (new_tree) == CALL_EXPR)
4287 process_call_operands (new_tree);
4289 else
4290 return exp;
4292 break;
4294 default:
4295 gcc_unreachable ();
4298 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4300 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4301 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4303 return new_tree;
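/* A minimal illustrative sketch (not part of GCC itself): the usual way to
   reach the routine above is the SUBSTITUTE_PLACEHOLDER_IN_EXPR macro from
   tree.h, e.g. to turn a self-referential TYPE_SIZE into a size that can be
   evaluated for one particular object.  The function name below is
   hypothetical.  */
#if 0
static tree
example_concrete_size (tree type, tree obj)
{
  /* Every PLACEHOLDER_EXPR in TYPE_SIZE (type) is replaced by a reference
     to OBJ (or a dereference of it), so the result no longer depends on an
     unspecified object.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);
}
#endif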
4307 /* Subroutine of stabilize_reference; this is called for subtrees of
4308 references. Any expression with side-effects must be put in a SAVE_EXPR
4309 to ensure that it is only evaluated once.
4311 We don't put SAVE_EXPR nodes around everything, because assigning very
4312 simple expressions to temporaries causes us to miss good opportunities
4313 for optimizations. Among other things, the opportunity to fold in the
4314 addition of a constant into an addressing mode often gets lost, e.g.
4315 "y[i+1] += x;". In general, we take the approach that we should not make
4316 an assignment unless we are forced into it - i.e., that any non-side effect
4317 operator should be allowed, and that cse should take care of coalescing
4318 multiple utterances of the same expression should that prove fruitful. */
4320 static tree
4321 stabilize_reference_1 (tree e)
4323 tree result;
4324 enum tree_code code = TREE_CODE (e);
4326 /* We cannot ignore const expressions, because the expression might be
4327 a reference to a const array whose index contains side effects. But we
4328 can ignore things that are actually constant or that have already been
4329 handled by this function. */
4331 if (tree_invariant_p (e))
4332 return e;
4334 switch (TREE_CODE_CLASS (code))
4336 case tcc_exceptional:
4337 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4338 have side-effects. */
4339 if (code == STATEMENT_LIST)
4340 return save_expr (e);
4341 /* FALLTHRU */
4342 case tcc_type:
4343 case tcc_declaration:
4344 case tcc_comparison:
4345 case tcc_statement:
4346 case tcc_expression:
4347 case tcc_reference:
4348 case tcc_vl_exp:
4349 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4350 so that it will only be evaluated once. */
4351 /* The reference (r) and comparison (<) classes could be handled as
4352 below, but it is generally faster to only evaluate them once. */
4353 if (TREE_SIDE_EFFECTS (e))
4354 return save_expr (e);
4355 return e;
4357 case tcc_constant:
4358 /* Constants need no processing. In fact, we should never reach
4359 here. */
4360 return e;
4362 case tcc_binary:
4363 /* Division is slow and tends to be compiled with jumps,
4364 especially the division by powers of 2 that is often
4365 found inside of an array reference. So do it just once. */
4366 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4367 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4368 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4369 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4370 return save_expr (e);
4371 /* Recursively stabilize each operand. */
4372 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4373 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4374 break;
4376 case tcc_unary:
4377 /* Recursively stabilize each operand. */
4378 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4379 break;
4381 default:
4382 gcc_unreachable ();
4385 TREE_TYPE (result) = TREE_TYPE (e);
4386 TREE_READONLY (result) = TREE_READONLY (e);
4387 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4388 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4390 return result;
4393 /* Stabilize a reference so that we can use it any number of times
4394 without causing its operands to be evaluated more than once.
4395 Returns the stabilized reference. This works by means of save_expr,
4396 so see the caveats in the comments about save_expr.
4398 Also allows conversion expressions whose operands are references.
4399 Any other kind of expression is returned unchanged. */
4401 tree
4402 stabilize_reference (tree ref)
4404 tree result;
4405 enum tree_code code = TREE_CODE (ref);
4407 switch (code)
4409 case VAR_DECL:
4410 case PARM_DECL:
4411 case RESULT_DECL:
4412 /* No action is needed in this case. */
4413 return ref;
4415 CASE_CONVERT:
4416 case FLOAT_EXPR:
4417 case FIX_TRUNC_EXPR:
4418 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4419 break;
4421 case INDIRECT_REF:
4422 result = build_nt (INDIRECT_REF,
4423 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4424 break;
4426 case COMPONENT_REF:
4427 result = build_nt (COMPONENT_REF,
4428 stabilize_reference (TREE_OPERAND (ref, 0)),
4429 TREE_OPERAND (ref, 1), NULL_TREE);
4430 break;
4432 case BIT_FIELD_REF:
4433 result = build_nt (BIT_FIELD_REF,
4434 stabilize_reference (TREE_OPERAND (ref, 0)),
4435 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4436 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4437 break;
4439 case ARRAY_REF:
4440 result = build_nt (ARRAY_REF,
4441 stabilize_reference (TREE_OPERAND (ref, 0)),
4442 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4443 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4444 break;
4446 case ARRAY_RANGE_REF:
4447 result = build_nt (ARRAY_RANGE_REF,
4448 stabilize_reference (TREE_OPERAND (ref, 0)),
4449 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4450 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4451 break;
4453 case COMPOUND_EXPR:
4454 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4455 it wouldn't be ignored. This matters when dealing with
4456 volatiles. */
4457 return stabilize_reference_1 (ref);
4459 /* If arg isn't a kind of lvalue we recognize, make no change.
4460 Caller should recognize the error for an invalid lvalue. */
4461 default:
4462 return ref;
4464 case ERROR_MARK:
4465 return error_mark_node;
4468 TREE_TYPE (result) = TREE_TYPE (ref);
4469 TREE_READONLY (result) = TREE_READONLY (ref);
4470 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4471 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4473 return result;
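/* A minimal illustrative sketch (not part of GCC itself): a front end
   lowering "ref += value" typically stabilizes REF first, so that it can
   appear on both sides of the resulting MODIFY_EXPR without re-evaluating
   side effects such as "a[i++]".  The function name is hypothetical.  */
#if 0
static tree
example_expand_plus_assign (tree ref, tree value)
{
  tree lhs = stabilize_reference (ref);
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, value);
  return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);
}
#endif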
4476 /* Low-level constructors for expressions. */
4478 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4479 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4481 void
4482 recompute_tree_invariant_for_addr_expr (tree t)
4484 tree node;
4485 bool tc = true, se = false;
4487 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4489 /* We started out assuming this address is both invariant and constant, but
4490 does not have side effects. Now go down any handled components and see if
4491 any of them involve offsets that are either non-constant or non-invariant.
4492 Also check for side-effects.
4494 ??? Note that this code makes no attempt to deal with the case where
4495 taking the address of something causes a copy due to misalignment. */
4497 #define UPDATE_FLAGS(NODE) \
4498 do { tree _node = (NODE); \
4499 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4500 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4502 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4503 node = TREE_OPERAND (node, 0))
4505 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4506 array reference (probably made temporarily by the G++ front end),
4507 so ignore all the operands. */
4508 if ((TREE_CODE (node) == ARRAY_REF
4509 || TREE_CODE (node) == ARRAY_RANGE_REF)
4510 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4512 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4513 if (TREE_OPERAND (node, 2))
4514 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4515 if (TREE_OPERAND (node, 3))
4516 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4518 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4519 FIELD_DECL, apparently. The G++ front end can put something else
4520 there, at least temporarily. */
4521 else if (TREE_CODE (node) == COMPONENT_REF
4522 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4524 if (TREE_OPERAND (node, 2))
4525 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4529 node = lang_hooks.expr_to_decl (node, &tc, &se);
4531 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4532 the address, since &(*a)->b is a form of addition. If it's a constant, the
4533 address is constant too. If it's a decl, its address is constant if the
4534 decl is static. Everything else is not constant and, furthermore,
4535 taking the address of a volatile variable is not volatile. */
4536 if (TREE_CODE (node) == INDIRECT_REF
4537 || TREE_CODE (node) == MEM_REF)
4538 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4539 else if (CONSTANT_CLASS_P (node))
4541 else if (DECL_P (node))
4542 tc &= (staticp (node) != NULL_TREE);
4543 else
4545 tc = false;
4546 se |= TREE_SIDE_EFFECTS (node);
4550 TREE_CONSTANT (t) = tc;
4551 TREE_SIDE_EFFECTS (t) = se;
4552 #undef UPDATE_FLAGS
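/* A minimal illustrative sketch (not part of GCC itself): build1 already
   calls the routine above when it creates a fresh ADDR_EXPR, so calling it
   by hand is only needed after an existing ADDR_EXPR has been modified in
   place.  The function name is hypothetical.  */
#if 0
static void
example_replace_addr_operand (tree addr_expr, tree new_object)
{
  TREE_OPERAND (addr_expr, 0) = new_object;
  /* Re-derive TREE_CONSTANT and TREE_SIDE_EFFECTS from the new operand.  */
  recompute_tree_invariant_for_addr_expr (addr_expr);
}
#endif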
4555 /* Build an expression of code CODE, data type TYPE, and operands as
4556 specified. Expressions and reference nodes can be created this way.
4557 Constants, decls, types and misc nodes cannot be.
4559 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4560 enough for all extant tree codes. */
4562 tree
4563 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4565 tree t;
4567 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4569 t = make_node (code PASS_MEM_STAT);
4570 TREE_TYPE (t) = tt;
4572 return t;
4575 tree
4576 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4578 int length = sizeof (struct tree_exp);
4579 tree t;
4581 record_node_allocation_statistics (code, length);
4583 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4585 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4587 memset (t, 0, sizeof (struct tree_common));
4589 TREE_SET_CODE (t, code);
4591 TREE_TYPE (t) = type;
4592 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4593 TREE_OPERAND (t, 0) = node;
4594 if (node && !TYPE_P (node))
4596 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4597 TREE_READONLY (t) = TREE_READONLY (node);
4600 if (TREE_CODE_CLASS (code) == tcc_statement)
4602 if (code != DEBUG_BEGIN_STMT)
4603 TREE_SIDE_EFFECTS (t) = 1;
4605 else switch (code)
4607 case VA_ARG_EXPR:
4608 /* All of these have side-effects, no matter what their
4609 operands are. */
4610 TREE_SIDE_EFFECTS (t) = 1;
4611 TREE_READONLY (t) = 0;
4612 break;
4614 case INDIRECT_REF:
4615 /* Whether a dereference is readonly has nothing to do with whether
4616 its operand is readonly. */
4617 TREE_READONLY (t) = 0;
4618 break;
4620 case ADDR_EXPR:
4621 if (node)
4622 recompute_tree_invariant_for_addr_expr (t);
4623 break;
4625 default:
4626 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4627 && node && !TYPE_P (node)
4628 && TREE_CONSTANT (node))
4629 TREE_CONSTANT (t) = 1;
4630 if (TREE_CODE_CLASS (code) == tcc_reference
4631 && node && TREE_THIS_VOLATILE (node))
4632 TREE_THIS_VOLATILE (t) = 1;
4633 break;
4636 return t;
4639 #define PROCESS_ARG(N) \
4640 do { \
4641 TREE_OPERAND (t, N) = arg##N; \
4642 if (arg##N &&!TYPE_P (arg##N)) \
4644 if (TREE_SIDE_EFFECTS (arg##N)) \
4645 side_effects = 1; \
4646 if (!TREE_READONLY (arg##N) \
4647 && !CONSTANT_CLASS_P (arg##N)) \
4648 (void) (read_only = 0); \
4649 if (!TREE_CONSTANT (arg##N)) \
4650 (void) (constant = 0); \
4652 } while (0)
4654 tree
4655 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4657 bool constant, read_only, side_effects, div_by_zero;
4658 tree t;
4660 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4662 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4663 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4664 /* When sizetype precision doesn't match that of pointers
4665 we need to be able to build explicit extensions or truncations
4666 of the offset argument. */
4667 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4668 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4669 && TREE_CODE (arg1) == INTEGER_CST);
4671 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4672 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4673 && ptrofftype_p (TREE_TYPE (arg1)));
4675 t = make_node (code PASS_MEM_STAT);
4676 TREE_TYPE (t) = tt;
4678 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4679 result based on those same flags for the arguments. But if the
4680 arguments aren't really even `tree' expressions, we shouldn't be trying
4681 to do this. */
4683 /* Expressions without side effects may be constant if their
4684 arguments are as well. */
4685 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4686 || TREE_CODE_CLASS (code) == tcc_binary);
4687 read_only = 1;
4688 side_effects = TREE_SIDE_EFFECTS (t);
4690 switch (code)
4692 case TRUNC_DIV_EXPR:
4693 case CEIL_DIV_EXPR:
4694 case FLOOR_DIV_EXPR:
4695 case ROUND_DIV_EXPR:
4696 case EXACT_DIV_EXPR:
4697 case CEIL_MOD_EXPR:
4698 case FLOOR_MOD_EXPR:
4699 case ROUND_MOD_EXPR:
4700 case TRUNC_MOD_EXPR:
4701 div_by_zero = integer_zerop (arg1);
4702 break;
4703 default:
4704 div_by_zero = false;
4707 PROCESS_ARG (0);
4708 PROCESS_ARG (1);
4710 TREE_SIDE_EFFECTS (t) = side_effects;
4711 if (code == MEM_REF)
4713 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4715 tree o = TREE_OPERAND (arg0, 0);
4716 TREE_READONLY (t) = TREE_READONLY (o);
4717 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4720 else
4722 TREE_READONLY (t) = read_only;
4723 /* Don't mark X / 0 as constant. */
4724 TREE_CONSTANT (t) = constant && !div_by_zero;
4725 TREE_THIS_VOLATILE (t)
4726 = (TREE_CODE_CLASS (code) == tcc_reference
4727 && arg0 && TREE_THIS_VOLATILE (arg0));
4730 return t;
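/* A minimal illustrative sketch (not part of GCC itself): building
   "decl + 1" for an integer DECL.  TREE_CONSTANT, TREE_READONLY and
   TREE_SIDE_EFFECTS of the result are derived from the operands by
   PROCESS_ARG above.  The function name is hypothetical.  */
#if 0
static tree
example_build_plus_one (tree decl)
{
  tree one = build_int_cst (TREE_TYPE (decl), 1);
  return build2 (PLUS_EXPR, TREE_TYPE (decl), decl, one);
}
#endif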
4734 tree
4735 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4736 tree arg2 MEM_STAT_DECL)
4738 bool constant, read_only, side_effects;
4739 tree t;
4741 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4742 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4744 t = make_node (code PASS_MEM_STAT);
4745 TREE_TYPE (t) = tt;
4747 read_only = 1;
4749 /* As a special exception, if COND_EXPR has NULL branches, we
4750 assume that it is a gimple statement and always consider
4751 it to have side effects. */
4752 if (code == COND_EXPR
4753 && tt == void_type_node
4754 && arg1 == NULL_TREE
4755 && arg2 == NULL_TREE)
4756 side_effects = true;
4757 else
4758 side_effects = TREE_SIDE_EFFECTS (t);
4760 PROCESS_ARG (0);
4761 PROCESS_ARG (1);
4762 PROCESS_ARG (2);
4764 if (code == COND_EXPR)
4765 TREE_READONLY (t) = read_only;
4767 TREE_SIDE_EFFECTS (t) = side_effects;
4768 TREE_THIS_VOLATILE (t)
4769 = (TREE_CODE_CLASS (code) == tcc_reference
4770 && arg0 && TREE_THIS_VOLATILE (arg0));
4772 return t;
4775 tree
4776 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4777 tree arg2, tree arg3 MEM_STAT_DECL)
4779 bool constant, read_only, side_effects;
4780 tree t;
4782 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4784 t = make_node (code PASS_MEM_STAT);
4785 TREE_TYPE (t) = tt;
4787 side_effects = TREE_SIDE_EFFECTS (t);
4789 PROCESS_ARG (0);
4790 PROCESS_ARG (1);
4791 PROCESS_ARG (2);
4792 PROCESS_ARG (3);
4794 TREE_SIDE_EFFECTS (t) = side_effects;
4795 TREE_THIS_VOLATILE (t)
4796 = (TREE_CODE_CLASS (code) == tcc_reference
4797 && arg0 && TREE_THIS_VOLATILE (arg0));
4799 return t;
4802 tree
4803 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4804 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4806 bool constant, read_only, side_effects;
4807 tree t;
4809 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4811 t = make_node (code PASS_MEM_STAT);
4812 TREE_TYPE (t) = tt;
4814 side_effects = TREE_SIDE_EFFECTS (t);
4816 PROCESS_ARG (0);
4817 PROCESS_ARG (1);
4818 PROCESS_ARG (2);
4819 PROCESS_ARG (3);
4820 PROCESS_ARG (4);
4822 TREE_SIDE_EFFECTS (t) = side_effects;
4823 if (code == TARGET_MEM_REF)
4825 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4827 tree o = TREE_OPERAND (arg0, 0);
4828 TREE_READONLY (t) = TREE_READONLY (o);
4829 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4832 else
4833 TREE_THIS_VOLATILE (t)
4834 = (TREE_CODE_CLASS (code) == tcc_reference
4835 && arg0 && TREE_THIS_VOLATILE (arg0));
4837 return t;
4840 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4841 on the pointer PTR. */
4843 tree
4844 build_simple_mem_ref_loc (location_t loc, tree ptr)
4846 poly_int64 offset = 0;
4847 tree ptype = TREE_TYPE (ptr);
4848 tree tem;
4849 /* For convenience allow addresses that collapse to a simple base
4850 and offset. */
4851 if (TREE_CODE (ptr) == ADDR_EXPR
4852 && (handled_component_p (TREE_OPERAND (ptr, 0))
4853 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4855 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4856 gcc_assert (ptr);
4857 if (TREE_CODE (ptr) == MEM_REF)
4859 offset += mem_ref_offset (ptr).force_shwi ();
4860 ptr = TREE_OPERAND (ptr, 0);
4862 else
4863 ptr = build_fold_addr_expr (ptr);
4864 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4866 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4867 ptr, build_int_cst (ptype, offset));
4868 SET_EXPR_LOCATION (tem, loc);
4869 return tem;
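/* A minimal illustrative sketch (not part of GCC itself): dereferencing a
   pointer, i.e. the GENERIC equivalent of "*ptr", expressed as a MEM_REF
   whose second operand is a zero offset of PTR's pointer type.  The
   function name is hypothetical.  */
#if 0
static tree
example_load_through_pointer (location_t loc, tree ptr)
{
  return build_simple_mem_ref_loc (loc, ptr);
}
#endif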
4872 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4874 poly_offset_int
4875 mem_ref_offset (const_tree t)
4877 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4878 SIGNED);
4881 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4882 offsetted by OFFSET units. */
4884 tree
4885 build_invariant_address (tree type, tree base, poly_int64 offset)
4887 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4888 build_fold_addr_expr (base),
4889 build_int_cst (ptr_type_node, offset));
4890 tree addr = build1 (ADDR_EXPR, type, ref);
4891 recompute_tree_invariant_for_addr_expr (addr);
4892 return addr;
4895 /* Similar except don't specify the TREE_TYPE
4896 and leave the TREE_SIDE_EFFECTS as 0.
4897 It is permissible for arguments to be null,
4898 or even garbage if their values do not matter. */
4900 tree
4901 build_nt (enum tree_code code, ...)
4903 tree t;
4904 int length;
4905 int i;
4906 va_list p;
4908 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4910 va_start (p, code);
4912 t = make_node (code);
4913 length = TREE_CODE_LENGTH (code);
4915 for (i = 0; i < length; i++)
4916 TREE_OPERAND (t, i) = va_arg (p, tree);
4918 va_end (p);
4919 return t;
4922 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4923 tree vec. */
4925 tree
4926 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4928 tree ret, t;
4929 unsigned int ix;
4931 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4932 CALL_EXPR_FN (ret) = fn;
4933 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4934 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4935 CALL_EXPR_ARG (ret, ix) = t;
4936 return ret;
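/* A minimal illustrative sketch (not part of GCC itself): building an
   unfolded one-argument call "fndecl (arg)".  CALL_EXPR_FN wants the
   address of the callee, hence build_fold_addr_expr.  The function name is
   hypothetical.  */
#if 0
static tree
example_build_call_1 (tree fndecl, tree arg)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg);
  return build_nt_call_vec (build_fold_addr_expr (fndecl), args);
}
#endif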
4939 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4940 We do NOT enter this node in any sort of symbol table.
4942 LOC is the location of the decl.
4944 layout_decl is used to set up the decl's storage layout.
4945 Other slots are initialized to 0 or null pointers. */
4947 tree
4948 build_decl (location_t loc, enum tree_code code, tree name,
4949 tree type MEM_STAT_DECL)
4951 tree t;
4953 t = make_node (code PASS_MEM_STAT);
4954 DECL_SOURCE_LOCATION (t) = loc;
4956 /* if (type == error_mark_node)
4957 type = integer_type_node; */
4958 /* That is not done, deliberately, so that having error_mark_node
4959 as the type can suppress useless errors in the use of this variable. */
4961 DECL_NAME (t) = name;
4962 TREE_TYPE (t) = type;
4964 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4965 layout_decl (t, 0);
4967 return t;
4970 /* Builds and returns function declaration with NAME and TYPE. */
4972 tree
4973 build_fn_decl (const char *name, tree type)
4975 tree id = get_identifier (name);
4976 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4978 DECL_EXTERNAL (decl) = 1;
4979 TREE_PUBLIC (decl) = 1;
4980 DECL_ARTIFICIAL (decl) = 1;
4981 TREE_NOTHROW (decl) = 1;
4983 return decl;
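/* A minimal illustrative sketch (not part of GCC itself): declaring an
   external runtime routine "void __example_hook (void)".  Both the example
   routine and the function name are hypothetical.  */
#if 0
static tree
example_declare_hook (void)
{
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  return build_fn_decl ("__example_hook", fntype);
}
#endif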
4986 vec<tree, va_gc> *all_translation_units;
4988 /* Builds a new translation-unit decl with name NAME, queues it in the
4989 global list of translation-unit decls and returns it. */
4991 tree
4992 build_translation_unit_decl (tree name)
4994 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4995 name, NULL_TREE);
4996 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4997 vec_safe_push (all_translation_units, tu);
4998 return tu;
5002 /* BLOCK nodes are used to represent the structure of binding contours
5003 and declarations, once those contours have been exited and their contents
5004 compiled. This information is used for outputting debugging info. */
5006 tree
5007 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5009 tree block = make_node (BLOCK);
5011 BLOCK_VARS (block) = vars;
5012 BLOCK_SUBBLOCKS (block) = subblocks;
5013 BLOCK_SUPERCONTEXT (block) = supercontext;
5014 BLOCK_CHAIN (block) = chain;
5015 return block;
5019 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5021 LOC is the location to use in tree T. */
5023 void
5024 protected_set_expr_location (tree t, location_t loc)
5026 if (CAN_HAVE_LOCATION_P (t))
5027 SET_EXPR_LOCATION (t, loc);
5030 /* Reset the expression *EXPR_P, a size or position.
5032 ??? We could reset all non-constant sizes or positions. But it's cheap
5033 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5035 We need to reset self-referential sizes or positions because they cannot
5036 be gimplified and thus can contain a CALL_EXPR after the gimplification
5037 is finished, which will run afoul of LTO streaming. And they need to be
5038 reset to something essentially dummy but not constant, so as to preserve
5039 the properties of the object they are attached to. */
5041 static inline void
5042 free_lang_data_in_one_sizepos (tree *expr_p)
5044 tree expr = *expr_p;
5045 if (CONTAINS_PLACEHOLDER_P (expr))
5046 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5050 /* Reset all the fields in a binfo node BINFO. We only keep
5051 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5053 static void
5054 free_lang_data_in_binfo (tree binfo)
5056 unsigned i;
5057 tree t;
5059 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5061 BINFO_VIRTUALS (binfo) = NULL_TREE;
5062 BINFO_BASE_ACCESSES (binfo) = NULL;
5063 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5064 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5066 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5067 free_lang_data_in_binfo (t);
5071 /* Reset all language specific information still present in TYPE. */
5073 static void
5074 free_lang_data_in_type (tree type)
5076 gcc_assert (TYPE_P (type));
5078 /* Give the FE a chance to remove its own data first. */
5079 lang_hooks.free_lang_data (type);
5081 TREE_LANG_FLAG_0 (type) = 0;
5082 TREE_LANG_FLAG_1 (type) = 0;
5083 TREE_LANG_FLAG_2 (type) = 0;
5084 TREE_LANG_FLAG_3 (type) = 0;
5085 TREE_LANG_FLAG_4 (type) = 0;
5086 TREE_LANG_FLAG_5 (type) = 0;
5087 TREE_LANG_FLAG_6 (type) = 0;
5089 if (TREE_CODE (type) == FUNCTION_TYPE)
5091 /* Remove the const and volatile qualifiers from arguments. The
5092 C++ front end removes them, but the C front end does not,
5093 leading to false ODR violation errors when merging two
5094 instances of the same function signature compiled by
5095 different front ends. */
5096 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5098 tree arg_type = TREE_VALUE (p);
5100 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5102 int quals = TYPE_QUALS (arg_type)
5103 & ~TYPE_QUAL_CONST
5104 & ~TYPE_QUAL_VOLATILE;
5105 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5106 free_lang_data_in_type (TREE_VALUE (p));
5108 /* C++ FE uses TREE_PURPOSE to store initial values. */
5109 TREE_PURPOSE (p) = NULL;
5112 else if (TREE_CODE (type) == METHOD_TYPE)
5113 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5114 /* C++ FE uses TREE_PURPOSE to store initial values. */
5115 TREE_PURPOSE (p) = NULL;
5116 else if (RECORD_OR_UNION_TYPE_P (type))
5118 /* Remove members that are not FIELD_DECLs from the field list
5119 of an aggregate. These occur in C++. */
5120 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5121 if (TREE_CODE (member) == FIELD_DECL)
5122 prev = &DECL_CHAIN (member);
5123 else
5124 *prev = DECL_CHAIN (member);
5126 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5127 and leaves the pointer dangling from time to time. */
5128 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5129 TYPE_VFIELD (type) = NULL_TREE;
5131 if (TYPE_BINFO (type))
5133 free_lang_data_in_binfo (TYPE_BINFO (type));
5134 /* We need to preserve link to bases and virtual table for all
5135 polymorphic types to make devirtualization machinery working. */
5136 if (!BINFO_VTABLE (TYPE_BINFO (type))
5137 || !flag_devirtualize)
5138 TYPE_BINFO (type) = NULL;
5141 else if (INTEGRAL_TYPE_P (type)
5142 || SCALAR_FLOAT_TYPE_P (type)
5143 || FIXED_POINT_TYPE_P (type))
5145 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5146 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5149 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5151 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5152 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5154 if (TYPE_CONTEXT (type)
5155 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5157 tree ctx = TYPE_CONTEXT (type);
5160 ctx = BLOCK_SUPERCONTEXT (ctx);
5162 while (ctx && TREE_CODE (ctx) == BLOCK);
5163 TYPE_CONTEXT (type) = ctx;
5166 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5167 TYPE_DECL if the type doesn't have linkage. */
5168 if (! type_with_linkage_p (type))
5169 TYPE_NAME (type) = TYPE_IDENTIFIER (type);
5173 /* Return true if DECL may need an assembler name to be set. */
5175 static inline bool
5176 need_assembler_name_p (tree decl)
5178 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5179 Rule merging. This makes type_odr_p return true on those types during
5180 LTO, and by comparing the mangled names we can tell which types are
5181 intended to be equivalent across compilation units.
5183 We do not store names of type_in_anonymous_namespace_p.
5185 Record, union and enumeration types have linkage that allows us
5186 to check type_in_anonymous_namespace_p. We do not mangle compound types
5187 that can always be compared structurally.
5189 Similarly for builtin types, we compare properties of their main variant.
5190 Integer types are a special case, where mangling does distinguish
5191 char/signed char/unsigned char etc. Storing names for these lets
5192 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5193 See cp/mangle.c:write_builtin_type for details. */
5195 if (flag_lto_odr_type_mering
5196 && TREE_CODE (decl) == TYPE_DECL
5197 && DECL_NAME (decl)
5198 && decl == TYPE_NAME (TREE_TYPE (decl))
5199 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5200 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5201 && (type_with_linkage_p (TREE_TYPE (decl))
5202 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5203 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5204 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5205 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5206 if (!VAR_OR_FUNCTION_DECL_P (decl))
5207 return false;
5209 /* If DECL already has its assembler name set, it does not need a
5210 new one. */
5211 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5212 || DECL_ASSEMBLER_NAME_SET_P (decl))
5213 return false;
5215 /* Abstract decls do not need an assembler name. */
5216 if (DECL_ABSTRACT_P (decl))
5217 return false;
5219 /* For VAR_DECLs, only static, public and external symbols need an
5220 assembler name. */
5221 if (VAR_P (decl)
5222 && !TREE_STATIC (decl)
5223 && !TREE_PUBLIC (decl)
5224 && !DECL_EXTERNAL (decl))
5225 return false;
5227 if (TREE_CODE (decl) == FUNCTION_DECL)
5229 /* Do not set assembler name on builtins. Allow RTL expansion to
5230 decide whether to expand inline or via a regular call. */
5231 if (DECL_BUILT_IN (decl)
5232 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5233 return false;
5235 /* Functions represented in the callgraph need an assembler name. */
5236 if (cgraph_node::get (decl) != NULL)
5237 return true;
5239 /* Unused and not public functions don't need an assembler name. */
5240 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5241 return false;
5244 return true;
5248 /* Reset all language specific information still present in symbol
5249 DECL. */
5251 static void
5252 free_lang_data_in_decl (tree decl)
5254 gcc_assert (DECL_P (decl));
5256 /* Give the FE a chance to remove its own data first. */
5257 lang_hooks.free_lang_data (decl);
5259 TREE_LANG_FLAG_0 (decl) = 0;
5260 TREE_LANG_FLAG_1 (decl) = 0;
5261 TREE_LANG_FLAG_2 (decl) = 0;
5262 TREE_LANG_FLAG_3 (decl) = 0;
5263 TREE_LANG_FLAG_4 (decl) = 0;
5264 TREE_LANG_FLAG_5 (decl) = 0;
5265 TREE_LANG_FLAG_6 (decl) = 0;
5267 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5268 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5269 if (TREE_CODE (decl) == FIELD_DECL)
5271 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5272 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5273 DECL_QUALIFIER (decl) = NULL_TREE;
5276 if (TREE_CODE (decl) == FUNCTION_DECL)
5278 struct cgraph_node *node;
5279 if (!(node = cgraph_node::get (decl))
5280 || (!node->definition && !node->clones))
5282 if (node)
5283 node->release_body ();
5284 else
5286 release_function_body (decl);
5287 DECL_ARGUMENTS (decl) = NULL;
5288 DECL_RESULT (decl) = NULL;
5289 DECL_INITIAL (decl) = error_mark_node;
5292 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5294 tree t;
5296 /* If DECL has a gimple body, then the context for its
5297 arguments must be DECL. Otherwise, it doesn't really
5298 matter, as we will not be emitting any code for DECL. In
5299 general, there may be other instances of DECL created by
5300 the front end and since PARM_DECLs are generally shared,
5301 their DECL_CONTEXT changes as the replicas of DECL are
5302 created. The only time where DECL_CONTEXT is important
5303 is for the FUNCTION_DECLs that have a gimple body (since
5304 the PARM_DECL will be used in the function's body). */
5305 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5306 DECL_CONTEXT (t) = decl;
5307 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5308 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5309 = target_option_default_node;
5310 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5311 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5312 = optimization_default_node;
5315 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5316 At this point, it is not needed anymore. */
5317 DECL_SAVED_TREE (decl) = NULL_TREE;
5319 /* Clear the abstract origin if it refers to a method.
5320 Otherwise dwarf2out.c will ICE as we splice functions out of
5321 TYPE_FIELDS and thus the origin will not be output
5322 correctly. */
5323 if (DECL_ABSTRACT_ORIGIN (decl)
5324 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5325 && RECORD_OR_UNION_TYPE_P
5326 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5327 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5329 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5330 DECL_VINDEX referring to itself into a vtable slot number as it
5331 should. Happens with functions that are copied and then forgotten
5332 about. Just clear it, it won't matter anymore. */
5333 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5334 DECL_VINDEX (decl) = NULL_TREE;
5336 else if (VAR_P (decl))
5338 if ((DECL_EXTERNAL (decl)
5339 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5340 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5341 DECL_INITIAL (decl) = NULL_TREE;
5343 else if (TREE_CODE (decl) == TYPE_DECL)
5345 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5346 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5347 DECL_INITIAL (decl) = NULL_TREE;
5349 else if (TREE_CODE (decl) == FIELD_DECL)
5350 DECL_INITIAL (decl) = NULL_TREE;
5351 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5352 && DECL_INITIAL (decl)
5353 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5355 /* Strip builtins from the translation-unit BLOCK. We still have targets
5356 without builtin_decl_explicit support and also builtins are shared
5357 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5358 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5359 while (*nextp)
5361 tree var = *nextp;
5362 if (TREE_CODE (var) == FUNCTION_DECL
5363 && DECL_BUILT_IN (var))
5364 *nextp = TREE_CHAIN (var);
5365 else
5366 nextp = &TREE_CHAIN (var);
5372 /* Data used when collecting DECLs and TYPEs for language data removal. */
5374 struct free_lang_data_d
5376 free_lang_data_d () : decls (100), types (100) {}
5378 /* Worklist to avoid excessive recursion. */
5379 auto_vec<tree> worklist;
5381 /* Set of traversed objects. Used to avoid duplicate visits. */
5382 hash_set<tree> pset;
5384 /* Array of symbols to process with free_lang_data_in_decl. */
5385 auto_vec<tree> decls;
5387 /* Array of types to process with free_lang_data_in_type. */
5388 auto_vec<tree> types;
5392 /* Add type or decl T to one of the lists of tree nodes that need their
5393 language data removed. The lists are held inside FLD. */
5395 static void
5396 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5398 if (DECL_P (t))
5399 fld->decls.safe_push (t);
5400 else if (TYPE_P (t))
5401 fld->types.safe_push (t);
5402 else
5403 gcc_unreachable ();
5406 /* Push tree node T into FLD->WORKLIST. */
5408 static inline void
5409 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5411 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5412 fld->worklist.safe_push ((t));
5416 /* Operand callback helper for free_lang_data_in_node. *TP is the
5417 subtree operand being considered. */
5419 static tree
5420 find_decls_types_r (tree *tp, int *ws, void *data)
5422 tree t = *tp;
5423 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5425 if (TREE_CODE (t) == TREE_LIST)
5426 return NULL_TREE;
5428 /* Language specific nodes will be removed, so there is no need
5429 to gather anything under them. */
5430 if (is_lang_specific (t))
5432 *ws = 0;
5433 return NULL_TREE;
5436 if (DECL_P (t))
5438 /* Note that walk_tree does not traverse every possible field in
5439 decls, so we have to do our own traversals here. */
5440 add_tree_to_fld_list (t, fld);
5442 fld_worklist_push (DECL_NAME (t), fld);
5443 fld_worklist_push (DECL_CONTEXT (t), fld);
5444 fld_worklist_push (DECL_SIZE (t), fld);
5445 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5447 /* We are going to remove everything under DECL_INITIAL for
5448 TYPE_DECLs. No point walking them. */
5449 if (TREE_CODE (t) != TYPE_DECL)
5450 fld_worklist_push (DECL_INITIAL (t), fld);
5452 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5453 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5455 if (TREE_CODE (t) == FUNCTION_DECL)
5457 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5458 fld_worklist_push (DECL_RESULT (t), fld);
5460 else if (TREE_CODE (t) == TYPE_DECL)
5462 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5464 else if (TREE_CODE (t) == FIELD_DECL)
5466 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5467 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5468 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5469 fld_worklist_push (DECL_FCONTEXT (t), fld);
5472 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5473 && DECL_HAS_VALUE_EXPR_P (t))
5474 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5476 if (TREE_CODE (t) != FIELD_DECL
5477 && TREE_CODE (t) != TYPE_DECL)
5478 fld_worklist_push (TREE_CHAIN (t), fld);
5479 *ws = 0;
5481 else if (TYPE_P (t))
5483 /* Note that walk_tree does not traverse every possible field in
5484 types, so we have to do our own traversals here. */
5485 add_tree_to_fld_list (t, fld);
5487 if (!RECORD_OR_UNION_TYPE_P (t))
5488 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5489 fld_worklist_push (TYPE_SIZE (t), fld);
5490 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5491 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5492 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5493 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5494 fld_worklist_push (TYPE_NAME (t), fld);
5495 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5496 them, and thus we neither need nor want to reach unused pointer types
5497 this way. */
5498 if (!POINTER_TYPE_P (t))
5499 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5500 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5501 if (!RECORD_OR_UNION_TYPE_P (t))
5502 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5503 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5504 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it, and thus we
5505 neither need nor want to reach unused variants this way. */
5506 if (TYPE_CONTEXT (t))
5508 tree ctx = TYPE_CONTEXT (t);
5509 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5510 So push that instead. */
5511 while (ctx && TREE_CODE (ctx) == BLOCK)
5512 ctx = BLOCK_SUPERCONTEXT (ctx);
5513 fld_worklist_push (ctx, fld);
5515 /* Do not walk TYPE_CANONICAL. We do not stream it, and thus we neither
5516 need nor want to reach unused types this way. */
5518 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5520 unsigned i;
5521 tree tem;
5522 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5523 fld_worklist_push (TREE_TYPE (tem), fld);
5524 fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
5525 fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
5527 if (RECORD_OR_UNION_TYPE_P (t))
5529 tree tem;
5530 /* Push all TYPE_FIELDS - there can be interleaving interesting
5531 and non-interesting things. */
5532 tem = TYPE_FIELDS (t);
5533 while (tem)
5535 if (TREE_CODE (tem) == FIELD_DECL
5536 || (TREE_CODE (tem) == TYPE_DECL
5537 && !DECL_IGNORED_P (tem)
5538 && debug_info_level > DINFO_LEVEL_TERSE
5539 && !is_redundant_typedef (tem)))
5540 fld_worklist_push (tem, fld);
5541 tem = TREE_CHAIN (tem);
5544 if (FUNC_OR_METHOD_TYPE_P (t))
5545 fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
5547 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5548 *ws = 0;
5550 else if (TREE_CODE (t) == BLOCK)
5552 tree tem;
5553 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5554 fld_worklist_push (tem, fld);
5555 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5556 fld_worklist_push (tem, fld);
5557 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5560 if (TREE_CODE (t) != IDENTIFIER_NODE
5561 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5562 fld_worklist_push (TREE_TYPE (t), fld);
5564 return NULL_TREE;
5568 /* Find decls and types in T. */
5570 static void
5571 find_decls_types (tree t, struct free_lang_data_d *fld)
5573 while (1)
5575 if (!fld->pset.contains (t))
5576 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5577 if (fld->worklist.is_empty ())
5578 break;
5579 t = fld->worklist.pop ();
5583 /* Replace all the types in LIST with the corresponding runtime
5584 types. */
5586 static tree
5587 get_eh_types_for_runtime (tree list)
5589 tree head, prev;
5591 if (list == NULL_TREE)
5592 return NULL_TREE;
5594 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5595 prev = head;
5596 list = TREE_CHAIN (list);
5597 while (list)
5599 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5600 TREE_CHAIN (prev) = n;
5601 prev = TREE_CHAIN (prev);
5602 list = TREE_CHAIN (list);
5605 return head;
5609 /* Find decls and types referenced in EH region R and store them in
5610 FLD->DECLS and FLD->TYPES. */
5612 static void
5613 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5615 switch (r->type)
5617 case ERT_CLEANUP:
5618 break;
5620 case ERT_TRY:
5622 eh_catch c;
5624 /* The types referenced in each catch must first be changed to the
5625 EH types used at runtime. This removes references to FE types
5626 in the region. */
5627 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5629 c->type_list = get_eh_types_for_runtime (c->type_list);
5630 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5633 break;
5635 case ERT_ALLOWED_EXCEPTIONS:
5636 r->u.allowed.type_list
5637 = get_eh_types_for_runtime (r->u.allowed.type_list);
5638 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5639 break;
5641 case ERT_MUST_NOT_THROW:
5642 walk_tree (&r->u.must_not_throw.failure_decl,
5643 find_decls_types_r, fld, &fld->pset);
5644 break;
5649 /* Find decls and types referenced in cgraph node N and store them in
5650 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5651 look for *every* kind of DECL and TYPE node reachable from N,
5652 including those embedded inside types and decls (i.e., TYPE_DECLs,
5653 NAMESPACE_DECLs, etc.). */
5655 static void
5656 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5658 basic_block bb;
5659 struct function *fn;
5660 unsigned ix;
5661 tree t;
5663 find_decls_types (n->decl, fld);
5665 if (!gimple_has_body_p (n->decl))
5666 return;
5668 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5670 fn = DECL_STRUCT_FUNCTION (n->decl);
5672 /* Traverse locals. */
5673 FOR_EACH_LOCAL_DECL (fn, ix, t)
5674 find_decls_types (t, fld);
5676 /* Traverse EH regions in FN. */
5678 eh_region r;
5679 FOR_ALL_EH_REGION_FN (r, fn)
5680 find_decls_types_in_eh_region (r, fld);
5683 /* Traverse every statement in FN. */
5684 FOR_EACH_BB_FN (bb, fn)
5686 gphi_iterator psi;
5687 gimple_stmt_iterator si;
5688 unsigned i;
5690 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5692 gphi *phi = psi.phi ();
5694 for (i = 0; i < gimple_phi_num_args (phi); i++)
5696 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5697 find_decls_types (*arg_p, fld);
5701 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5703 gimple *stmt = gsi_stmt (si);
5705 if (is_gimple_call (stmt))
5706 find_decls_types (gimple_call_fntype (stmt), fld);
5708 for (i = 0; i < gimple_num_ops (stmt); i++)
5710 tree arg = gimple_op (stmt, i);
5711 find_decls_types (arg, fld);
5718 /* Find decls and types referenced in varpool node N and store them in
5719 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5720 look for *every* kind of DECL and TYPE node reachable from N,
5721 including those embedded inside types and decls (i.e., TYPE_DECLs,
5722 NAMESPACE_DECLs, etc.). */
5724 static void
5725 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5727 find_decls_types (v->decl, fld);
5730 /* If T needs an assembler name, have one created for it. */
5732 void
5733 assign_assembler_name_if_needed (tree t)
5735 if (need_assembler_name_p (t))
5737 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5738 diagnostics that use input_location to show locus
5739 information. The problem here is that, at this point,
5740 input_location is generally anchored to the end of the file
5741 (since the parser is long gone), so we don't have a good
5742 position to pin it to.
5744 To alleviate this problem, this uses the location of T's
5745 declaration. Examples of this are
5746 testsuite/g++.dg/template/cond2.C and
5747 testsuite/g++.dg/template/pr35240.C. */
5748 location_t saved_location = input_location;
5749 input_location = DECL_SOURCE_LOCATION (t);
5751 decl_assembler_name (t);
5753 input_location = saved_location;
5758 /* Free language specific information for every operand and expression
5759 in every node of the call graph. This process operates in three stages:
5761 1- Every callgraph node and varpool node is traversed looking for
5762 decls and types embedded in them. This is a more exhaustive
5763 search than that done by find_referenced_vars, because it will
5764 also collect individual fields, decls embedded in types, etc.
5766 2- All the decls found are sent to free_lang_data_in_decl.
5768 3- All the types found are sent to free_lang_data_in_type.
5770 The ordering between decls and types is important because
5771 free_lang_data_in_decl sets assembler names, which includes
5772 mangling. So types cannot be freed up until assembler names have
5773 been set up. */
5775 static void
5776 free_lang_data_in_cgraph (void)
5778 struct cgraph_node *n;
5779 varpool_node *v;
5780 struct free_lang_data_d fld;
5781 tree t;
5782 unsigned i;
5783 alias_pair *p;
5785 /* Find decls and types in the body of every function in the callgraph. */
5786 FOR_EACH_FUNCTION (n)
5787 find_decls_types_in_node (n, &fld);
5789 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5790 find_decls_types (p->decl, &fld);
5792 /* Find decls and types in every varpool symbol. */
5793 FOR_EACH_VARIABLE (v)
5794 find_decls_types_in_var (v, &fld);
5796 /* Set the assembler name on every decl found. We need to do this
5797 now because free_lang_data_in_decl will invalidate data needed
5798 for mangling. This breaks mangling on interdependent decls. */
5799 FOR_EACH_VEC_ELT (fld.decls, i, t)
5800 assign_assembler_name_if_needed (t);
5802 /* Traverse every decl found freeing its language data. */
5803 FOR_EACH_VEC_ELT (fld.decls, i, t)
5804 free_lang_data_in_decl (t);
5806 /* Traverse every type found freeing its language data. */
5807 FOR_EACH_VEC_ELT (fld.types, i, t)
5808 free_lang_data_in_type (t);
5809 if (flag_checking)
5811 FOR_EACH_VEC_ELT (fld.types, i, t)
5812 verify_type (t);
5817 /* Free resources that are used by the front ends but are not needed once they are done. */
5819 static unsigned
5820 free_lang_data (void)
5822 unsigned i;
5824 /* If we are the LTO frontend we have freed lang-specific data already. */
5825 if (in_lto_p
5826 || (!flag_generate_lto && !flag_generate_offload))
5827 return 0;
5829 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
5830 if (vec_safe_is_empty (all_translation_units))
5831 build_translation_unit_decl (NULL_TREE);
5833 /* Allocate and assign alias sets to the standard integer types
5834 while the slots are still in the way the frontends generated them. */
5835 for (i = 0; i < itk_none; ++i)
5836 if (integer_types[i])
5837 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5839 /* Traverse the IL resetting language specific information for
5840 operands, expressions, etc. */
5841 free_lang_data_in_cgraph ();
5843 /* Create gimple variants for common types. */
5844 for (unsigned i = 0;
5845 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
5846 ++i)
5847 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
5849 /* Reset some langhooks. Do not reset types_compatible_p, it may
5850 still be used indirectly via the get_alias_set langhook. */
5851 lang_hooks.dwarf_name = lhd_dwarf_name;
5852 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5853 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5855 /* We do not want the default decl_assembler_name implementation,
5856 rather if we have fixed everything we want a wrapper around it
5857 asserting that all non-local symbols already got their assembler
5858 name and only produce assembler names for local symbols. Or rather
5859 make sure we never call decl_assembler_name on local symbols and
5860 devise a separate, middle-end private scheme for it. */
5862 /* Reset diagnostic machinery. */
5863 tree_diagnostics_defaults (global_dc);
5865 rebuild_type_inheritance_graph ();
5867 return 0;
5871 namespace {
5873 const pass_data pass_data_ipa_free_lang_data =
5875 SIMPLE_IPA_PASS, /* type */
5876 "*free_lang_data", /* name */
5877 OPTGROUP_NONE, /* optinfo_flags */
5878 TV_IPA_FREE_LANG_DATA, /* tv_id */
5879 0, /* properties_required */
5880 0, /* properties_provided */
5881 0, /* properties_destroyed */
5882 0, /* todo_flags_start */
5883 0, /* todo_flags_finish */
5886 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5888 public:
5889 pass_ipa_free_lang_data (gcc::context *ctxt)
5890 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5893 /* opt_pass methods: */
5894 virtual unsigned int execute (function *) { return free_lang_data (); }
5896 }; // class pass_ipa_free_lang_data
5898 } // anon namespace
5900 simple_ipa_opt_pass *
5901 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5903 return new pass_ipa_free_lang_data (ctxt);
5906 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5907 of the various TYPE_QUAL values. */
5909 static void
5910 set_type_quals (tree type, int type_quals)
5912 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5913 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5914 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5915 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5916 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5919 /* Returns true iff CAND and BASE have equivalent language-specific
5920 qualifiers. */
5922 bool
5923 check_lang_type (const_tree cand, const_tree base)
5925 if (lang_hooks.types.type_hash_eq == NULL)
5926 return true;
5927 /* type_hash_eq currently only applies to these types. */
5928 if (TREE_CODE (cand) != FUNCTION_TYPE
5929 && TREE_CODE (cand) != METHOD_TYPE)
5930 return true;
5931 return lang_hooks.types.type_hash_eq (cand, base);
5934 /* Returns true iff unqualified CAND and BASE are equivalent. */
5936 bool
5937 check_base_type (const_tree cand, const_tree base)
5939 return (TYPE_NAME (cand) == TYPE_NAME (base)
5940 /* Apparently this is needed for Objective-C. */
5941 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5942 /* Check alignment. */
5943 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5944 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5945 TYPE_ATTRIBUTES (base)));
5948 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5950 bool
5951 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5953 return (TYPE_QUALS (cand) == type_quals
5954 && check_base_type (cand, base)
5955 && check_lang_type (cand, base));
5958 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5960 static bool
5961 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5963 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5964 && TYPE_NAME (cand) == TYPE_NAME (base)
5965 /* Apparently this is needed for Objective-C. */
5966 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5967 /* Check alignment. */
5968 && TYPE_ALIGN (cand) == align
5969 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5970 TYPE_ATTRIBUTES (base))
5971 && check_lang_type (cand, base));
5974 /* This function checks to see if TYPE matches the size of one of the
5975 built-in atomic types, and returns that core atomic type. */
5977 static tree
5978 find_atomic_core_type (tree type)
5980 tree base_atomic_type;
5982 /* Only handle complete types. */
5983 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5984 return NULL_TREE;
5986 switch (tree_to_uhwi (TYPE_SIZE (type)))
5988 case 8:
5989 base_atomic_type = atomicQI_type_node;
5990 break;
5992 case 16:
5993 base_atomic_type = atomicHI_type_node;
5994 break;
5996 case 32:
5997 base_atomic_type = atomicSI_type_node;
5998 break;
6000 case 64:
6001 base_atomic_type = atomicDI_type_node;
6002 break;
6004 case 128:
6005 base_atomic_type = atomicTI_type_node;
6006 break;
6008 default:
6009 base_atomic_type = NULL_TREE;
6012 return base_atomic_type;
6015 /* Return a version of the TYPE, qualified as indicated by the
6016 TYPE_QUALS, if one exists. If no qualified version exists yet,
6017 return NULL_TREE. */
6019 tree
6020 get_qualified_type (tree type, int type_quals)
6022 tree t;
6024 if (TYPE_QUALS (type) == type_quals)
6025 return type;
6027 /* Search the chain of variants to see if there is already one there just
6028 like the one we need to have. If so, use that existing one. We must
6029 preserve the TYPE_NAME, since there is code that depends on this. */
6030 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6031 if (check_qualified_type (t, type, type_quals))
6032 return t;
6034 return NULL_TREE;
6037 /* Like get_qualified_type, but creates the type if it does not
6038 exist. This function never returns NULL_TREE. */
6040 tree
6041 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6043 tree t;
6045 /* See if we already have the appropriate qualified variant. */
6046 t = get_qualified_type (type, type_quals);
6048 /* If not, build it. */
6049 if (!t)
6051 t = build_variant_type_copy (type PASS_MEM_STAT);
6052 set_type_quals (t, type_quals);
6054 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6056 /* See if this object can map to a basic atomic type. */
6057 tree atomic_type = find_atomic_core_type (type);
6058 if (atomic_type)
6060 /* Ensure the alignment of this type is compatible with
6061 the required alignment of the atomic type. */
6062 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6063 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6067 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6068 /* Propagate structural equality. */
6069 SET_TYPE_STRUCTURAL_EQUALITY (t);
6070 else if (TYPE_CANONICAL (type) != type)
6071 /* Build the underlying canonical type, since it is different
6072 from TYPE. */
6074 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6075 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6077 else
6078 /* T is its own canonical type. */
6079 TYPE_CANONICAL (t) = t;
6083 return t;
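/* A minimal illustrative sketch (not part of GCC itself): obtaining the
   "const volatile" variant of TYPE.  An existing variant on the chain is
   reused, otherwise a new one is built.  The function name is
   hypothetical.  */
#if 0
static tree
example_const_volatile_variant (tree type)
{
  return build_qualified_type (type,
                               TYPE_QUALS (type)
                               | TYPE_QUAL_CONST
                               | TYPE_QUAL_VOLATILE);
}
#endif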
6086 /* Create a variant of type T with alignment ALIGN. */
6088 tree
6089 build_aligned_type (tree type, unsigned int align)
6091 tree t;
6093 if (TYPE_PACKED (type)
6094 || TYPE_ALIGN (type) == align)
6095 return type;
6097 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6098 if (check_aligned_type (t, type, align))
6099 return t;
6101 t = build_variant_type_copy (type);
6102 SET_TYPE_ALIGN (t, align);
6103 TYPE_USER_ALIGN (t) = 1;
6105 return t;
6108 /* Create a new distinct copy of TYPE. The new type is made its own
6109 MAIN_VARIANT. If TYPE requires structural equality checks, the
6110 resulting type requires structural equality checks; otherwise, its
6111 TYPE_CANONICAL points to itself. */
6113 tree
6114 build_distinct_type_copy (tree type MEM_STAT_DECL)
6116 tree t = copy_node (type PASS_MEM_STAT);
6118 TYPE_POINTER_TO (t) = 0;
6119 TYPE_REFERENCE_TO (t) = 0;
6121 /* Set the canonical type either to a new equivalence class, or
6122 propagate the need for structural equality checks. */
6123 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6124 SET_TYPE_STRUCTURAL_EQUALITY (t);
6125 else
6126 TYPE_CANONICAL (t) = t;
6128 /* Make it its own variant. */
6129 TYPE_MAIN_VARIANT (t) = t;
6130 TYPE_NEXT_VARIANT (t) = 0;
6132 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6133 whose TREE_TYPE is not t. This can also happen in the Ada
6134 frontend when using subtypes. */
6136 return t;
6139 /* Create a new variant of TYPE, equivalent but distinct. This is so
6140 the caller can modify it. TYPE_CANONICAL for the return type will
6141 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6142 are considered equal by the language itself (or that both types
6143 require structural equality checks). */
6145 tree
6146 build_variant_type_copy (tree type MEM_STAT_DECL)
6148 tree t, m = TYPE_MAIN_VARIANT (type);
6150 t = build_distinct_type_copy (type PASS_MEM_STAT);
6152 /* Since we're building a variant, assume that it is a non-semantic
6153 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6154 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6155 /* Type variants have no alias set defined. */
6156 TYPE_ALIAS_SET (t) = -1;
6158 /* Add the new type to the chain of variants of TYPE. */
6159 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6160 TYPE_NEXT_VARIANT (m) = t;
6161 TYPE_MAIN_VARIANT (t) = m;
6163 return t;
6166 /* Return true if the from trees in both tree maps are equal. */
6169 tree_map_base_eq (const void *va, const void *vb)
6171 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6172 *const b = (const struct tree_map_base *) vb;
6173 return (a->from == b->from);
6176 /* Hash a from tree in a tree_map_base. */
6178 unsigned int
6179 tree_map_base_hash (const void *item)
6181 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6184 /* Return true if this tree map structure is marked for garbage collection
6185 purposes. We simply return true if the from tree is marked, so that this
6186 structure goes away when the from tree goes away. */
6189 tree_map_base_marked_p (const void *p)
6191 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6194 /* Hash a from tree in a tree_map. */
6196 unsigned int
6197 tree_map_hash (const void *item)
6199 return (((const struct tree_map *) item)->hash);
6202 /* Hash a from tree in a tree_decl_map. */
6204 unsigned int
6205 tree_decl_map_hash (const void *item)
6207 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6210 /* Return the initialization priority for DECL. */
6212 priority_type
6213 decl_init_priority_lookup (tree decl)
6215 symtab_node *snode = symtab_node::get (decl);
6217 if (!snode)
6218 return DEFAULT_INIT_PRIORITY;
6219 return
6220 snode->get_init_priority ();
6223 /* Return the finalization priority for DECL. */
6225 priority_type
6226 decl_fini_priority_lookup (tree decl)
6228 cgraph_node *node = cgraph_node::get (decl);
6230 if (!node)
6231 return DEFAULT_INIT_PRIORITY;
6232 return
6233 node->get_fini_priority ();
6236 /* Set the initialization priority for DECL to PRIORITY. */
6238 void
6239 decl_init_priority_insert (tree decl, priority_type priority)
6241 struct symtab_node *snode;
6243 if (priority == DEFAULT_INIT_PRIORITY)
6245 snode = symtab_node::get (decl);
6246 if (!snode)
6247 return;
6249 else if (VAR_P (decl))
6250 snode = varpool_node::get_create (decl);
6251 else
6252 snode = cgraph_node::get_create (decl);
6253 snode->set_init_priority (priority);
6256 /* Set the finalization priority for DECL to PRIORITY. */
6258 void
6259 decl_fini_priority_insert (tree decl, priority_type priority)
6261 struct cgraph_node *node;
6263 if (priority == DEFAULT_INIT_PRIORITY)
6265 node = cgraph_node::get (decl);
6266 if (!node)
6267 return;
6269 else
6270 node = cgraph_node::get_create (decl);
6271 node->set_fini_priority (priority);
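/* A minimal sketch of how the priority routines above pair up, with DECL
   standing for some hypothetical function or variable declaration that the
   symbol table can represent:

       decl_init_priority_insert (decl, 200);
       priority_type p = decl_init_priority_lookup (decl);  -- now 200

   When nothing has been recorded, the lookup routines fall back to
   DEFAULT_INIT_PRIORITY.  */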
6274 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6276 static void
6277 print_debug_expr_statistics (void)
6279 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6280 (long) debug_expr_for_decl->size (),
6281 (long) debug_expr_for_decl->elements (),
6282 debug_expr_for_decl->collisions ());
6285 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6287 static void
6288 print_value_expr_statistics (void)
6290 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6291 (long) value_expr_for_decl->size (),
6292 (long) value_expr_for_decl->elements (),
6293 value_expr_for_decl->collisions ());
6296 /* Lookup a debug expression for FROM, and return it if we find one. */
6298 tree
6299 decl_debug_expr_lookup (tree from)
6301 struct tree_decl_map *h, in;
6302 in.base.from = from;
6304 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6305 if (h)
6306 return h->to;
6307 return NULL_TREE;
6310 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6312 void
6313 decl_debug_expr_insert (tree from, tree to)
6315 struct tree_decl_map *h;
6317 h = ggc_alloc<tree_decl_map> ();
6318 h->base.from = from;
6319 h->to = to;
6320 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6323 /* Lookup a value expression for FROM, and return it if we find one. */
6325 tree
6326 decl_value_expr_lookup (tree from)
6328 struct tree_decl_map *h, in;
6329 in.base.from = from;
6331 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6332 if (h)
6333 return h->to;
6334 return NULL_TREE;
6337 /* Insert a mapping FROM->TO in the value expression hashtable. */
6339 void
6340 decl_value_expr_insert (tree from, tree to)
6342 struct tree_decl_map *h;
6344 h = ggc_alloc<tree_decl_map> ();
6345 h->base.from = from;
6346 h->to = to;
6347 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
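/* A sketch of the intended pairing of the lookup/insert routines above,
   with DECL and REPL standing for hypothetical trees supplied by the
   caller (normally these calls are reached through the DECL_VALUE_EXPR
   and SET_DECL_VALUE_EXPR macros in tree.h):

       decl_value_expr_insert (decl, repl);
       ...
       tree e = decl_value_expr_lookup (decl);  -- REPL again

   The DECL_DEBUG_EXPR hashtable above is used the same way.  */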
6350 /* Lookup a vector of debug arguments for FROM, and return it if we
6351 find one. */
6353 vec<tree, va_gc> **
6354 decl_debug_args_lookup (tree from)
6356 struct tree_vec_map *h, in;
6358 if (!DECL_HAS_DEBUG_ARGS_P (from))
6359 return NULL;
6360 gcc_checking_assert (debug_args_for_decl != NULL);
6361 in.base.from = from;
6362 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6363 if (h)
6364 return &h->to;
6365 return NULL;
6368 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6369 arguments hashtable. */
6371 vec<tree, va_gc> **
6372 decl_debug_args_insert (tree from)
6374 struct tree_vec_map *h;
6375 tree_vec_map **loc;
6377 if (DECL_HAS_DEBUG_ARGS_P (from))
6378 return decl_debug_args_lookup (from);
6379 if (debug_args_for_decl == NULL)
6380 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6381 h = ggc_alloc<tree_vec_map> ();
6382 h->base.from = from;
6383 h->to = NULL;
6384 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6385 *loc = h;
6386 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6387 return &h->to;
6390 /* Hashing of types so that we don't make duplicates.
6391 The entry point is `type_hash_canon'. */
6393 /* Generate the default hash code for TYPE. This is designed for
6394 speed, rather than maximum entropy. */
6396 hashval_t
6397 type_hash_canon_hash (tree type)
6399 inchash::hash hstate;
6401 hstate.add_int (TREE_CODE (type));
6403 if (TREE_TYPE (type))
6404 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6406 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6407 /* Just the identifier is adequate to distinguish. */
6408 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6410 switch (TREE_CODE (type))
6412 case METHOD_TYPE:
6413 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6414 /* FALLTHROUGH. */
6415 case FUNCTION_TYPE:
6416 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6417 if (TREE_VALUE (t) != error_mark_node)
6418 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6419 break;
6421 case OFFSET_TYPE:
6422 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6423 break;
6425 case ARRAY_TYPE:
6427 if (TYPE_DOMAIN (type))
6428 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6429 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6431 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6432 hstate.add_object (typeless);
6435 break;
6437 case INTEGER_TYPE:
6439 tree t = TYPE_MAX_VALUE (type);
6440 if (!t)
6441 t = TYPE_MIN_VALUE (type);
6442 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6443 hstate.add_object (TREE_INT_CST_ELT (t, i));
6444 break;
6447 case REAL_TYPE:
6448 case FIXED_POINT_TYPE:
6450 unsigned prec = TYPE_PRECISION (type);
6451 hstate.add_object (prec);
6452 break;
6455 case VECTOR_TYPE:
6456 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6457 break;
6459 default:
6460 break;
6463 return hstate.end ();
6466 /* These are the Hashtable callback functions. */
6468 /* Returns true iff the types are equivalent. */
6470 bool
6471 type_cache_hasher::equal (type_hash *a, type_hash *b)
6473 /* First test the things that are the same for all types. */
6474 if (a->hash != b->hash
6475 || TREE_CODE (a->type) != TREE_CODE (b->type)
6476 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6477 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6478 TYPE_ATTRIBUTES (b->type))
6479 || (TREE_CODE (a->type) != COMPLEX_TYPE
6480 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6481 return 0;
6483 /* Be careful about comparing arrays before and after the element type
6484 has been completed; don't compare TYPE_ALIGN unless both types are
6485 complete. */
6486 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6487 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6488 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6489 return 0;
6491 switch (TREE_CODE (a->type))
6493 case VOID_TYPE:
6494 case COMPLEX_TYPE:
6495 case POINTER_TYPE:
6496 case REFERENCE_TYPE:
6497 case NULLPTR_TYPE:
6498 return 1;
6500 case VECTOR_TYPE:
6501 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6502 TYPE_VECTOR_SUBPARTS (b->type));
6504 case ENUMERAL_TYPE:
6505 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6506 && !(TYPE_VALUES (a->type)
6507 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6508 && TYPE_VALUES (b->type)
6509 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6510 && type_list_equal (TYPE_VALUES (a->type),
6511 TYPE_VALUES (b->type))))
6512 return 0;
6514 /* fall through */
6516 case INTEGER_TYPE:
6517 case REAL_TYPE:
6518 case BOOLEAN_TYPE:
6519 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6520 return false;
6521 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6522 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6523 TYPE_MAX_VALUE (b->type)))
6524 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6525 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6526 TYPE_MIN_VALUE (b->type))));
6528 case FIXED_POINT_TYPE:
6529 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6531 case OFFSET_TYPE:
6532 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6534 case METHOD_TYPE:
6535 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6536 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6537 || (TYPE_ARG_TYPES (a->type)
6538 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6539 && TYPE_ARG_TYPES (b->type)
6540 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6541 && type_list_equal (TYPE_ARG_TYPES (a->type),
6542 TYPE_ARG_TYPES (b->type)))))
6543 break;
6544 return 0;
6545 case ARRAY_TYPE:
6546 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6547 where the flag should be inherited from the element type
6548 and can change after ARRAY_TYPEs are created; on non-aggregates
6549 compare and hash it, since scalars will never have that flag set
6550 and we need to differentiate between arrays created by different
6551 front ends and middle-end-created arrays. */
6552 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6553 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6554 || (TYPE_TYPELESS_STORAGE (a->type)
6555 == TYPE_TYPELESS_STORAGE (b->type))));
6557 case RECORD_TYPE:
6558 case UNION_TYPE:
6559 case QUAL_UNION_TYPE:
6560 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6561 || (TYPE_FIELDS (a->type)
6562 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6563 && TYPE_FIELDS (b->type)
6564 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6565 && type_list_equal (TYPE_FIELDS (a->type),
6566 TYPE_FIELDS (b->type))));
6568 case FUNCTION_TYPE:
6569 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6570 || (TYPE_ARG_TYPES (a->type)
6571 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6572 && TYPE_ARG_TYPES (b->type)
6573 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6574 && type_list_equal (TYPE_ARG_TYPES (a->type),
6575 TYPE_ARG_TYPES (b->type))))
6576 break;
6577 return 0;
6579 default:
6580 return 0;
6583 if (lang_hooks.types.type_hash_eq != NULL)
6584 return lang_hooks.types.type_hash_eq (a->type, b->type);
6586 return 1;
6589 /* Given TYPE, and HASHCODE its hash code, return the canonical
6590 object for an identical type if one already exists.
6591 Otherwise, return TYPE, and record it as the canonical object.
6593 To use this function, first create a type of the sort you want.
6594 Then compute its hash code from the fields of the type that
6595 make it different from other similar types.
6596 Then call this function and use the value. */
6598 tree
6599 type_hash_canon (unsigned int hashcode, tree type)
6601 type_hash in;
6602 type_hash **loc;
6604 /* The hash table only contains main variants, so ensure that's what we're
6605 being passed. */
6606 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6608 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6609 must call that routine before comparing TYPE_ALIGNs. */
6610 layout_type (type);
6612 in.hash = hashcode;
6613 in.type = type;
6615 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6616 if (*loc)
6618 tree t1 = ((type_hash *) *loc)->type;
6619 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6620 if (TYPE_UID (type) + 1 == next_type_uid)
6621 --next_type_uid;
6622 /* Also free the min/max values and the cache for integer
6623 types. This can't be done in free_node, as LTO frees
6624 those on its own. */
6625 if (TREE_CODE (type) == INTEGER_TYPE)
6627 if (TYPE_MIN_VALUE (type)
6628 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6630 /* Zero is always in TYPE_CACHED_VALUES. */
6631 if (! TYPE_UNSIGNED (type))
6632 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6633 ggc_free (TYPE_MIN_VALUE (type));
6635 if (TYPE_MAX_VALUE (type)
6636 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6638 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6639 ggc_free (TYPE_MAX_VALUE (type));
6641 if (TYPE_CACHED_VALUES_P (type))
6642 ggc_free (TYPE_CACHED_VALUES (type));
6644 free_node (type);
6645 return t1;
6647 else
6649 struct type_hash *h;
6651 h = ggc_alloc<type_hash> ();
6652 h->hash = hashcode;
6653 h->type = type;
6654 *loc = h;
6656 return type;
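/* The recipe described above, sketched for a fresh INTEGER_TYPE (the same
   pattern build_range_type_1 and build_nonstandard_integer_type follow
   later in this file):

       tree t = make_node (INTEGER_TYPE);
       TYPE_PRECISION (t) = 24;
       fixup_unsigned_type (t);
       t = type_hash_canon (type_hash_canon_hash (t), t);

   If an identical type had already been registered, the freshly built node
   is freed and the previously registered node is returned instead.  */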
6660 static void
6661 print_type_hash_statistics (void)
6663 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6664 (long) type_hash_table->size (),
6665 (long) type_hash_table->elements (),
6666 type_hash_table->collisions ());
6669 /* Given two lists of types
6670 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6671 return 1 if the lists contain the same types in the same order.
6672 Also, the TREE_PURPOSEs must match. */
6675 type_list_equal (const_tree l1, const_tree l2)
6677 const_tree t1, t2;
6679 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6680 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6681 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6682 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6683 && (TREE_TYPE (TREE_PURPOSE (t1))
6684 == TREE_TYPE (TREE_PURPOSE (t2))))))
6685 return 0;
6687 return t1 == t2;
6690 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6691 given by TYPE. If the argument list accepts variable arguments,
6692 then this function counts only the ordinary arguments. */
6695 type_num_arguments (const_tree type)
6697 int i = 0;
6698 tree t;
6700 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6701 /* If the function does not take a variable number of arguments,
6702 the last element in the list will have type `void'. */
6703 if (VOID_TYPE_P (TREE_VALUE (t)))
6704 break;
6705 else
6706 ++i;
6708 return i;
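/* For example, for the FUNCTION_TYPE of "int f (int, double, ...)" the loop
   above never sees a terminating void node, so the result is 2; for
   "int g (void)" the void terminator is hit immediately and the result
   is 0.  */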
6711 /* Nonzero if integer constants T1 and T2
6712 represent the same constant value. */
6715 tree_int_cst_equal (const_tree t1, const_tree t2)
6717 if (t1 == t2)
6718 return 1;
6720 if (t1 == 0 || t2 == 0)
6721 return 0;
6723 if (TREE_CODE (t1) == INTEGER_CST
6724 && TREE_CODE (t2) == INTEGER_CST
6725 && wi::to_widest (t1) == wi::to_widest (t2))
6726 return 1;
6728 return 0;
6731 /* Return true if T is an INTEGER_CST whose numerical value (extended
6732 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6734 bool
6735 tree_fits_shwi_p (const_tree t)
6737 return (t != NULL_TREE
6738 && TREE_CODE (t) == INTEGER_CST
6739 && wi::fits_shwi_p (wi::to_widest (t)));
6742 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6743 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6745 bool
6746 tree_fits_poly_int64_p (const_tree t)
6748 if (t == NULL_TREE)
6749 return false;
6750 if (POLY_INT_CST_P (t))
6752 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6753 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6754 return false;
6755 return true;
6757 return (TREE_CODE (t) == INTEGER_CST
6758 && wi::fits_shwi_p (wi::to_widest (t)));
6761 /* Return true if T is an INTEGER_CST whose numerical value (extended
6762 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6764 bool
6765 tree_fits_uhwi_p (const_tree t)
6767 return (t != NULL_TREE
6768 && TREE_CODE (t) == INTEGER_CST
6769 && wi::fits_uhwi_p (wi::to_widest (t)));
6772 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6773 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6775 bool
6776 tree_fits_poly_uint64_p (const_tree t)
6778 if (t == NULL_TREE)
6779 return false;
6780 if (POLY_INT_CST_P (t))
6782 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6783 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6784 return false;
6785 return true;
6787 return (TREE_CODE (t) == INTEGER_CST
6788 && wi::fits_uhwi_p (wi::to_widest (t)));
6791 /* T is an INTEGER_CST whose numerical value (extended according to
6792 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6793 HOST_WIDE_INT. */
6795 HOST_WIDE_INT
6796 tree_to_shwi (const_tree t)
6798 gcc_assert (tree_fits_shwi_p (t));
6799 return TREE_INT_CST_LOW (t);
6802 /* T is an INTEGER_CST whose numerical value (extended according to
6803 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6804 HOST_WIDE_INT. */
6806 unsigned HOST_WIDE_INT
6807 tree_to_uhwi (const_tree t)
6809 gcc_assert (tree_fits_uhwi_p (t));
6810 return TREE_INT_CST_LOW (t);
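/* The usual pattern for the accessors above is to test first and convert
   second, e.g. with SIZE being some tree the caller obtained elsewhere:

       if (tree_fits_uhwi_p (size))
         {
           unsigned HOST_WIDE_INT n = tree_to_uhwi (size);
           ...
         }

   Calling tree_to_uhwi or tree_to_shwi on a value that does not fit trips
   the assertion above.  */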
6813 /* Return the most significant (sign) bit of T. */
6816 tree_int_cst_sign_bit (const_tree t)
6818 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6820 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6823 /* Return an indication of the sign of the integer constant T.
6824 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6825 Note that -1 will never be returned if T's type is unsigned. */
6828 tree_int_cst_sgn (const_tree t)
6830 if (wi::to_wide (t) == 0)
6831 return 0;
6832 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6833 return 1;
6834 else if (wi::neg_p (wi::to_wide (t)))
6835 return -1;
6836 else
6837 return 1;
6840 /* Return the minimum number of bits needed to represent VALUE in a
6841 signed or unsigned type; SGN says which. */
6843 unsigned int
6844 tree_int_cst_min_precision (tree value, signop sgn)
6846 /* If the value is negative, compute its negative minus 1. The latter
6847 adjustment is because the absolute value of the largest negative value
6848 is one larger than the largest positive value. This is equivalent to
6849 a bit-wise negation, so use that operation instead. */
6851 if (tree_int_cst_sgn (value) < 0)
6852 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6854 /* Return the number of bits needed, taking into account the fact
6855 that we need one more bit for a signed than unsigned type.
6856 If value is 0 or -1, the minimum precision is 1 no matter
6857 whether SGN is SIGNED or UNSIGNED. */
6859 if (integer_zerop (value))
6860 return 1;
6861 else
6862 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
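/* Worked examples of the computation above: for VALUE == 5 the result is
   tree_floor_log2 (5) + 1 == 3 bits when SGN is UNSIGNED and one more,
   4 bits, when SGN is SIGNED; for VALUE == -3 the bitwise negation yields
   2, giving 1 + 1 + 1 == 3 bits, which indeed holds -3 as a signed
   value.  */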
6865 /* Return truthvalue of whether T1 is the same tree structure as T2.
6866 Return 1 if they are the same.
6867 Return 0 if they are understandably different.
6868 Return -1 if either contains tree structure not understood by
6869 this function. */
6872 simple_cst_equal (const_tree t1, const_tree t2)
6874 enum tree_code code1, code2;
6875 int cmp;
6876 int i;
6878 if (t1 == t2)
6879 return 1;
6880 if (t1 == 0 || t2 == 0)
6881 return 0;
6883 code1 = TREE_CODE (t1);
6884 code2 = TREE_CODE (t2);
6886 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6888 if (CONVERT_EXPR_CODE_P (code2)
6889 || code2 == NON_LVALUE_EXPR)
6890 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6891 else
6892 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6895 else if (CONVERT_EXPR_CODE_P (code2)
6896 || code2 == NON_LVALUE_EXPR)
6897 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6899 if (code1 != code2)
6900 return 0;
6902 switch (code1)
6904 case INTEGER_CST:
6905 return wi::to_widest (t1) == wi::to_widest (t2);
6907 case REAL_CST:
6908 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6910 case FIXED_CST:
6911 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6913 case STRING_CST:
6914 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6915 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6916 TREE_STRING_LENGTH (t1)));
6918 case CONSTRUCTOR:
6920 unsigned HOST_WIDE_INT idx;
6921 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6922 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6924 if (vec_safe_length (v1) != vec_safe_length (v2))
6925 return false;
6927 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6928 /* ??? Should we handle also fields here? */
6929 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6930 return false;
6931 return true;
6934 case SAVE_EXPR:
6935 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6937 case CALL_EXPR:
6938 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6939 if (cmp <= 0)
6940 return cmp;
6941 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6942 return 0;
6944 const_tree arg1, arg2;
6945 const_call_expr_arg_iterator iter1, iter2;
6946 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6947 arg2 = first_const_call_expr_arg (t2, &iter2);
6948 arg1 && arg2;
6949 arg1 = next_const_call_expr_arg (&iter1),
6950 arg2 = next_const_call_expr_arg (&iter2))
6952 cmp = simple_cst_equal (arg1, arg2);
6953 if (cmp <= 0)
6954 return cmp;
6956 return arg1 == arg2;
6959 case TARGET_EXPR:
6960 /* Special case: if either target is an unallocated VAR_DECL,
6961 it means that it's going to be unified with whatever the
6962 TARGET_EXPR is really supposed to initialize, so treat it
6963 as being equivalent to anything. */
6964 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6965 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6966 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6967 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6968 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6969 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6970 cmp = 1;
6971 else
6972 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6974 if (cmp <= 0)
6975 return cmp;
6977 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6979 case WITH_CLEANUP_EXPR:
6980 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6981 if (cmp <= 0)
6982 return cmp;
6984 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6986 case COMPONENT_REF:
6987 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6988 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6990 return 0;
6992 case VAR_DECL:
6993 case PARM_DECL:
6994 case CONST_DECL:
6995 case FUNCTION_DECL:
6996 return 0;
6998 default:
6999 if (POLY_INT_CST_P (t1))
7000 /* A false return means maybe_ne rather than known_ne. */
7001 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7002 TYPE_SIGN (TREE_TYPE (t1))),
7003 poly_widest_int::from (poly_int_cst_value (t2),
7004 TYPE_SIGN (TREE_TYPE (t2))));
7005 break;
7008 /* This general rule works for most tree codes. All exceptions should be
7009 handled above. If this is a language-specific tree code, we can't
7010 trust what might be in the operand, so say we don't know
7011 the situation. */
7012 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7013 return -1;
7015 switch (TREE_CODE_CLASS (code1))
7017 case tcc_unary:
7018 case tcc_binary:
7019 case tcc_comparison:
7020 case tcc_expression:
7021 case tcc_reference:
7022 case tcc_statement:
7023 cmp = 1;
7024 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7026 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7027 if (cmp <= 0)
7028 return cmp;
7031 return cmp;
7033 default:
7034 return -1;
7038 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7039 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7040 than U, respectively. */
7043 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7045 if (tree_int_cst_sgn (t) < 0)
7046 return -1;
7047 else if (!tree_fits_uhwi_p (t))
7048 return 1;
7049 else if (TREE_INT_CST_LOW (t) == u)
7050 return 0;
7051 else if (TREE_INT_CST_LOW (t) < u)
7052 return -1;
7053 else
7054 return 1;
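/* A typical use of compare_tree_int is range checking an INTEGER_CST
   against a host constant without converting it first, e.g. with LEN
   being some INTEGER_CST held by the caller:

       if (compare_tree_int (len, 4) > 0)
         ... LEN is known to be larger than 4 ...

   The three-way result also copes with values too large for a HWI.  */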
7057 /* Return true if SIZE represents a constant size that is in bounds of
7058 what the middle-end and the backend accept (covering not more than
7059 half of the address space). */
7061 bool
7062 valid_constant_size_p (const_tree size)
7064 if (POLY_INT_CST_P (size))
7066 if (TREE_OVERFLOW (size))
7067 return false;
7068 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7069 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7070 return false;
7071 return true;
7073 if (! tree_fits_uhwi_p (size)
7074 || TREE_OVERFLOW (size)
7075 || tree_int_cst_sign_bit (size) != 0)
7076 return false;
7077 return true;
7080 /* Return the precision of the type, or for a complex or vector type the
7081 precision of the type of its elements. */
7083 unsigned int
7084 element_precision (const_tree type)
7086 if (!TYPE_P (type))
7087 type = TREE_TYPE (type);
7088 enum tree_code code = TREE_CODE (type);
7089 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7090 type = TREE_TYPE (type);
7092 return TYPE_PRECISION (type);
7095 /* Return true if CODE represents an associative tree code. Otherwise
7096 return false. */
7097 bool
7098 associative_tree_code (enum tree_code code)
7100 switch (code)
7102 case BIT_IOR_EXPR:
7103 case BIT_AND_EXPR:
7104 case BIT_XOR_EXPR:
7105 case PLUS_EXPR:
7106 case MULT_EXPR:
7107 case MIN_EXPR:
7108 case MAX_EXPR:
7109 return true;
7111 default:
7112 break;
7114 return false;
7117 /* Return true if CODE represents a commutative tree code. Otherwise
7118 return false. */
7119 bool
7120 commutative_tree_code (enum tree_code code)
7122 switch (code)
7124 case PLUS_EXPR:
7125 case MULT_EXPR:
7126 case MULT_HIGHPART_EXPR:
7127 case MIN_EXPR:
7128 case MAX_EXPR:
7129 case BIT_IOR_EXPR:
7130 case BIT_XOR_EXPR:
7131 case BIT_AND_EXPR:
7132 case NE_EXPR:
7133 case EQ_EXPR:
7134 case UNORDERED_EXPR:
7135 case ORDERED_EXPR:
7136 case UNEQ_EXPR:
7137 case LTGT_EXPR:
7138 case TRUTH_AND_EXPR:
7139 case TRUTH_XOR_EXPR:
7140 case TRUTH_OR_EXPR:
7141 case WIDEN_MULT_EXPR:
7142 case VEC_WIDEN_MULT_HI_EXPR:
7143 case VEC_WIDEN_MULT_LO_EXPR:
7144 case VEC_WIDEN_MULT_EVEN_EXPR:
7145 case VEC_WIDEN_MULT_ODD_EXPR:
7146 return true;
7148 default:
7149 break;
7151 return false;
7154 /* Return true if CODE represents a ternary tree code for which the
7155 first two operands are commutative. Otherwise return false. */
7156 bool
7157 commutative_ternary_tree_code (enum tree_code code)
7159 switch (code)
7161 case WIDEN_MULT_PLUS_EXPR:
7162 case WIDEN_MULT_MINUS_EXPR:
7163 case DOT_PROD_EXPR:
7164 case FMA_EXPR:
7165 return true;
7167 default:
7168 break;
7170 return false;
7173 /* Returns true if CODE can overflow. */
7175 bool
7176 operation_can_overflow (enum tree_code code)
7178 switch (code)
7180 case PLUS_EXPR:
7181 case MINUS_EXPR:
7182 case MULT_EXPR:
7183 case LSHIFT_EXPR:
7184 /* Can overflow in various ways. */
7185 return true;
7186 case TRUNC_DIV_EXPR:
7187 case EXACT_DIV_EXPR:
7188 case FLOOR_DIV_EXPR:
7189 case CEIL_DIV_EXPR:
7190 /* For INT_MIN / -1. */
7191 return true;
7192 case NEGATE_EXPR:
7193 case ABS_EXPR:
7194 /* For -INT_MIN. */
7195 return true;
7196 default:
7197 /* These operators cannot overflow. */
7198 return false;
7202 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7203 if -ftrapv doesn't generate trapping insns for CODE. */
7205 bool
7206 operation_no_trapping_overflow (tree type, enum tree_code code)
7208 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7210 /* We don't generate instructions that trap on overflow for complex or vector
7211 types. */
7212 if (!INTEGRAL_TYPE_P (type))
7213 return true;
7215 if (!TYPE_OVERFLOW_TRAPS (type))
7216 return true;
7218 switch (code)
7220 case PLUS_EXPR:
7221 case MINUS_EXPR:
7222 case MULT_EXPR:
7223 case NEGATE_EXPR:
7224 case ABS_EXPR:
7225 /* These operators can overflow, and -ftrapv generates trapping code for
7226 these. */
7227 return false;
7228 case TRUNC_DIV_EXPR:
7229 case EXACT_DIV_EXPR:
7230 case FLOOR_DIV_EXPR:
7231 case CEIL_DIV_EXPR:
7232 case LSHIFT_EXPR:
7233 /* These operators can overflow, but -ftrapv does not generate trapping
7234 code for these. */
7235 return true;
7236 default:
7237 /* These operators cannot overflow. */
7238 return true;
7242 namespace inchash
7245 /* Generate a hash value for an expression. This can be used iteratively
7246 by passing a previous result as the HSTATE argument.
7248 This function is intended to produce the same hash for expressions which
7249 would compare equal using operand_equal_p. */
7250 void
7251 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7253 int i;
7254 enum tree_code code;
7255 enum tree_code_class tclass;
7257 if (t == NULL_TREE || t == error_mark_node)
7259 hstate.merge_hash (0);
7260 return;
7263 if (!(flags & OEP_ADDRESS_OF))
7264 STRIP_NOPS (t);
7266 code = TREE_CODE (t);
7268 switch (code)
7270 /* Alas, constants aren't shared, so we can't rely on pointer
7271 identity. */
7272 case VOID_CST:
7273 hstate.merge_hash (0);
7274 return;
7275 case INTEGER_CST:
7276 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7277 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7278 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7279 return;
7280 case REAL_CST:
7282 unsigned int val2;
7283 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7284 val2 = rvc_zero;
7285 else
7286 val2 = real_hash (TREE_REAL_CST_PTR (t));
7287 hstate.merge_hash (val2);
7288 return;
7290 case FIXED_CST:
7292 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7293 hstate.merge_hash (val2);
7294 return;
7296 case STRING_CST:
7297 hstate.add ((const void *) TREE_STRING_POINTER (t),
7298 TREE_STRING_LENGTH (t));
7299 return;
7300 case COMPLEX_CST:
7301 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7302 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7303 return;
7304 case VECTOR_CST:
7306 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7307 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7308 unsigned int count = vector_cst_encoded_nelts (t);
7309 for (unsigned int i = 0; i < count; ++i)
7310 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7311 return;
7313 case SSA_NAME:
7314 /* We can just compare by pointer. */
7315 hstate.add_hwi (SSA_NAME_VERSION (t));
7316 return;
7317 case PLACEHOLDER_EXPR:
7318 /* The node itself doesn't matter. */
7319 return;
7320 case BLOCK:
7321 case OMP_CLAUSE:
7322 /* Ignore. */
7323 return;
7324 case TREE_LIST:
7325 /* A list of expressions, for a CALL_EXPR or as the elements of a
7326 VECTOR_CST. */
7327 for (; t; t = TREE_CHAIN (t))
7328 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7329 return;
7330 case CONSTRUCTOR:
7332 unsigned HOST_WIDE_INT idx;
7333 tree field, value;
7334 flags &= ~OEP_ADDRESS_OF;
7335 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7337 inchash::add_expr (field, hstate, flags);
7338 inchash::add_expr (value, hstate, flags);
7340 return;
7342 case STATEMENT_LIST:
7344 tree_stmt_iterator i;
7345 for (i = tsi_start (CONST_CAST_TREE (t));
7346 !tsi_end_p (i); tsi_next (&i))
7347 inchash::add_expr (tsi_stmt (i), hstate, flags);
7348 return;
7350 case TREE_VEC:
7351 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7352 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7353 return;
7354 case FUNCTION_DECL:
7355 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7356 Otherwise nodes that compare equal according to operand_equal_p might
7357 get different hash codes. However, don't do this for machine specific
7358 or front end builtins, since the function code is overloaded in those
7359 cases. */
7360 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7361 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7363 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7364 code = TREE_CODE (t);
7366 /* FALL THROUGH */
7367 default:
7368 if (POLY_INT_CST_P (t))
7370 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7371 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
7372 return;
7374 tclass = TREE_CODE_CLASS (code);
7376 if (tclass == tcc_declaration)
7378 /* DECLs have a unique ID. */
7379 hstate.add_hwi (DECL_UID (t));
7381 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7383 /* For comparisons that can be swapped, use the lower
7384 tree code. */
7385 enum tree_code ccode = swap_tree_comparison (code);
7386 if (code < ccode)
7387 ccode = code;
7388 hstate.add_object (ccode);
7389 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7390 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7392 else if (CONVERT_EXPR_CODE_P (code))
7394 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7395 operand_equal_p. */
7396 enum tree_code ccode = NOP_EXPR;
7397 hstate.add_object (ccode);
7399 /* Don't hash the type, that can lead to having nodes which
7400 compare equal according to operand_equal_p, but which
7401 have different hash codes. Make sure to include signedness
7402 in the hash computation. */
7403 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7404 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7406 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7407 else if (code == MEM_REF
7408 && (flags & OEP_ADDRESS_OF) != 0
7409 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7410 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7411 && integer_zerop (TREE_OPERAND (t, 1)))
7412 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7413 hstate, flags);
7414 /* Don't ICE on FE specific trees, or their arguments etc.
7415 during operand_equal_p hash verification. */
7416 else if (!IS_EXPR_CODE_CLASS (tclass))
7417 gcc_assert (flags & OEP_HASH_CHECK);
7418 else
7420 unsigned int sflags = flags;
7422 hstate.add_object (code);
7424 switch (code)
7426 case ADDR_EXPR:
7427 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7428 flags |= OEP_ADDRESS_OF;
7429 sflags = flags;
7430 break;
7432 case INDIRECT_REF:
7433 case MEM_REF:
7434 case TARGET_MEM_REF:
7435 flags &= ~OEP_ADDRESS_OF;
7436 sflags = flags;
7437 break;
7439 case ARRAY_REF:
7440 case ARRAY_RANGE_REF:
7441 case COMPONENT_REF:
7442 case BIT_FIELD_REF:
7443 sflags &= ~OEP_ADDRESS_OF;
7444 break;
7446 case COND_EXPR:
7447 flags &= ~OEP_ADDRESS_OF;
7448 break;
7450 case FMA_EXPR:
7451 case WIDEN_MULT_PLUS_EXPR:
7452 case WIDEN_MULT_MINUS_EXPR:
7454 /* The multiplication operands are commutative. */
7455 inchash::hash one, two;
7456 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7457 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7458 hstate.add_commutative (one, two);
7459 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7460 return;
7463 case CALL_EXPR:
7464 if (CALL_EXPR_FN (t) == NULL_TREE)
7465 hstate.add_int (CALL_EXPR_IFN (t));
7466 break;
7468 case TARGET_EXPR:
7469 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7470 Usually different TARGET_EXPRs just should use
7471 different temporaries in their slots. */
7472 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7473 return;
7475 default:
7476 break;
7479 /* Don't hash the type, that can lead to having nodes which
7480 compare equal according to operand_equal_p, but which
7481 have different hash codes. */
7482 if (code == NON_LVALUE_EXPR)
7484 /* Make sure to include signedness in the hash computation. */
7485 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7486 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7489 else if (commutative_tree_code (code))
7491 /* It's a commutative expression. We want to hash it the same
7492 however it appears. We do this by first hashing both operands
7493 and then rehashing based on the order of their independent
7494 hashes. */
7495 inchash::hash one, two;
7496 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7497 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7498 hstate.add_commutative (one, two);
7500 else
7501 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7502 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7503 i == 0 ? flags : sflags);
7505 return;
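/* A minimal sketch of the iterative hashing interface above, as used by
   several callers later in this file:

       inchash::hash hstate;
       inchash::add_expr (expr1, hstate);
       inchash::add_expr (expr2, hstate);
       hashval_t h = hstate.end ();

   Expressions that compare equal under operand_equal_p (given the same
   FLAGS) are intended to hash identically.  */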
7511 /* Constructors for pointer, array and function types.
7512 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7513 constructed by language-dependent code, not here.) */
7515 /* Construct, lay out and return the type of pointers to TO_TYPE with
7516 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7517 reference all of memory. If such a type has already been
7518 constructed, reuse it. */
7520 tree
7521 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7522 bool can_alias_all)
7524 tree t;
7525 bool could_alias = can_alias_all;
7527 if (to_type == error_mark_node)
7528 return error_mark_node;
7530 /* If the pointed-to type has the may_alias attribute set, force
7531 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7532 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7533 can_alias_all = true;
7535 /* In some cases, languages will have things that aren't a POINTER_TYPE
7536 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7537 In that case, return that type without regard to the rest of our
7538 operands.
7540 ??? This is a kludge, but consistent with the way this function has
7541 always operated and there doesn't seem to be a good way to avoid this
7542 at the moment. */
7543 if (TYPE_POINTER_TO (to_type) != 0
7544 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7545 return TYPE_POINTER_TO (to_type);
7547 /* First, if we already have a type for pointers to TO_TYPE and it's
7548 the proper mode, use it. */
7549 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7550 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7551 return t;
7553 t = make_node (POINTER_TYPE);
7555 TREE_TYPE (t) = to_type;
7556 SET_TYPE_MODE (t, mode);
7557 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7558 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7559 TYPE_POINTER_TO (to_type) = t;
7561 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7562 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7563 SET_TYPE_STRUCTURAL_EQUALITY (t);
7564 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7565 TYPE_CANONICAL (t)
7566 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7567 mode, false);
7569 /* Lay out the type. This function has many callers that are concerned
7570 with expression-construction, and this simplifies them all. */
7571 layout_type (t);
7573 return t;
7576 /* By default build pointers in ptr_mode. */
7578 tree
7579 build_pointer_type (tree to_type)
7581 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7582 : TYPE_ADDR_SPACE (to_type);
7583 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7584 return build_pointer_type_for_mode (to_type, pointer_mode, false);
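/* For instance, a plain "char *" type node can be obtained with

       tree pchar = build_pointer_type (char_type_node);

   whereas build_pointer_type_for_mode is the entry point to use when a
   non-default pointer mode (e.g. for a named address space) is needed.  */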
7587 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7589 tree
7590 build_reference_type_for_mode (tree to_type, machine_mode mode,
7591 bool can_alias_all)
7593 tree t;
7594 bool could_alias = can_alias_all;
7596 if (to_type == error_mark_node)
7597 return error_mark_node;
7599 /* If the pointed-to type has the may_alias attribute set, force
7600 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7601 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7602 can_alias_all = true;
7604 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7605 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7606 In that case, return that type without regard to the rest of our
7607 operands.
7609 ??? This is a kludge, but consistent with the way this function has
7610 always operated and there doesn't seem to be a good way to avoid this
7611 at the moment. */
7612 if (TYPE_REFERENCE_TO (to_type) != 0
7613 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7614 return TYPE_REFERENCE_TO (to_type);
7616 /* First, if we already have a type for references to TO_TYPE and it's
7617 the proper mode, use it. */
7618 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7619 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7620 return t;
7622 t = make_node (REFERENCE_TYPE);
7624 TREE_TYPE (t) = to_type;
7625 SET_TYPE_MODE (t, mode);
7626 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7627 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7628 TYPE_REFERENCE_TO (to_type) = t;
7630 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7631 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7632 SET_TYPE_STRUCTURAL_EQUALITY (t);
7633 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7634 TYPE_CANONICAL (t)
7635 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7636 mode, false);
7638 layout_type (t);
7640 return t;
7644 /* Build the node for the type of references-to-TO_TYPE by default
7645 in ptr_mode. */
7647 tree
7648 build_reference_type (tree to_type)
7650 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7651 : TYPE_ADDR_SPACE (to_type);
7652 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7653 return build_reference_type_for_mode (to_type, pointer_mode, false);
7656 #define MAX_INT_CACHED_PREC \
7657 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7658 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7660 /* Builds a signed or unsigned integer type of precision PRECISION.
7661 Used for C bitfields whose precision does not match that of
7662 built-in target types. */
7663 tree
7664 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7665 int unsignedp)
7667 tree itype, ret;
7669 if (unsignedp)
7670 unsignedp = MAX_INT_CACHED_PREC + 1;
7672 if (precision <= MAX_INT_CACHED_PREC)
7674 itype = nonstandard_integer_type_cache[precision + unsignedp];
7675 if (itype)
7676 return itype;
7679 itype = make_node (INTEGER_TYPE);
7680 TYPE_PRECISION (itype) = precision;
7682 if (unsignedp)
7683 fixup_unsigned_type (itype);
7684 else
7685 fixup_signed_type (itype);
7687 ret = itype;
7689 inchash::hash hstate;
7690 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7691 ret = type_hash_canon (hstate.end (), itype);
7692 if (precision <= MAX_INT_CACHED_PREC)
7693 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7695 return ret;
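/* For example, a 24-bit unsigned type (as needed for a 24-bit bit-field)
   would be obtained, and cached for reuse, with

       tree u24 = build_nonstandard_integer_type (24, 1);  */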
7698 #define MAX_BOOL_CACHED_PREC \
7699 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7700 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7702 /* Builds a boolean type of precision PRECISION.
7703 Used for boolean vectors to choose proper vector element size. */
7704 tree
7705 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7707 tree type;
7709 if (precision <= MAX_BOOL_CACHED_PREC)
7711 type = nonstandard_boolean_type_cache[precision];
7712 if (type)
7713 return type;
7716 type = make_node (BOOLEAN_TYPE);
7717 TYPE_PRECISION (type) = precision;
7718 fixup_signed_type (type);
7720 if (precision <= MAX_BOOL_CACHED_PREC)
7721 nonstandard_boolean_type_cache[precision] = type;
7723 return type;
7726 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7727 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7728 is true, reuse such a type that has already been constructed. */
7730 static tree
7731 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7733 tree itype = make_node (INTEGER_TYPE);
7735 TREE_TYPE (itype) = type;
7737 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7738 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7740 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7741 SET_TYPE_MODE (itype, TYPE_MODE (type));
7742 TYPE_SIZE (itype) = TYPE_SIZE (type);
7743 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7744 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7745 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7746 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7748 if (!shared)
7749 return itype;
7751 if ((TYPE_MIN_VALUE (itype)
7752 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7753 || (TYPE_MAX_VALUE (itype)
7754 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7756 /* Since we cannot reliably merge this type, we need to compare it using
7757 structural equality checks. */
7758 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7759 return itype;
7762 hashval_t hash = type_hash_canon_hash (itype);
7763 itype = type_hash_canon (hash, itype);
7765 return itype;
7768 /* Wrapper around build_range_type_1 with SHARED set to true. */
7770 tree
7771 build_range_type (tree type, tree lowval, tree highval)
7773 return build_range_type_1 (type, lowval, highval, true);
7776 /* Wrapper around build_range_type_1 with SHARED set to false. */
7778 tree
7779 build_nonshared_range_type (tree type, tree lowval, tree highval)
7781 return build_range_type_1 (type, lowval, highval, false);
7784 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7785 MAXVAL should be the maximum value in the domain
7786 (one less than the length of the array).
7788 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7789 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7790 The limit exists because the result is a signed type and we don't handle
7791 sizes that use more than one HOST_WIDE_INT. */
7793 tree
7794 build_index_type (tree maxval)
7796 return build_range_type (sizetype, size_zero_node, maxval);
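/* Putting the routines above together, a domain and array type for a
   ten-element array of characters could be built as

       tree domain = build_index_type (size_int (9));
       tree char10 = build_array_type (char_type_node, domain, false);

   using build_array_type, defined just below, with its TYPELESS_STORAGE
   argument set to false.  */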
7799 /* Return true if the debug information for TYPE, a subtype, should be emitted
7800 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7801 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7802 debug info and doesn't reflect the source code. */
7804 bool
7805 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7807 tree base_type = TREE_TYPE (type), low, high;
7809 /* Subrange types have a base type which is an integral type. */
7810 if (!INTEGRAL_TYPE_P (base_type))
7811 return false;
7813 /* Get the real bounds of the subtype. */
7814 if (lang_hooks.types.get_subrange_bounds)
7815 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7816 else
7818 low = TYPE_MIN_VALUE (type);
7819 high = TYPE_MAX_VALUE (type);
7822 /* If the type and its base type have the same representation and the same
7823 name, then the type is not a subrange but a copy of the base type. */
7824 if ((TREE_CODE (base_type) == INTEGER_TYPE
7825 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7826 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7827 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7828 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7829 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7830 return false;
7832 if (lowval)
7833 *lowval = low;
7834 if (highval)
7835 *highval = high;
7836 return true;
7839 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7840 and number of elements specified by the range of values of INDEX_TYPE.
7841 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7842 If SHARED is true, reuse such a type that has already been constructed. */
7844 static tree
7845 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7846 bool shared)
7848 tree t;
7850 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7852 error ("arrays of functions are not meaningful");
7853 elt_type = integer_type_node;
7856 t = make_node (ARRAY_TYPE);
7857 TREE_TYPE (t) = elt_type;
7858 TYPE_DOMAIN (t) = index_type;
7859 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7860 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7861 layout_type (t);
7863 /* If the element type is incomplete at this point we get marked for
7864 structural equality. Do not record these types in the canonical
7865 type hashtable. */
7866 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7867 return t;
7869 if (shared)
7871 hashval_t hash = type_hash_canon_hash (t);
7872 t = type_hash_canon (hash, t);
7875 if (TYPE_CANONICAL (t) == t)
7877 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7878 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7879 || in_lto_p)
7880 SET_TYPE_STRUCTURAL_EQUALITY (t);
7881 else if (TYPE_CANONICAL (elt_type) != elt_type
7882 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7883 TYPE_CANONICAL (t)
7884 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7885 index_type
7886 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7887 typeless_storage, shared);
7890 return t;
7893 /* Wrapper around build_array_type_1 with SHARED set to true. */
7895 tree
7896 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7898 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
7901 /* Wrapper around build_array_type_1 with SHARED set to false. */
7903 tree
7904 build_nonshared_array_type (tree elt_type, tree index_type)
7906 return build_array_type_1 (elt_type, index_type, false, false);
7909 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7910 sizetype. */
7912 tree
7913 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7915 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7918 /* Recursively examines the array elements of TYPE, until a non-array
7919 element type is found. */
7921 tree
7922 strip_array_types (tree type)
7924 while (TREE_CODE (type) == ARRAY_TYPE)
7925 type = TREE_TYPE (type);
7927 return type;
7930 /* Computes the canonical argument types from the argument type list
7931 ARGTYPES.
7933 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7934 on entry to this function, or if any of the ARGTYPES are
7935 structural.
7937 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7938 true on entry to this function, or if any of the ARGTYPES are
7939 non-canonical.
7941 Returns a canonical argument list, which may be ARGTYPES when the
7942 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7943 true) or would not differ from ARGTYPES. */
7945 static tree
7946 maybe_canonicalize_argtypes (tree argtypes,
7947 bool *any_structural_p,
7948 bool *any_noncanonical_p)
7950 tree arg;
7951 bool any_noncanonical_argtypes_p = false;
7953 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7955 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7956 /* Fail gracefully by stating that the type is structural. */
7957 *any_structural_p = true;
7958 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7959 *any_structural_p = true;
7960 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7961 || TREE_PURPOSE (arg))
7962 /* If the argument has a default argument, we consider it
7963 non-canonical even though the type itself is canonical.
7964 That way, different variants of function and method types
7965 with default arguments will all point to the variant with
7966 no defaults as their canonical type. */
7967 any_noncanonical_argtypes_p = true;
7970 if (*any_structural_p)
7971 return argtypes;
7973 if (any_noncanonical_argtypes_p)
7975 /* Build the canonical list of argument types. */
7976 tree canon_argtypes = NULL_TREE;
7977 bool is_void = false;
7979 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7981 if (arg == void_list_node)
7982 is_void = true;
7983 else
7984 canon_argtypes = tree_cons (NULL_TREE,
7985 TYPE_CANONICAL (TREE_VALUE (arg)),
7986 canon_argtypes);
7989 canon_argtypes = nreverse (canon_argtypes);
7990 if (is_void)
7991 canon_argtypes = chainon (canon_argtypes, void_list_node);
7993 /* There is a non-canonical type. */
7994 *any_noncanonical_p = true;
7995 return canon_argtypes;
7998 /* The canonical argument types are the same as ARGTYPES. */
7999 return argtypes;
8002 /* Construct, lay out and return
8003 the type of functions returning type VALUE_TYPE
8004 given arguments of types ARG_TYPES.
8005 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8006 are data type nodes for the arguments of the function.
8007 If such a type has already been constructed, reuse it. */
8009 tree
8010 build_function_type (tree value_type, tree arg_types)
8012 tree t;
8013 inchash::hash hstate;
8014 bool any_structural_p, any_noncanonical_p;
8015 tree canon_argtypes;
8017 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8019 error ("function return type cannot be function");
8020 value_type = integer_type_node;
8023 /* Make a node of the sort we want. */
8024 t = make_node (FUNCTION_TYPE);
8025 TREE_TYPE (t) = value_type;
8026 TYPE_ARG_TYPES (t) = arg_types;
8028 /* If we already have such a type, use the old one. */
8029 hashval_t hash = type_hash_canon_hash (t);
8030 t = type_hash_canon (hash, t);
8032 /* Set up the canonical type. */
8033 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8034 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8035 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8036 &any_structural_p,
8037 &any_noncanonical_p);
8038 if (any_structural_p)
8039 SET_TYPE_STRUCTURAL_EQUALITY (t);
8040 else if (any_noncanonical_p)
8041 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8042 canon_argtypes);
8044 if (!COMPLETE_TYPE_P (t))
8045 layout_type (t);
8046 return t;
8049 /* Build a function type. The RETURN_TYPE is the type returned by the
8050 function. If VAARGS is set, no void_type_node is appended to the
8051 list. ARGP must always be terminated by a NULL_TREE. */
8053 static tree
8054 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8056 tree t, args, last;
8058 t = va_arg (argp, tree);
8059 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8060 args = tree_cons (NULL_TREE, t, args);
8062 if (vaargs)
8064 last = args;
8065 if (args != NULL_TREE)
8066 args = nreverse (args);
8067 gcc_assert (last != void_list_node);
8069 else if (args == NULL_TREE)
8070 args = void_list_node;
8071 else
8073 last = args;
8074 args = nreverse (args);
8075 TREE_CHAIN (last) = void_list_node;
8077 args = build_function_type (return_type, args);
8079 return args;
8082 /* Build a function type. The RETURN_TYPE is the type returned by the
8083 function. If additional arguments are provided, they are
8084 additional argument types. The list of argument types must always
8085 be terminated by NULL_TREE. */
8087 tree
8088 build_function_type_list (tree return_type, ...)
8090 tree args;
8091 va_list p;
8093 va_start (p, return_type);
8094 args = build_function_type_list_1 (false, return_type, p);
8095 va_end (p);
8096 return args;
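/* A common idiom, e.g. when declaring built-in functions, is

       tree fntype
         = build_function_type_list (void_type_node, ptr_type_node,
                                     integer_type_node, NULL_TREE);

   which builds the type of "void f (void *, int)"; omitting the
   terminating NULL_TREE makes the va_list walk above read past the end of
   the arguments.  */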
8099 /* Build a variable argument function type. The RETURN_TYPE is the
8100 type returned by the function. If additional arguments are provided,
8101 they are additional argument types. The list of argument types must
8102 always be terminated by NULL_TREE. */
8104 tree
8105 build_varargs_function_type_list (tree return_type, ...)
8107 tree args;
8108 va_list p;
8110 va_start (p, return_type);
8111 args = build_function_type_list_1 (true, return_type, p);
8112 va_end (p);
8114 return args;
8117 /* Build a function type. RETURN_TYPE is the type returned by the
8118 function; VAARGS indicates whether the function takes varargs. The
8119 function takes N named arguments, the types of which are provided in
8120 ARG_TYPES. */
8122 static tree
8123 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8124 tree *arg_types)
8126 int i;
8127 tree t = vaargs ? NULL_TREE : void_list_node;
8129 for (i = n - 1; i >= 0; i--)
8130 t = tree_cons (NULL_TREE, arg_types[i], t);
8132 return build_function_type (return_type, t);
8135 /* Build a function type. RETURN_TYPE is the type returned by the
8136 function. The function takes N named arguments, the types of which
8137 are provided in ARG_TYPES. */
8139 tree
8140 build_function_type_array (tree return_type, int n, tree *arg_types)
8142 return build_function_type_array_1 (false, return_type, n, arg_types);
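/* Sketch: the array-based variant is convenient when the argument types
   have already been collected into a vector.  */
#if 0
  tree argtv[2] = { integer_type_node, double_type_node };
  tree fntype = build_function_type_array (void_type_node, 2, argtv);
#endif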
8145 /* Build a variable argument function type. RETURN_TYPE is the type
8146 returned by the function. The function takes N named arguments, the
8147 types of which are provided in ARG_TYPES. */
8149 tree
8150 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8152 return build_function_type_array_1 (true, return_type, n, arg_types);
8155 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8156 and ARGTYPES (a TREE_LIST) are the return type and argument types
8157 for the method. An implicit additional parameter (of type
8158 pointer-to-BASETYPE) is added to the ARGTYPES. */
8160 tree
8161 build_method_type_directly (tree basetype,
8162 tree rettype,
8163 tree argtypes)
8165 tree t;
8166 tree ptype;
8167 bool any_structural_p, any_noncanonical_p;
8168 tree canon_argtypes;
8170 /* Make a node of the sort we want. */
8171 t = make_node (METHOD_TYPE);
8173 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8174 TREE_TYPE (t) = rettype;
8175 ptype = build_pointer_type (basetype);
8177 /* The actual arglist for this function includes a "hidden" argument
8178 which is "this". Put it into the list of argument types. */
8179 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8180 TYPE_ARG_TYPES (t) = argtypes;
8182 /* If we already have such a type, use the old one. */
8183 hashval_t hash = type_hash_canon_hash (t);
8184 t = type_hash_canon (hash, t);
8186 /* Set up the canonical type. */
8187 any_structural_p
8188 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8189 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8190 any_noncanonical_p
8191 = (TYPE_CANONICAL (basetype) != basetype
8192 || TYPE_CANONICAL (rettype) != rettype);
8193 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8194 &any_structural_p,
8195 &any_noncanonical_p);
8196 if (any_structural_p)
8197 SET_TYPE_STRUCTURAL_EQUALITY (t);
8198 else if (any_noncanonical_p)
8199 TYPE_CANONICAL (t)
8200 = build_method_type_directly (TYPE_CANONICAL (basetype),
8201 TYPE_CANONICAL (rettype),
8202 canon_argtypes);
8203 if (!COMPLETE_TYPE_P (t))
8204 layout_type (t);
8206 return t;
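/* Sketch, with "klass" standing for some already-built RECORD_TYPE: the
   implicit "this" parameter (pointer-to-klass) is prepended by
   build_method_type_directly itself, so ARGTYPES lists only the explicit
   parameters.  */
#if 0
  tree mtype = build_method_type_directly (klass, integer_type_node,
					   void_list_node);
#endif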
8209 /* Construct, lay out and return the type of methods belonging to class
8210 BASETYPE and whose arguments and values are described by TYPE.
8211 If that type exists already, reuse it.
8212 TYPE must be a FUNCTION_TYPE node. */
8214 tree
8215 build_method_type (tree basetype, tree type)
8217 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8219 return build_method_type_directly (basetype,
8220 TREE_TYPE (type),
8221 TYPE_ARG_TYPES (type));
8224 /* Construct, lay out and return the type of offsets to a value
8225 of type TYPE, within an object of type BASETYPE.
8226 If a suitable offset type exists already, reuse it. */
8228 tree
8229 build_offset_type (tree basetype, tree type)
8231 tree t;
8233 /* Make a node of the sort we want. */
8234 t = make_node (OFFSET_TYPE);
8236 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8237 TREE_TYPE (t) = type;
8239 /* If we already have such a type, use the old one. */
8240 hashval_t hash = type_hash_canon_hash (t);
8241 t = type_hash_canon (hash, t);
8243 if (!COMPLETE_TYPE_P (t))
8244 layout_type (t);
8246 if (TYPE_CANONICAL (t) == t)
8248 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8249 || TYPE_STRUCTURAL_EQUALITY_P (type))
8250 SET_TYPE_STRUCTURAL_EQUALITY (t);
8251 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8252 || TYPE_CANONICAL (type) != type)
8253 TYPE_CANONICAL (t)
8254 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8255 TYPE_CANONICAL (type));
8258 return t;
8261 /* Create a complex type whose components are COMPONENT_TYPE.
8263 If NAMED is true, the type is given a TYPE_NAME. We do not always
8264 do so because this creates a DECL node and thus makes the DECL_UIDs
8265 dependent on the type canonicalization hashtable, which is GC-ed,
8266 so the DECL_UIDs would not be stable with respect to garbage collection. */
8268 tree
8269 build_complex_type (tree component_type, bool named)
8271 gcc_assert (INTEGRAL_TYPE_P (component_type)
8272 || SCALAR_FLOAT_TYPE_P (component_type)
8273 || FIXED_POINT_TYPE_P (component_type));
8275 /* Make a node of the sort we want. */
8276 tree probe = make_node (COMPLEX_TYPE);
8278 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8280 /* If we already have such a type, use the old one. */
8281 hashval_t hash = type_hash_canon_hash (probe);
8282 tree t = type_hash_canon (hash, probe);
8284 if (t == probe)
8286 /* We created a new type. The hash insertion will have laid
8287 out the type. We need to check the canonicalization and
8288 maybe set the name. */
8289 gcc_checking_assert (COMPLETE_TYPE_P (t)
8290 && !TYPE_NAME (t)
8291 && TYPE_CANONICAL (t) == t);
8293 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8294 SET_TYPE_STRUCTURAL_EQUALITY (t);
8295 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8296 TYPE_CANONICAL (t)
8297 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8299 /* We need to create a name, since complex is a fundamental type. */
8300 if (named)
8302 const char *name = NULL;
8304 if (TREE_TYPE (t) == char_type_node)
8305 name = "complex char";
8306 else if (TREE_TYPE (t) == signed_char_type_node)
8307 name = "complex signed char";
8308 else if (TREE_TYPE (t) == unsigned_char_type_node)
8309 name = "complex unsigned char";
8310 else if (TREE_TYPE (t) == short_integer_type_node)
8311 name = "complex short int";
8312 else if (TREE_TYPE (t) == short_unsigned_type_node)
8313 name = "complex short unsigned int";
8314 else if (TREE_TYPE (t) == integer_type_node)
8315 name = "complex int";
8316 else if (TREE_TYPE (t) == unsigned_type_node)
8317 name = "complex unsigned int";
8318 else if (TREE_TYPE (t) == long_integer_type_node)
8319 name = "complex long int";
8320 else if (TREE_TYPE (t) == long_unsigned_type_node)
8321 name = "complex long unsigned int";
8322 else if (TREE_TYPE (t) == long_long_integer_type_node)
8323 name = "complex long long int";
8324 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8325 name = "complex long long unsigned int";
8327 if (name != NULL)
8328 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8329 get_identifier (name), t);
8333 return build_qualified_type (t, TYPE_QUALS (component_type));
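/* Sketch: the front ends use this to create the standard complex types;
   passing NAMED as true attaches a TYPE_DECL such as "complex double"
   for debug output.  */
#if 0
  tree cdouble = build_complex_type (double_type_node, true);
#endif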
8336 /* If TYPE is a real or complex floating-point type and the target
8337 does not directly support arithmetic on TYPE then return the wider
8338 type to be used for arithmetic on TYPE. Otherwise, return
8339 NULL_TREE. */
8341 tree
8342 excess_precision_type (tree type)
8344 /* The target can give two different responses to the question of
8345 which excess precision mode it would like depending on whether we
8346 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8348 enum excess_precision_type requested_type
8349 = (flag_excess_precision == EXCESS_PRECISION_FAST
8350 ? EXCESS_PRECISION_TYPE_FAST
8351 : EXCESS_PRECISION_TYPE_STANDARD);
8353 enum flt_eval_method target_flt_eval_method
8354 = targetm.c.excess_precision (requested_type);
8356 /* The target should not ask for unpredictable float evaluation (though
8357 it might advertise implicitly that the evaluation is unpredictable,
8358 but we don't care about that here; it will have been reported
8359 elsewhere). If it does ask for unpredictable evaluation, we have
8360 nothing to do here. */
8361 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8363 /* Nothing to do. The target has asked for all types we know about
8364 to be computed with their native precision and range. */
8365 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8366 return NULL_TREE;
8368 /* The target will promote this type in a target-dependent way, so excess
8369 precision ought to leave it alone. */
8370 if (targetm.promoted_type (type) != NULL_TREE)
8371 return NULL_TREE;
8373 machine_mode float16_type_mode = (float16_type_node
8374 ? TYPE_MODE (float16_type_node)
8375 : VOIDmode);
8376 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8377 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8379 switch (TREE_CODE (type))
8381 case REAL_TYPE:
8383 machine_mode type_mode = TYPE_MODE (type);
8384 switch (target_flt_eval_method)
8386 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8387 if (type_mode == float16_type_mode)
8388 return float_type_node;
8389 break;
8390 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8391 if (type_mode == float16_type_mode
8392 || type_mode == float_type_mode)
8393 return double_type_node;
8394 break;
8395 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8396 if (type_mode == float16_type_mode
8397 || type_mode == float_type_mode
8398 || type_mode == double_type_mode)
8399 return long_double_type_node;
8400 break;
8401 default:
8402 gcc_unreachable ();
8404 break;
8406 case COMPLEX_TYPE:
8408 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8409 return NULL_TREE;
8410 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8411 switch (target_flt_eval_method)
8413 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8414 if (type_mode == float16_type_mode)
8415 return complex_float_type_node;
8416 break;
8417 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8418 if (type_mode == float16_type_mode
8419 || type_mode == float_type_mode)
8420 return complex_double_type_node;
8421 break;
8422 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8423 if (type_mode == float16_type_mode
8424 || type_mode == float_type_mode
8425 || type_mode == double_type_mode)
8426 return complex_long_double_type_node;
8427 break;
8428 default:
8429 gcc_unreachable ();
8431 break;
8433 default:
8434 break;
8437 return NULL_TREE;
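/* Sketch of the intended use, assuming a target whose excess-precision
   hook answers FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE (x87 float math is
   the classic example): float and double arithmetic is then carried out
   in long double.  */
#if 0
  tree wide = excess_precision_type (float_type_node);
  /* wide is long_double_type_node on such a target, and NULL_TREE where
     the target computes float natively.  */
#endif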
8440 /* Return OP, stripped of any conversions to wider types as much as is safe.
8441 Converting the value back to OP's type makes a value equivalent to OP.
8443 If FOR_TYPE is nonzero, we return a value which, if converted to
8444 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8446 OP must have integer, real or enumeral type. Pointers are not allowed!
8448 There are some cases where the obvious value we could return
8449 would regenerate to OP if converted to OP's type,
8450 but would not extend like OP to wider types.
8451 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8452 For example, if OP is (unsigned short)(signed char)-1,
8453 we avoid returning (signed char)-1 if FOR_TYPE is int,
8454 even though extending that to an unsigned short would regenerate OP,
8455 since the result of extending (signed char)-1 to (int)
8456 is different from (int) OP. */
8458 tree
8459 get_unwidened (tree op, tree for_type)
8461 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8462 tree type = TREE_TYPE (op);
8463 unsigned final_prec
8464 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8465 int uns
8466 = (for_type != 0 && for_type != type
8467 && final_prec > TYPE_PRECISION (type)
8468 && TYPE_UNSIGNED (type));
8469 tree win = op;
8471 while (CONVERT_EXPR_P (op))
8473 int bitschange;
8475 /* TYPE_PRECISION on vector types has different meaning
8476 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8477 so avoid them here. */
8478 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8479 break;
8481 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8482 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8484 /* Truncations are many-one so cannot be removed.
8485 Unless we are later going to truncate down even farther. */
8486 if (bitschange < 0
8487 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8488 break;
8490 /* See what's inside this conversion. If we decide to strip it,
8491 we will set WIN. */
8492 op = TREE_OPERAND (op, 0);
8494 /* If we have not stripped any zero-extensions (uns is 0),
8495 we can strip any kind of extension.
8496 If we have previously stripped a zero-extension,
8497 only zero-extensions can safely be stripped.
8498 Any extension can be stripped if the bits it would produce
8499 are all going to be discarded later by truncating to FOR_TYPE. */
8501 if (bitschange > 0)
8503 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8504 win = op;
8505 /* TYPE_UNSIGNED says whether this is a zero-extension.
8506 Let's avoid computing it if it does not affect WIN
8507 and if UNS will not be needed again. */
8508 if ((uns
8509 || CONVERT_EXPR_P (op))
8510 && TYPE_UNSIGNED (TREE_TYPE (op)))
8512 uns = 1;
8513 win = op;
8518 /* If we finally reach a constant, see if it fits in something smaller
8519 and in that case convert it. */
8520 if (TREE_CODE (win) == INTEGER_CST)
8522 tree wtype = TREE_TYPE (win);
8523 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8524 if (for_type)
8525 prec = MAX (prec, final_prec);
8526 if (prec < TYPE_PRECISION (wtype))
8528 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8529 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8530 win = fold_convert (t, win);
8534 return win;
8537 /* Return OP or a simpler expression for a narrower value
8538 which can be sign-extended or zero-extended to give back OP.
8539 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8540 or 0 if the value should be sign-extended. */
8542 tree
8543 get_narrower (tree op, int *unsignedp_ptr)
8545 int uns = 0;
8546 int first = 1;
8547 tree win = op;
8548 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8550 while (TREE_CODE (op) == NOP_EXPR)
8552 int bitschange
8553 = (TYPE_PRECISION (TREE_TYPE (op))
8554 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8556 /* Truncations are many-one so cannot be removed. */
8557 if (bitschange < 0)
8558 break;
8560 /* See what's inside this conversion. If we decide to strip it,
8561 we will set WIN. */
8563 if (bitschange > 0)
8565 op = TREE_OPERAND (op, 0);
8566 /* An extension: the outermost one can be stripped,
8567 but remember whether it is zero or sign extension. */
8568 if (first)
8569 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8570 /* Otherwise, if a sign extension has been stripped,
8571 only sign extensions can now be stripped;
8572 if a zero extension has been stripped, only zero-extensions. */
8573 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8574 break;
8575 first = 0;
8577 else /* bitschange == 0 */
8579 /* A change in nominal type can always be stripped, but we must
8580 preserve the unsignedness. */
8581 if (first)
8582 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8583 first = 0;
8584 op = TREE_OPERAND (op, 0);
8585 /* Keep trying to narrow, but don't assign op to win if it
8586 would turn an integral type into something else. */
8587 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8588 continue;
8591 win = op;
8594 if (TREE_CODE (op) == COMPONENT_REF
8595 /* Since type_for_size always gives an integer type. */
8596 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8597 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8598 /* Ensure field is laid out already. */
8599 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8600 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8602 unsigned HOST_WIDE_INT innerprec
8603 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8604 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8605 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8606 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8608 /* We can get this structure field in a narrower type that fits it,
8609 but the resulting extension to its nominal type (a fullword type)
8610 must satisfy the same conditions as for other extensions.
8612 Do this only for fields that are aligned (not bit-fields),
8613 because when bit-field insns are used there is no
8614 advantage in doing this. */
8616 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8617 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8618 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8619 && type != 0)
8621 if (first)
8622 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8623 win = fold_convert (type, op);
8627 *unsignedp_ptr = uns;
8628 return win;
8631 /* Return true if integer constant C has a value that is permissible
8632 for TYPE, an integral type. */
8634 bool
8635 int_fits_type_p (const_tree c, const_tree type)
8637 tree type_low_bound, type_high_bound;
8638 bool ok_for_low_bound, ok_for_high_bound;
8639 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8641 /* Non-standard boolean types can have arbitrary precision but various
8642 transformations assume that they can only take values 0 and +/-1. */
8643 if (TREE_CODE (type) == BOOLEAN_TYPE)
8644 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8646 retry:
8647 type_low_bound = TYPE_MIN_VALUE (type);
8648 type_high_bound = TYPE_MAX_VALUE (type);
8650 /* If at least one bound of the type is a constant integer, we can check
8651 ourselves and maybe make a decision. If no such decision is possible, but
8652 this type is a subtype, try checking against that. Otherwise, use
8653 fits_to_tree_p, which checks against the precision.
8655 Compute the status for each possibly constant bound, and return if we see
8656 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8657 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8658 for "constant known to fit". */
8660 /* Check if c >= type_low_bound. */
8661 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8663 if (tree_int_cst_lt (c, type_low_bound))
8664 return false;
8665 ok_for_low_bound = true;
8667 else
8668 ok_for_low_bound = false;
8670 /* Check if c <= type_high_bound. */
8671 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8673 if (tree_int_cst_lt (type_high_bound, c))
8674 return false;
8675 ok_for_high_bound = true;
8677 else
8678 ok_for_high_bound = false;
8680 /* If the constant fits both bounds, the result is known. */
8681 if (ok_for_low_bound && ok_for_high_bound)
8682 return true;
8684 /* Perform some generic filtering which may allow making a decision
8685 even if the bounds are not constant. First, negative integers
8686 never fit in unsigned types. */
8687 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8688 return false;
8690 /* Second, narrower types always fit in wider ones. */
8691 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8692 return true;
8694 /* Third, unsigned integers with top bit set never fit signed types. */
8695 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8697 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8698 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8700 /* When a tree_cst is converted to a wide-int, the precision
8701 is taken from the type. However, if the precision of the
8702 mode underneath the type is smaller than that, it is
8703 possible that the value will not fit. The test below
8704 fails if any bit is set between the sign bit of the
8705 underlying mode and the top bit of the type. */
8706 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8707 return false;
8709 else if (wi::neg_p (wi::to_wide (c)))
8710 return false;
8713 /* If we haven't been able to decide at this point, there is nothing more we
8714 can check ourselves here. Look at the base type if we have one and it
8715 has the same precision. */
8716 if (TREE_CODE (type) == INTEGER_TYPE
8717 && TREE_TYPE (type) != 0
8718 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8720 type = TREE_TYPE (type);
8721 goto retry;
8724 /* Or to fits_to_tree_p, if nothing else. */
8725 return wi::fits_to_tree_p (wi::to_wide (c), type);
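/* Sketch: the constant's own type does not have to match TYPE; only its
   value is tested against TYPE's range.  */
#if 0
  tree c = build_int_cst (integer_type_node, 300);
  bool a = int_fits_type_p (c, signed_char_type_node);    /* false */
  bool b = int_fits_type_p (c, short_integer_type_node);  /* true */
#endif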
8728 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8729 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8730 represented (assuming two's-complement arithmetic) within the bit
8731 precision of the type are returned instead. */
8733 void
8734 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8736 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8737 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8738 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8739 else
8741 if (TYPE_UNSIGNED (type))
8742 mpz_set_ui (min, 0);
8743 else
8745 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8746 wi::to_mpz (mn, min, SIGNED);
8750 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8751 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8752 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8753 else
8755 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8756 wi::to_mpz (mn, max, TYPE_SIGN (type));
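/* Sketch, using GMP as elsewhere in the middle end; the caller owns and
   must initialize and clear the mpz_t objects.  */
#if 0
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (integer_type_node, lo, hi);
  /* lo and hi now hold the minimum and maximum of the target's int.  */
  mpz_clear (lo);
  mpz_clear (hi);
#endif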
8760 /* Return true if VAR is an automatic variable defined in function FN. */
8762 bool
8763 auto_var_in_fn_p (const_tree var, const_tree fn)
8765 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8766 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8767 || TREE_CODE (var) == PARM_DECL)
8768 && ! TREE_STATIC (var))
8769 || TREE_CODE (var) == LABEL_DECL
8770 || TREE_CODE (var) == RESULT_DECL));
8773 /* Subprogram of following function. Called by walk_tree.
8775 Return *TP if it is an automatic variable or parameter of the
8776 function passed in as DATA. */
8778 static tree
8779 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8781 tree fn = (tree) data;
8783 if (TYPE_P (*tp))
8784 *walk_subtrees = 0;
8786 else if (DECL_P (*tp)
8787 && auto_var_in_fn_p (*tp, fn))
8788 return *tp;
8790 return NULL_TREE;
8793 /* Returns true if T is, contains, or refers to a type with variable
8794 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8795 arguments, but not the return type. If FN is nonzero, only return
8796 true if a modifier of the type or position of FN is a variable or
8797 parameter inside FN.
8799 This concept is more general than that of C99 'variably modified types':
8800 in C99, a struct type is never variably modified because a VLA may not
8801 appear as a structure member. However, in GNU C, code like:
8803 struct S { int i[f()]; };
8805 is valid, and other languages may define similar constructs. */
8807 bool
8808 variably_modified_type_p (tree type, tree fn)
8810 tree t;
8812 /* Test if T is either variable (if FN is zero) or an expression containing
8813 a variable in FN. If TYPE isn't gimplified, return true also if
8814 gimplify_one_sizepos would gimplify the expression into a local
8815 variable. */
8816 #define RETURN_TRUE_IF_VAR(T) \
8817 do { tree _t = (T); \
8818 if (_t != NULL_TREE \
8819 && _t != error_mark_node \
8820 && !CONSTANT_CLASS_P (_t) \
8821 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8822 && (!fn \
8823 || (!TYPE_SIZES_GIMPLIFIED (type) \
8824 && (TREE_CODE (_t) != VAR_DECL \
8825 && !CONTAINS_PLACEHOLDER_P (_t))) \
8826 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8827 return true; } while (0)
8829 if (type == error_mark_node)
8830 return false;
8832 /* If TYPE itself has variable size, it is variably modified. */
8833 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8834 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8836 switch (TREE_CODE (type))
8838 case POINTER_TYPE:
8839 case REFERENCE_TYPE:
8840 case VECTOR_TYPE:
8841 /* Ada can have pointer types referring to themselves indirectly. */
8842 if (TREE_VISITED (type))
8843 return false;
8844 TREE_VISITED (type) = true;
8845 if (variably_modified_type_p (TREE_TYPE (type), fn))
8847 TREE_VISITED (type) = false;
8848 return true;
8850 TREE_VISITED (type) = false;
8851 break;
8853 case FUNCTION_TYPE:
8854 case METHOD_TYPE:
8855 /* If TYPE is a function type, it is variably modified if the
8856 return type is variably modified. */
8857 if (variably_modified_type_p (TREE_TYPE (type), fn))
8858 return true;
8859 break;
8861 case INTEGER_TYPE:
8862 case REAL_TYPE:
8863 case FIXED_POINT_TYPE:
8864 case ENUMERAL_TYPE:
8865 case BOOLEAN_TYPE:
8866 /* Scalar types are variably modified if their end points
8867 aren't constant. */
8868 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8869 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8870 break;
8872 case RECORD_TYPE:
8873 case UNION_TYPE:
8874 case QUAL_UNION_TYPE:
8875 /* We can't see if any of the fields are variably-modified by the
8876 definition we normally use, since that would produce infinite
8877 recursion via pointers. */
8878 /* This is variably modified if some field's type is. */
8879 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8880 if (TREE_CODE (t) == FIELD_DECL)
8882 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8883 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8884 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8886 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8887 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8889 break;
8891 case ARRAY_TYPE:
8892 /* Do not call ourselves to avoid infinite recursion. This is
8893 variably modified if the element type is. */
8894 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8895 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8896 break;
8898 default:
8899 break;
8902 /* The current language may have other cases to check, but in general,
8903 all other types are not variably modified. */
8904 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8906 #undef RETURN_TRUE_IF_VAR
8909 /* Given a DECL or TYPE, return the scope in which it was declared, or
8910 NULL_TREE if there is no containing scope. */
8912 tree
8913 get_containing_scope (const_tree t)
8915 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8918 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8920 const_tree
8921 get_ultimate_context (const_tree decl)
8923 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8925 if (TREE_CODE (decl) == BLOCK)
8926 decl = BLOCK_SUPERCONTEXT (decl);
8927 else
8928 decl = get_containing_scope (decl);
8930 return decl;
8933 /* Return the innermost context enclosing DECL that is
8934 a FUNCTION_DECL, or zero if none. */
8936 tree
8937 decl_function_context (const_tree decl)
8939 tree context;
8941 if (TREE_CODE (decl) == ERROR_MARK)
8942 return 0;
8944 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8945 where we look up the function at runtime. Such functions always take
8946 a first argument of type 'pointer to real context'.
8948 C++ should really be fixed to use DECL_CONTEXT for the real context,
8949 and use something else for the "virtual context". */
8950 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8951 context
8952 = TYPE_MAIN_VARIANT
8953 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8954 else
8955 context = DECL_CONTEXT (decl);
8957 while (context && TREE_CODE (context) != FUNCTION_DECL)
8959 if (TREE_CODE (context) == BLOCK)
8960 context = BLOCK_SUPERCONTEXT (context);
8961 else
8962 context = get_containing_scope (context);
8965 return context;
8968 /* Return the innermost context enclosing DECL that is
8969 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8970 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8972 tree
8973 decl_type_context (const_tree decl)
8975 tree context = DECL_CONTEXT (decl);
8977 while (context)
8978 switch (TREE_CODE (context))
8980 case NAMESPACE_DECL:
8981 case TRANSLATION_UNIT_DECL:
8982 return NULL_TREE;
8984 case RECORD_TYPE:
8985 case UNION_TYPE:
8986 case QUAL_UNION_TYPE:
8987 return context;
8989 case TYPE_DECL:
8990 case FUNCTION_DECL:
8991 context = DECL_CONTEXT (context);
8992 break;
8994 case BLOCK:
8995 context = BLOCK_SUPERCONTEXT (context);
8996 break;
8998 default:
8999 gcc_unreachable ();
9002 return NULL_TREE;
9005 /* CALL is a CALL_EXPR. Return the declaration for the function
9006 called, or NULL_TREE if the called function cannot be
9007 determined. */
9009 tree
9010 get_callee_fndecl (const_tree call)
9012 tree addr;
9014 if (call == error_mark_node)
9015 return error_mark_node;
9017 /* It's invalid to call this function with anything but a
9018 CALL_EXPR. */
9019 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9021 /* The first operand to the CALL is the address of the function
9022 called. */
9023 addr = CALL_EXPR_FN (call);
9025 /* If there is no function, return early. */
9026 if (addr == NULL_TREE)
9027 return NULL_TREE;
9029 STRIP_NOPS (addr);
9031 /* If this is a readonly function pointer, extract its initial value. */
9032 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9033 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9034 && DECL_INITIAL (addr))
9035 addr = DECL_INITIAL (addr);
9037 /* If the address is just `&f' for some function `f', then we know
9038 that `f' is being called. */
9039 if (TREE_CODE (addr) == ADDR_EXPR
9040 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9041 return TREE_OPERAND (addr, 0);
9043 /* We couldn't figure out what was being called. */
9044 return NULL_TREE;
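/* Sketch, with "fndecl" standing for some FUNCTION_DECL taking one int:
   for a direct call the declaration is recovered from the ADDR_EXPR that
   build_call_expr installs as CALL_EXPR_FN.  */
#if 0
  tree call = build_call_expr (fndecl, 1, integer_zero_node);
  gcc_assert (get_callee_fndecl (call) == fndecl);
#endif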
9047 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9048 return the associated function code, otherwise return CFN_LAST. */
9050 combined_fn
9051 get_call_combined_fn (const_tree call)
9053 /* It's invalid to call this function with anything but a CALL_EXPR. */
9054 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9056 if (!CALL_EXPR_FN (call))
9057 return as_combined_fn (CALL_EXPR_IFN (call));
9059 tree fndecl = get_callee_fndecl (call);
9060 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9061 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9063 return CFN_LAST;
9066 #define TREE_MEM_USAGE_SPACES 40
9068 /* Print debugging information about tree nodes generated during the compile,
9069 and any language-specific information. */
9071 void
9072 dump_tree_statistics (void)
9074 if (GATHER_STATISTICS)
9076 int i;
9077 uint64_t total_nodes, total_bytes;
9078 fprintf (stderr, "\nKind Nodes Bytes\n");
9079 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9080 total_nodes = total_bytes = 0;
9081 for (i = 0; i < (int) all_kinds; i++)
9083 fprintf (stderr, "%-20s %7" PRIu64 " %10" PRIu64 "\n",
9084 tree_node_kind_names[i], tree_node_counts[i],
9085 tree_node_sizes[i]);
9086 total_nodes += tree_node_counts[i];
9087 total_bytes += tree_node_sizes[i];
9089 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9090 fprintf (stderr, "%-20s %7" PRIu64 " %10" PRIu64 "\n", "Total",
9091 total_nodes, total_bytes);
9092 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9093 fprintf (stderr, "Code Nodes\n");
9094 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9095 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9096 fprintf (stderr, "%-32s %7" PRIu64 "\n",
9097 get_tree_code_name ((enum tree_code) i), tree_code_counts[i]);
9098 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9099 fprintf (stderr, "\n");
9100 ssanames_print_statistics ();
9101 fprintf (stderr, "\n");
9102 phinodes_print_statistics ();
9103 fprintf (stderr, "\n");
9105 else
9106 fprintf (stderr, "(No per-node statistics)\n");
9108 print_type_hash_statistics ();
9109 print_debug_expr_statistics ();
9110 print_value_expr_statistics ();
9111 lang_hooks.print_statistics ();
9114 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9116 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9118 unsigned
9119 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9121 /* This relies on the raw feedback's top 4 bits being zero. */
9122 #define FEEDBACK(X) ((X) * 0x04c11db7)
9123 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9124 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9125 static const unsigned syndromes[16] =
9127 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9128 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9129 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9130 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9132 #undef FEEDBACK
9133 #undef SYNDROME
9135 value <<= (32 - bytes * 8);
9136 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9138 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9140 chksum = (chksum << 4) ^ feedback;
9143 return chksum;
9146 /* Generate a crc32 of a string. */
9148 unsigned
9149 crc32_string (unsigned chksum, const char *string)
9151 do
9152 chksum = crc32_byte (chksum, *string);
9153 while (*string++);
9154 return chksum;
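/* Sketch: the checksum can be chained across several strings; note that
   the terminating NUL of each string is included in the CRC.  */
#if 0
  unsigned chk = crc32_string (0, "foo");
  chk = crc32_string (chk, "bar");
#endif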
9157 /* P is a string that will be used in a symbol. Mask out any characters
9158 that are not valid in that context. */
9160 void
9161 clean_symbol_name (char *p)
9163 for (; *p; p++)
9164 if (! (ISALNUM (*p)
9165 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9166 || *p == '$'
9167 #endif
9168 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9169 || *p == '.'
9170 #endif
9172 *p = '_';
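/* Sketch: the buffer is rewritten in place, mapping characters the
   assembler may reject to '_'.  */
#if 0
  char buf[] = "ns::Foo<int>";
  clean_symbol_name (buf);   /* buf becomes "ns__Foo_int_" (with '.' and
				'$' kept only where the target allows
				them in labels).  */
#endif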
9175 /* For anonymous aggregate types, we need some sort of name to
9176 hold on to. In practice, this should not appear, but it should
9177 not be harmful if it does. */
9178 bool
9179 anon_aggrname_p(const_tree id_node)
9181 #ifndef NO_DOT_IN_LABEL
9182 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9183 && IDENTIFIER_POINTER (id_node)[1] == '_');
9184 #else /* NO_DOT_IN_LABEL */
9185 #ifndef NO_DOLLAR_IN_LABEL
9186 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9187 && IDENTIFIER_POINTER (id_node)[1] == '_');
9188 #else /* NO_DOLLAR_IN_LABEL */
9189 #define ANON_AGGRNAME_PREFIX "__anon_"
9190 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9191 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9192 #endif /* NO_DOLLAR_IN_LABEL */
9193 #endif /* NO_DOT_IN_LABEL */
9196 /* Return a format for an anonymous aggregate name. */
9197 const char *
9198 anon_aggrname_format()
9200 #ifndef NO_DOT_IN_LABEL
9201 return "._%d";
9202 #else /* NO_DOT_IN_LABEL */
9203 #ifndef NO_DOLLAR_IN_LABEL
9204 return "$_%d";
9205 #else /* NO_DOLLAR_IN_LABEL */
9206 return "__anon_%d";
9207 #endif /* NO_DOLLAR_IN_LABEL */
9208 #endif /* NO_DOT_IN_LABEL */
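/* Sketch of how the two routines pair up, on a host where '.' is allowed
   in labels.  */
#if 0
  char name[32];
  sprintf (name, anon_aggrname_format (), 42);         /* "._42" */
  bool anon = anon_aggrname_p (get_identifier (name)); /* true */
#endif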
9211 /* Generate a name for a special-purpose function.
9212 The generated name may need to be unique across the whole link.
9213 Changes to this function may also require corresponding changes to
9214 xstrdup_mask_random.
9215 TYPE is some string to identify the purpose of this function to the
9216 linker or collect2; it must start with an uppercase letter,
9217 one of:
9218 I - for constructors
9219 D - for destructors
9220 N - for C++ anonymous namespaces
9221 F - for DWARF unwind frame information. */
9223 tree
9224 get_file_function_name (const char *type)
9226 char *buf;
9227 const char *p;
9228 char *q;
9230 /* If we already have a name we know to be unique, just use that. */
9231 if (first_global_object_name)
9232 p = q = ASTRDUP (first_global_object_name);
9233 /* If the target is handling the constructors/destructors, they
9234 will be local to this file and the name is only necessary for
9235 debugging purposes.
9236 We also assign sub_I and sub_D suffixes to constructors called from
9237 the global static constructors. These are always local. */
9238 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9239 || (strncmp (type, "sub_", 4) == 0
9240 && (type[4] == 'I' || type[4] == 'D')))
9242 const char *file = main_input_filename;
9243 if (! file)
9244 file = LOCATION_FILE (input_location);
9245 /* Just use the file's basename, because the full pathname
9246 might be quite long. */
9247 p = q = ASTRDUP (lbasename (file));
9249 else
9251 /* Otherwise, the name must be unique across the entire link.
9252 We don't have anything that we know to be unique to this translation
9253 unit, so use what we do have and throw in some randomness. */
9254 unsigned len;
9255 const char *name = weak_global_object_name;
9256 const char *file = main_input_filename;
9258 if (! name)
9259 name = "";
9260 if (! file)
9261 file = LOCATION_FILE (input_location);
9263 len = strlen (file);
9264 q = (char *) alloca (9 + 19 + len + 1);
9265 memcpy (q, file, len + 1);
9267 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9268 crc32_string (0, name), get_random_seed (false));
9270 p = q;
9273 clean_symbol_name (q);
9274 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9275 + strlen (type));
9277 /* Set up the name of the file-level functions we may need.
9278 Use a global object (which is already required to be unique over
9279 the program) rather than the file name (which imposes extra
9280 constraints). */
9281 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9283 return get_identifier (buf);
9286 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9288 /* Complain that the tree code of NODE does not match the expected 0
9289 terminated list of trailing codes. The trailing code list can be
9290 empty, for a more vague error message. FILE, LINE, and FUNCTION
9291 are of the caller. */
9293 void
9294 tree_check_failed (const_tree node, const char *file,
9295 int line, const char *function, ...)
9297 va_list args;
9298 const char *buffer;
9299 unsigned length = 0;
9300 enum tree_code code;
9302 va_start (args, function);
9303 while ((code = (enum tree_code) va_arg (args, int)))
9304 length += 4 + strlen (get_tree_code_name (code));
9305 va_end (args);
9306 if (length)
9308 char *tmp;
9309 va_start (args, function);
9310 length += strlen ("expected ");
9311 buffer = tmp = (char *) alloca (length);
9312 length = 0;
9313 while ((code = (enum tree_code) va_arg (args, int)))
9315 const char *prefix = length ? " or " : "expected ";
9317 strcpy (tmp + length, prefix);
9318 length += strlen (prefix);
9319 strcpy (tmp + length, get_tree_code_name (code));
9320 length += strlen (get_tree_code_name (code));
9322 va_end (args);
9324 else
9325 buffer = "unexpected node";
9327 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9328 buffer, get_tree_code_name (TREE_CODE (node)),
9329 function, trim_filename (file), line);
9332 /* Complain that the tree code of NODE does match the expected 0
9333 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9334 the caller. */
9336 void
9337 tree_not_check_failed (const_tree node, const char *file,
9338 int line, const char *function, ...)
9340 va_list args;
9341 char *buffer;
9342 unsigned length = 0;
9343 enum tree_code code;
9345 va_start (args, function);
9346 while ((code = (enum tree_code) va_arg (args, int)))
9347 length += 4 + strlen (get_tree_code_name (code));
9348 va_end (args);
9349 va_start (args, function);
9350 buffer = (char *) alloca (length);
9351 length = 0;
9352 while ((code = (enum tree_code) va_arg (args, int)))
9354 if (length)
9356 strcpy (buffer + length, " or ");
9357 length += 4;
9359 strcpy (buffer + length, get_tree_code_name (code));
9360 length += strlen (get_tree_code_name (code));
9362 va_end (args);
9364 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9365 buffer, get_tree_code_name (TREE_CODE (node)),
9366 function, trim_filename (file), line);
9369 /* Similar to tree_check_failed, except that we check for a class of tree
9370 code, given in CL. */
9372 void
9373 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9374 const char *file, int line, const char *function)
9376 internal_error
9377 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9378 TREE_CODE_CLASS_STRING (cl),
9379 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9380 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9383 /* Similar to tree_check_failed, except that instead of specifying a
9384 dozen codes, use the knowledge that they're all sequential. */
9386 void
9387 tree_range_check_failed (const_tree node, const char *file, int line,
9388 const char *function, enum tree_code c1,
9389 enum tree_code c2)
9391 char *buffer;
9392 unsigned length = 0;
9393 unsigned int c;
9395 for (c = c1; c <= c2; ++c)
9396 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9398 length += strlen ("expected ");
9399 buffer = (char *) alloca (length);
9400 length = 0;
9402 for (c = c1; c <= c2; ++c)
9404 const char *prefix = length ? " or " : "expected ";
9406 strcpy (buffer + length, prefix);
9407 length += strlen (prefix);
9408 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9409 length += strlen (get_tree_code_name ((enum tree_code) c));
9412 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9413 buffer, get_tree_code_name (TREE_CODE (node)),
9414 function, trim_filename (file), line);
9418 /* Similar to tree_check_failed, except that we check that a tree does
9419 not have the specified class, given in CL. */
9421 void
9422 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9423 const char *file, int line, const char *function)
9425 internal_error
9426 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9427 TREE_CODE_CLASS_STRING (cl),
9428 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9429 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9433 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9435 void
9436 omp_clause_check_failed (const_tree node, const char *file, int line,
9437 const char *function, enum omp_clause_code code)
9439 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9440 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9441 function, trim_filename (file), line);
9445 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9447 void
9448 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9449 const char *function, enum omp_clause_code c1,
9450 enum omp_clause_code c2)
9452 char *buffer;
9453 unsigned length = 0;
9454 unsigned int c;
9456 for (c = c1; c <= c2; ++c)
9457 length += 4 + strlen (omp_clause_code_name[c]);
9459 length += strlen ("expected ");
9460 buffer = (char *) alloca (length);
9461 length = 0;
9463 for (c = c1; c <= c2; ++c)
9465 const char *prefix = length ? " or " : "expected ";
9467 strcpy (buffer + length, prefix);
9468 length += strlen (prefix);
9469 strcpy (buffer + length, omp_clause_code_name[c]);
9470 length += strlen (omp_clause_code_name[c]);
9473 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9474 buffer, omp_clause_code_name[TREE_CODE (node)],
9475 function, trim_filename (file), line);
9479 #undef DEFTREESTRUCT
9480 #define DEFTREESTRUCT(VAL, NAME) NAME,
9482 static const char *ts_enum_names[] = {
9483 #include "treestruct.def"
9485 #undef DEFTREESTRUCT
9487 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9489 /* Similar to tree_class_check_failed, except that we check for
9490 whether CODE contains the tree structure identified by EN. */
9492 void
9493 tree_contains_struct_check_failed (const_tree node,
9494 const enum tree_node_structure_enum en,
9495 const char *file, int line,
9496 const char *function)
9498 internal_error
9499 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9500 TS_ENUM_NAME (en),
9501 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9505 /* Similar to above, except that the check is for the bounds of a
9506 TREE_INT_CST's (dynamically sized) element vector. */
9508 void
9509 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9510 const char *function)
9512 internal_error
9513 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9514 idx + 1, len, function, trim_filename (file), line);
9517 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9518 (dynamically sized) vector. */
9520 void
9521 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9522 const char *function)
9524 internal_error
9525 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9526 idx + 1, len, function, trim_filename (file), line);
9529 /* Similar to above, except that the check is for the bounds of the operand
9530 vector of an expression node EXP. */
9532 void
9533 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9534 int line, const char *function)
9536 enum tree_code code = TREE_CODE (exp);
9537 internal_error
9538 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9539 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9540 function, trim_filename (file), line);
9543 /* Similar to above, except that the check is for the number of
9544 operands of an OMP_CLAUSE node. */
9546 void
9547 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9548 int line, const char *function)
9550 internal_error
9551 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9552 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9553 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9554 trim_filename (file), line);
9556 #endif /* ENABLE_TREE_CHECKING */
9558 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9559 and mapped to the machine mode MODE. Initialize its fields and build
9560 the information necessary for debugging output. */
9562 static tree
9563 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9565 tree t;
9566 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9568 t = make_node (VECTOR_TYPE);
9569 TREE_TYPE (t) = mv_innertype;
9570 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9571 SET_TYPE_MODE (t, mode);
9573 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9574 SET_TYPE_STRUCTURAL_EQUALITY (t);
9575 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9576 || mode != VOIDmode)
9577 && !VECTOR_BOOLEAN_TYPE_P (t))
9578 TYPE_CANONICAL (t)
9579 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9581 layout_type (t);
9583 hashval_t hash = type_hash_canon_hash (t);
9584 t = type_hash_canon (hash, t);
9586 /* We have built a main variant, based on the main variant of the
9587 inner type. Use it to build the variant we return. */
9588 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9589 && TREE_TYPE (t) != innertype)
9590 return build_type_attribute_qual_variant (t,
9591 TYPE_ATTRIBUTES (innertype),
9592 TYPE_QUALS (innertype));
9594 return t;
9597 static tree
9598 make_or_reuse_type (unsigned size, int unsignedp)
9600 int i;
9602 if (size == INT_TYPE_SIZE)
9603 return unsignedp ? unsigned_type_node : integer_type_node;
9604 if (size == CHAR_TYPE_SIZE)
9605 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9606 if (size == SHORT_TYPE_SIZE)
9607 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9608 if (size == LONG_TYPE_SIZE)
9609 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9610 if (size == LONG_LONG_TYPE_SIZE)
9611 return (unsignedp ? long_long_unsigned_type_node
9612 : long_long_integer_type_node);
9614 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9615 if (size == int_n_data[i].bitsize
9616 && int_n_enabled_p[i])
9617 return (unsignedp ? int_n_trees[i].unsigned_type
9618 : int_n_trees[i].signed_type);
9620 if (unsignedp)
9621 return make_unsigned_type (size);
9622 else
9623 return make_signed_type (size);
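/* Sketch: on a target where int is 32 bits this hands back
   unsigned_type_node itself rather than creating a fresh node.  */
#if 0
  tree u32 = make_or_reuse_type (32, 1);
#endif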
9626 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9628 static tree
9629 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9631 if (satp)
9633 if (size == SHORT_FRACT_TYPE_SIZE)
9634 return unsignedp ? sat_unsigned_short_fract_type_node
9635 : sat_short_fract_type_node;
9636 if (size == FRACT_TYPE_SIZE)
9637 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9638 if (size == LONG_FRACT_TYPE_SIZE)
9639 return unsignedp ? sat_unsigned_long_fract_type_node
9640 : sat_long_fract_type_node;
9641 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9642 return unsignedp ? sat_unsigned_long_long_fract_type_node
9643 : sat_long_long_fract_type_node;
9645 else
9647 if (size == SHORT_FRACT_TYPE_SIZE)
9648 return unsignedp ? unsigned_short_fract_type_node
9649 : short_fract_type_node;
9650 if (size == FRACT_TYPE_SIZE)
9651 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9652 if (size == LONG_FRACT_TYPE_SIZE)
9653 return unsignedp ? unsigned_long_fract_type_node
9654 : long_fract_type_node;
9655 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9656 return unsignedp ? unsigned_long_long_fract_type_node
9657 : long_long_fract_type_node;
9660 return make_fract_type (size, unsignedp, satp);
9663 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9665 static tree
9666 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9668 if (satp)
9670 if (size == SHORT_ACCUM_TYPE_SIZE)
9671 return unsignedp ? sat_unsigned_short_accum_type_node
9672 : sat_short_accum_type_node;
9673 if (size == ACCUM_TYPE_SIZE)
9674 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9675 if (size == LONG_ACCUM_TYPE_SIZE)
9676 return unsignedp ? sat_unsigned_long_accum_type_node
9677 : sat_long_accum_type_node;
9678 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9679 return unsignedp ? sat_unsigned_long_long_accum_type_node
9680 : sat_long_long_accum_type_node;
9682 else
9684 if (size == SHORT_ACCUM_TYPE_SIZE)
9685 return unsignedp ? unsigned_short_accum_type_node
9686 : short_accum_type_node;
9687 if (size == ACCUM_TYPE_SIZE)
9688 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9689 if (size == LONG_ACCUM_TYPE_SIZE)
9690 return unsignedp ? unsigned_long_accum_type_node
9691 : long_accum_type_node;
9692 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9693 return unsignedp ? unsigned_long_long_accum_type_node
9694 : long_long_accum_type_node;
9697 return make_accum_type (size, unsignedp, satp);
9701 /* Create an atomic variant node for TYPE. This routine is called
9702 during initialization of data types to create the 5 basic atomic
9703 types. The generic build_variant_type function requires these to
9704 already be set up in order to function properly, so cannot be
9705 called from there. If ALIGN is non-zero, then ensure alignment is
9706 overridden to this value. */
9708 static tree
9709 build_atomic_base (tree type, unsigned int align)
9711 tree t;
9713 /* Make sure it's not already registered. */
9714 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9715 return t;
9717 t = build_variant_type_copy (type);
9718 set_type_quals (t, TYPE_QUAL_ATOMIC);
9720 if (align)
9721 SET_TYPE_ALIGN (t, align);
9723 return t;
9726 /* Information about the _FloatN and _FloatNx types. This must be in
9727 the same order as the corresponding TI_* enum values. */
9728 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9730 { 16, false },
9731 { 32, false },
9732 { 64, false },
9733 { 128, false },
9734 { 32, true },
9735 { 64, true },
9736 { 128, true },
9740 /* Create nodes for all integer types (and error_mark_node) using the sizes
9741 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9743 void
9744 build_common_tree_nodes (bool signed_char)
9746 int i;
9748 error_mark_node = make_node (ERROR_MARK);
9749 TREE_TYPE (error_mark_node) = error_mark_node;
9751 initialize_sizetypes ();
9753 /* Define both `signed char' and `unsigned char'. */
9754 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9755 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9756 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9757 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9759 /* Define `char', which is like either `signed char' or `unsigned char'
9760 but not the same as either. */
9761 char_type_node
9762 = (signed_char
9763 ? make_signed_type (CHAR_TYPE_SIZE)
9764 : make_unsigned_type (CHAR_TYPE_SIZE));
9765 TYPE_STRING_FLAG (char_type_node) = 1;
9767 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9768 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9769 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9770 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9771 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9772 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9773 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9774 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9776 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9778 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9779 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9780 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9781 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9783 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9784 && int_n_enabled_p[i])
9786 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9787 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9791 /* Define a boolean type. This type only represents boolean values but
9792 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9793 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9794 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9795 TYPE_PRECISION (boolean_type_node) = 1;
9796 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9798 /* Define what type to use for size_t. */
9799 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9800 size_type_node = unsigned_type_node;
9801 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9802 size_type_node = long_unsigned_type_node;
9803 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9804 size_type_node = long_long_unsigned_type_node;
9805 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9806 size_type_node = short_unsigned_type_node;
9807 else
9809 int i;
9811 size_type_node = NULL_TREE;
9812 for (i = 0; i < NUM_INT_N_ENTS; i++)
9813 if (int_n_enabled_p[i])
9815 char name[50];
9816 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9818 if (strcmp (name, SIZE_TYPE) == 0)
9820 size_type_node = int_n_trees[i].unsigned_type;
9823 if (size_type_node == NULL_TREE)
9824 gcc_unreachable ();
9827 /* Define what type to use for ptrdiff_t. */
9828 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9829 ptrdiff_type_node = integer_type_node;
9830 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9831 ptrdiff_type_node = long_integer_type_node;
9832 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9833 ptrdiff_type_node = long_long_integer_type_node;
9834 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9835 ptrdiff_type_node = short_integer_type_node;
9836 else
9838 ptrdiff_type_node = NULL_TREE;
9839 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9840 if (int_n_enabled_p[i])
9842 char name[50];
9843 sprintf (name, "__int%d", int_n_data[i].bitsize);
9844 if (strcmp (name, PTRDIFF_TYPE) == 0)
9845 ptrdiff_type_node = int_n_trees[i].signed_type;
9847 if (ptrdiff_type_node == NULL_TREE)
9848 gcc_unreachable ();
9851 /* Fill in the rest of the sized types. Reuse existing type nodes
9852 when possible. */
9853 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9854 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9855 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9856 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9857 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9859 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9860 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9861 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9862 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9863 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9865 /* Don't call build_qualified_type for atomics. That routine does
9866 special processing for atomics, and until they are initialized
9867 it's better not to make that call.
9869 Check to see if there is a target override for atomic types. */
9871 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9872 targetm.atomic_align_for_mode (QImode));
9873 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9874 targetm.atomic_align_for_mode (HImode));
9875 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9876 targetm.atomic_align_for_mode (SImode));
9877 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9878 targetm.atomic_align_for_mode (DImode));
9879 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9880 targetm.atomic_align_for_mode (TImode));
9882 access_public_node = get_identifier ("public");
9883 access_protected_node = get_identifier ("protected");
9884 access_private_node = get_identifier ("private");
9886 /* Define these next since types below may use them. */
9887 integer_zero_node = build_int_cst (integer_type_node, 0);
9888 integer_one_node = build_int_cst (integer_type_node, 1);
9889 integer_three_node = build_int_cst (integer_type_node, 3);
9890 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9892 size_zero_node = size_int (0);
9893 size_one_node = size_int (1);
9894 bitsize_zero_node = bitsize_int (0);
9895 bitsize_one_node = bitsize_int (1);
9896 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9898 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9899 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9901 void_type_node = make_node (VOID_TYPE);
9902 layout_type (void_type_node);
9904 pointer_bounds_type_node = targetm.chkp_bound_type ();
9906 /* We are not going to have real types in C with less than byte alignment,
9907 so we might as well not have any types that claim to have it. */
9908 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9909 TYPE_USER_ALIGN (void_type_node) = 0;
9911 void_node = make_node (VOID_CST);
9912 TREE_TYPE (void_node) = void_type_node;
9914 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9915 layout_type (TREE_TYPE (null_pointer_node));
9917 ptr_type_node = build_pointer_type (void_type_node);
9918 const_ptr_type_node
9919 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9920 for (unsigned i = 0;
9921 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9922 ++i)
9923 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9925 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9927 float_type_node = make_node (REAL_TYPE);
9928 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9929 layout_type (float_type_node);
9931 double_type_node = make_node (REAL_TYPE);
9932 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9933 layout_type (double_type_node);
9935 long_double_type_node = make_node (REAL_TYPE);
9936 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9937 layout_type (long_double_type_node);
9939 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9941 int n = floatn_nx_types[i].n;
9942 bool extended = floatn_nx_types[i].extended;
9943 scalar_float_mode mode;
9944 if (!targetm.floatn_mode (n, extended).exists (&mode))
9945 continue;
9946 int precision = GET_MODE_PRECISION (mode);
9947 /* Work around the rs6000 KFmode having precision 113 not
9948 128. */
9949 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9950 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9951 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9952 if (!extended)
9953 gcc_assert (min_precision == n);
9954 if (precision < min_precision)
9955 precision = min_precision;
9956 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9957 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9958 layout_type (FLOATN_NX_TYPE_NODE (i));
9959 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9962 float_ptr_type_node = build_pointer_type (float_type_node);
9963 double_ptr_type_node = build_pointer_type (double_type_node);
9964 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9965 integer_ptr_type_node = build_pointer_type (integer_type_node);
9967 /* Fixed size integer types. */
9968 uint16_type_node = make_or_reuse_type (16, 1);
9969 uint32_type_node = make_or_reuse_type (32, 1);
9970 uint64_type_node = make_or_reuse_type (64, 1);
9972 /* Decimal float types. */
9973 dfloat32_type_node = make_node (REAL_TYPE);
9974 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9975 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9976 layout_type (dfloat32_type_node);
9977 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9979 dfloat64_type_node = make_node (REAL_TYPE);
9980 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9981 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9982 layout_type (dfloat64_type_node);
9983 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9985 dfloat128_type_node = make_node (REAL_TYPE);
9986 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9987 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9988 layout_type (dfloat128_type_node);
9989 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9991 complex_integer_type_node = build_complex_type (integer_type_node, true);
9992 complex_float_type_node = build_complex_type (float_type_node, true);
9993 complex_double_type_node = build_complex_type (double_type_node, true);
9994 complex_long_double_type_node = build_complex_type (long_double_type_node,
9995 true);
9997 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9999 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10000 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10001 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10004 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10005 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10006 sat_ ## KIND ## _type_node = \
10007 make_sat_signed_ ## KIND ## _type (SIZE); \
10008 sat_unsigned_ ## KIND ## _type_node = \
10009 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10010 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10011 unsigned_ ## KIND ## _type_node = \
10012 make_unsigned_ ## KIND ## _type (SIZE);
10014 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10015 sat_ ## WIDTH ## KIND ## _type_node = \
10016 make_sat_signed_ ## KIND ## _type (SIZE); \
10017 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10018 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10019 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10020 unsigned_ ## WIDTH ## KIND ## _type_node = \
10021 make_unsigned_ ## KIND ## _type (SIZE);
10023 /* Make fixed-point type nodes based on four different widths. */
10024 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10025 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10026 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10027 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10028 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10030 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10031 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10032 NAME ## _type_node = \
10033 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10034 u ## NAME ## _type_node = \
10035 make_or_reuse_unsigned_ ## KIND ## _type \
10036 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10037 sat_ ## NAME ## _type_node = \
10038 make_or_reuse_sat_signed_ ## KIND ## _type \
10039 (GET_MODE_BITSIZE (MODE ## mode)); \
10040 sat_u ## NAME ## _type_node = \
10041 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10042 (GET_MODE_BITSIZE (U ## MODE ## mode));
10044 /* Fixed-point type and mode nodes. */
10045 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10046 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10047 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10048 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10049 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10050 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10051 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10052 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10053 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10054 MAKE_FIXED_MODE_NODE (accum, da, DA)
10055 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10058 tree t = targetm.build_builtin_va_list ();
10060 /* Many back-ends define record types without setting TYPE_NAME.
10061 If we copied the record type here, we'd keep the original
10062 record type without a name. This breaks name mangling. So,
10063 don't copy record types and let c_common_nodes_and_builtins()
10064 declare the type to be __builtin_va_list. */
10065 if (TREE_CODE (t) != RECORD_TYPE)
10066 t = build_variant_type_copy (t);
10068 va_list_type_node = t;
10072 /* Modify DECL for given flags.
10073 TM_PURE attribute is set only on types, so the function will modify
10074 DECL's type when ECF_TM_PURE is used. */
10076 void
10077 set_call_expr_flags (tree decl, int flags)
10079 if (flags & ECF_NOTHROW)
10080 TREE_NOTHROW (decl) = 1;
10081 if (flags & ECF_CONST)
10082 TREE_READONLY (decl) = 1;
10083 if (flags & ECF_PURE)
10084 DECL_PURE_P (decl) = 1;
10085 if (flags & ECF_LOOPING_CONST_OR_PURE)
10086 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10087 if (flags & ECF_NOVOPS)
10088 DECL_IS_NOVOPS (decl) = 1;
10089 if (flags & ECF_NORETURN)
10090 TREE_THIS_VOLATILE (decl) = 1;
10091 if (flags & ECF_MALLOC)
10092 DECL_IS_MALLOC (decl) = 1;
10093 if (flags & ECF_RETURNS_TWICE)
10094 DECL_IS_RETURNS_TWICE (decl) = 1;
10095 if (flags & ECF_LEAF)
10096 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10097 NULL, DECL_ATTRIBUTES (decl));
10098 if (flags & ECF_COLD)
10099 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10100 NULL, DECL_ATTRIBUTES (decl));
10101 if (flags & ECF_RET1)
10102 DECL_ATTRIBUTES (decl)
10103 = tree_cons (get_identifier ("fn spec"),
10104 build_tree_list (NULL_TREE, build_string (1, "1")),
10105 DECL_ATTRIBUTES (decl));
10106 if ((flags & ECF_TM_PURE) && flag_tm)
10107 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10108 /* Looping const or pure is implied by noreturn.
10109 There is currently no way to declare looping const or looping pure alone. */
10110 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10111 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
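
/* A minimal usage sketch of the routine above.  The helper below is
   hypothetical (not a call site from this file); it only illustrates
   that ECF_NOTHROW, ECF_CONST and ECF_LEAF set TREE_NOTHROW and
   TREE_READONLY and add the "leaf" attribute on DECL.  */

static void ATTRIBUTE_UNUSED
set_call_expr_flags_example (tree decl)
{
  set_call_expr_flags (decl, ECF_NOTHROW | ECF_CONST | ECF_LEAF);
}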
10115 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10117 static void
10118 local_define_builtin (const char *name, tree type, enum built_in_function code,
10119 const char *library_name, int ecf_flags)
10121 tree decl;
10123 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10124 library_name, NULL_TREE);
10125 set_call_expr_flags (decl, ecf_flags);
10127 set_builtin_decl (code, decl, true);
10130 /* Call this function after instantiating all builtins that the language
10131 front end cares about. This will build the rest of the builtins
10132 and internal functions that are relied upon by the tree optimizers and
10133 the middle-end. */
10135 void
10136 build_common_builtin_nodes (void)
10138 tree tmp, ftype;
10139 int ecf_flags;
10141 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10142 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10144 ftype = build_function_type (void_type_node, void_list_node);
10145 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10146 local_define_builtin ("__builtin_unreachable", ftype,
10147 BUILT_IN_UNREACHABLE,
10148 "__builtin_unreachable",
10149 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10150 | ECF_CONST | ECF_COLD);
10151 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10152 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10153 "abort",
10154 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10157 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10158 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10160 ftype = build_function_type_list (ptr_type_node,
10161 ptr_type_node, const_ptr_type_node,
10162 size_type_node, NULL_TREE);
10164 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10165 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10166 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10167 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10168 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10169 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10172 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10174 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10175 const_ptr_type_node, size_type_node,
10176 NULL_TREE);
10177 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10178 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10181 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10183 ftype = build_function_type_list (ptr_type_node,
10184 ptr_type_node, integer_type_node,
10185 size_type_node, NULL_TREE);
10186 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10187 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10190 /* If we're checking the stack, `alloca' can throw. */
10191 const int alloca_flags
10192 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10194 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10196 ftype = build_function_type_list (ptr_type_node,
10197 size_type_node, NULL_TREE);
10198 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10199 "alloca", alloca_flags);
10202 ftype = build_function_type_list (ptr_type_node, size_type_node,
10203 size_type_node, NULL_TREE);
10204 local_define_builtin ("__builtin_alloca_with_align", ftype,
10205 BUILT_IN_ALLOCA_WITH_ALIGN,
10206 "__builtin_alloca_with_align",
10207 alloca_flags);
10209 ftype = build_function_type_list (ptr_type_node, size_type_node,
10210 size_type_node, size_type_node, NULL_TREE);
10211 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10212 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10213 "__builtin_alloca_with_align_and_max",
10214 alloca_flags);
10216 ftype = build_function_type_list (void_type_node,
10217 ptr_type_node, ptr_type_node,
10218 ptr_type_node, NULL_TREE);
10219 local_define_builtin ("__builtin_init_trampoline", ftype,
10220 BUILT_IN_INIT_TRAMPOLINE,
10221 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10222 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10223 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10224 "__builtin_init_heap_trampoline",
10225 ECF_NOTHROW | ECF_LEAF);
10226 local_define_builtin ("__builtin_init_descriptor", ftype,
10227 BUILT_IN_INIT_DESCRIPTOR,
10228 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10230 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10231 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10232 BUILT_IN_ADJUST_TRAMPOLINE,
10233 "__builtin_adjust_trampoline",
10234 ECF_CONST | ECF_NOTHROW);
10235 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10236 BUILT_IN_ADJUST_DESCRIPTOR,
10237 "__builtin_adjust_descriptor",
10238 ECF_CONST | ECF_NOTHROW);
10240 ftype = build_function_type_list (void_type_node,
10241 ptr_type_node, ptr_type_node, NULL_TREE);
10242 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10243 BUILT_IN_NONLOCAL_GOTO,
10244 "__builtin_nonlocal_goto",
10245 ECF_NORETURN | ECF_NOTHROW);
10247 ftype = build_function_type_list (void_type_node,
10248 ptr_type_node, ptr_type_node, NULL_TREE);
10249 local_define_builtin ("__builtin_setjmp_setup", ftype,
10250 BUILT_IN_SETJMP_SETUP,
10251 "__builtin_setjmp_setup", ECF_NOTHROW);
10253 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10254 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10255 BUILT_IN_SETJMP_RECEIVER,
10256 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10258 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10259 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10260 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10262 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10263 local_define_builtin ("__builtin_stack_restore", ftype,
10264 BUILT_IN_STACK_RESTORE,
10265 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10267 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10268 const_ptr_type_node, size_type_node,
10269 NULL_TREE);
10270 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10271 "__builtin_memcmp_eq",
10272 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10274 /* If there's a possibility that we might use the ARM EABI, build the
10275 alternate __cxa_end_cleanup node used to resume from C++. */
10276 if (targetm.arm_eabi_unwinder)
10278 ftype = build_function_type_list (void_type_node, NULL_TREE);
10279 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10280 BUILT_IN_CXA_END_CLEANUP,
10281 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10284 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10285 local_define_builtin ("__builtin_unwind_resume", ftype,
10286 BUILT_IN_UNWIND_RESUME,
10287 ((targetm_common.except_unwind_info (&global_options)
10288 == UI_SJLJ)
10289 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10290 ECF_NORETURN);
10292 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10294 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10295 NULL_TREE);
10296 local_define_builtin ("__builtin_return_address", ftype,
10297 BUILT_IN_RETURN_ADDRESS,
10298 "__builtin_return_address",
10299 ECF_NOTHROW);
10302 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10303 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10305 ftype = build_function_type_list (void_type_node, ptr_type_node,
10306 ptr_type_node, NULL_TREE);
10307 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10308 local_define_builtin ("__cyg_profile_func_enter", ftype,
10309 BUILT_IN_PROFILE_FUNC_ENTER,
10310 "__cyg_profile_func_enter", 0);
10311 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10312 local_define_builtin ("__cyg_profile_func_exit", ftype,
10313 BUILT_IN_PROFILE_FUNC_EXIT,
10314 "__cyg_profile_func_exit", 0);
10317 /* The exception object and filter values from the runtime. The argument
10318 must be zero before exception lowering, i.e. from the front end. After
10319 exception lowering, it will be the region number for the exception
10320 landing pad. These functions are PURE instead of CONST to prevent
10321 them from being hoisted past the exception edge that will initialize
10322 its value in the landing pad. */
10323 ftype = build_function_type_list (ptr_type_node,
10324 integer_type_node, NULL_TREE);
10325 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10326 /* Only use TM_PURE if we have TM language support. */
10327 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10328 ecf_flags |= ECF_TM_PURE;
10329 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10330 "__builtin_eh_pointer", ecf_flags);
10332 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10333 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10334 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10335 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10337 ftype = build_function_type_list (void_type_node,
10338 integer_type_node, integer_type_node,
10339 NULL_TREE);
10340 local_define_builtin ("__builtin_eh_copy_values", ftype,
10341 BUILT_IN_EH_COPY_VALUES,
10342 "__builtin_eh_copy_values", ECF_NOTHROW);
10344 /* Complex multiplication and division. These are handled as builtins
10345 rather than optabs because emit_library_call_value doesn't support
10346 complex. Further, we can do slightly better with folding these
10347 beasties if the real and imaginary parts of the arguments are separate. */
10349 int mode;
10351 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10353 char mode_name_buf[4], *q;
10354 const char *p;
10355 enum built_in_function mcode, dcode;
10356 tree type, inner_type;
10357 const char *prefix = "__";
10359 if (targetm.libfunc_gnu_prefix)
10360 prefix = "__gnu_";
10362 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10363 if (type == NULL)
10364 continue;
10365 inner_type = TREE_TYPE (type);
10367 ftype = build_function_type_list (type, inner_type, inner_type,
10368 inner_type, inner_type, NULL_TREE);
10370 mcode = ((enum built_in_function)
10371 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10372 dcode = ((enum built_in_function)
10373 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10375 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10376 *q = TOLOWER (*p);
10377 *q = '\0';
10379 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10380 NULL);
10381 local_define_builtin (built_in_names[mcode], ftype, mcode,
10382 built_in_names[mcode],
10383 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10385 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10386 NULL);
10387 local_define_builtin (built_in_names[dcode], ftype, dcode,
10388 built_in_names[dcode],
10389 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10393 init_internal_fns ();
10396 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10397 better way.
10399 If we requested a pointer to a vector, build up the pointers that
10400 we stripped off while looking for the inner type. Similarly for
10401 return values from functions.
10403 The argument TYPE is the top of the chain, and BOTTOM is the
10404 new type which we will point to. */
10406 tree
10407 reconstruct_complex_type (tree type, tree bottom)
10409 tree inner, outer;
10411 if (TREE_CODE (type) == POINTER_TYPE)
10413 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10414 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10415 TYPE_REF_CAN_ALIAS_ALL (type));
10417 else if (TREE_CODE (type) == REFERENCE_TYPE)
10419 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10420 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10421 TYPE_REF_CAN_ALIAS_ALL (type));
10423 else if (TREE_CODE (type) == ARRAY_TYPE)
10425 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10426 outer = build_array_type (inner, TYPE_DOMAIN (type));
10428 else if (TREE_CODE (type) == FUNCTION_TYPE)
10430 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10431 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10433 else if (TREE_CODE (type) == METHOD_TYPE)
10435 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10436 /* The build_method_type_directly() routine prepends 'this' to the argument
10437 list, so we must compensate by getting rid of it. */
10438 outer
10439 = build_method_type_directly
10440 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10441 inner,
10442 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10444 else if (TREE_CODE (type) == OFFSET_TYPE)
10446 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10447 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10449 else
10450 return bottom;
10452 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10453 TYPE_QUALS (type));
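
/* An illustrative sketch of the routine above.  The helper below is an
   assumed example, not taken from this file: rebuilding "float **" around
   a 4-element vector of floats yields a type shaped like "v4sf **".  */

static tree ATTRIBUTE_UNUSED
reconstruct_complex_type_example (void)
{
  tree float_pp
    = build_pointer_type (build_pointer_type (float_type_node));
  tree v4sf = build_vector_type (float_type_node, 4);
  /* Strips the two POINTER_TYPEs, replaces the innermost float type with
     the vector type, and rebuilds the pointers on top of it.  */
  return reconstruct_complex_type (float_pp, v4sf);
}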
10456 /* Returns a vector tree node given an integer or vector mode and
10457 the inner type. */
10458 tree
10459 build_vector_type_for_mode (tree innertype, machine_mode mode)
10461 poly_int64 nunits;
10462 unsigned int bitsize;
10464 switch (GET_MODE_CLASS (mode))
10466 case MODE_VECTOR_BOOL:
10467 case MODE_VECTOR_INT:
10468 case MODE_VECTOR_FLOAT:
10469 case MODE_VECTOR_FRACT:
10470 case MODE_VECTOR_UFRACT:
10471 case MODE_VECTOR_ACCUM:
10472 case MODE_VECTOR_UACCUM:
10473 nunits = GET_MODE_NUNITS (mode);
10474 break;
10476 case MODE_INT:
10477 /* Check that there are no leftover bits. */
10478 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10479 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10480 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10481 break;
10483 default:
10484 gcc_unreachable ();
10487 return make_vector_type (innertype, nunits, mode);
10490 /* Similarly, but takes the inner type and number of units, which must be
10491 a power of two. */
10493 tree
10494 build_vector_type (tree innertype, poly_int64 nunits)
10496 return make_vector_type (innertype, nunits, VOIDmode);
10499 /* Build a boolean vector type with NUNITS elements and VECTOR_SIZE bytes in total. */
10501 tree
10502 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
10504 machine_mode mask_mode
10505 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10507 poly_uint64 vsize;
10508 if (mask_mode == BLKmode)
10509 vsize = vector_size * BITS_PER_UNIT;
10510 else
10511 vsize = GET_MODE_BITSIZE (mask_mode);
10513 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10515 tree bool_type = build_nonstandard_boolean_type (esize);
10517 return make_vector_type (bool_type, nunits, mask_mode);
10520 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10522 tree
10523 build_same_sized_truth_vector_type (tree vectype)
10525 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10526 return vectype;
10528 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
10530 if (known_eq (size, 0U))
10531 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10533 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10536 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10538 tree
10539 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10541 tree t = make_vector_type (innertype, nunits, VOIDmode);
10542 tree cand;
10543 /* We always build the non-opaque variant before the opaque one,
10544 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10545 cand = TYPE_NEXT_VARIANT (t);
10546 if (cand
10547 && TYPE_VECTOR_OPAQUE (cand)
10548 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10549 return cand;
10550 /* Otherwise build a variant type and make sure to queue it after
10551 the non-opaque type. */
10552 cand = build_distinct_type_copy (t);
10553 TYPE_VECTOR_OPAQUE (cand) = true;
10554 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10555 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10556 TYPE_NEXT_VARIANT (t) = cand;
10557 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10558 return cand;
10561 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10563 wide_int
10564 vector_cst_int_elt (const_tree t, unsigned int i)
10566 /* First handle elements that are directly encoded. */
10567 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10568 if (i < encoded_nelts)
10569 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
10571 /* Identify the pattern that contains element I and work out the index of
10572 the last encoded element for that pattern. */
10573 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10574 unsigned int pattern = i % npatterns;
10575 unsigned int count = i / npatterns;
10576 unsigned int final_i = encoded_nelts - npatterns + pattern;
10578 /* If there are no steps, the final encoded value is the right one. */
10579 if (!VECTOR_CST_STEPPED_P (t))
10580 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10582 /* Otherwise work out the value from the last two encoded elements. */
10583 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10584 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10585 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
10586 return wi::to_wide (v2) + (count - 2) * diff;
10589 /* Return the value of element I of VECTOR_CST T. */
10591 tree
10592 vector_cst_elt (const_tree t, unsigned int i)
10594 /* First handle elements that are directly encoded. */
10595 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10596 if (i < encoded_nelts)
10597 return VECTOR_CST_ENCODED_ELT (t, i);
10599 /* If there are no steps, the final encoded value is the right one. */
10600 if (!VECTOR_CST_STEPPED_P (t))
10602 /* Identify the pattern that contains element I and work out the index of
10603 the last encoded element for that pattern. */
10604 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10605 unsigned int pattern = i % npatterns;
10606 unsigned int final_i = encoded_nelts - npatterns + pattern;
10607 return VECTOR_CST_ENCODED_ELT (t, final_i);
10610 /* Otherwise work out the value from the last two encoded elements. */
10611 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10612 vector_cst_int_elt (t, i));
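
/* A worked example of the stepped encoding handled above (the numbers are
   illustrative only): for a VECTOR_CST encoding the series 0, 1, 2, ...
   with one pattern and encoded elements {0, 1, 2}, element I = 5 gives
   pattern = 0, count = 5 and final_i = 2, so v1 = 1, v2 = 2 and diff = 1;
   the returned value is 2 + (5 - 2) * 1 = 5, as expected.  */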
10615 /* Given an initializer INIT, return TRUE if INIT is zero or some
10616 aggregate of zeros. Otherwise return FALSE. */
10617 bool
10618 initializer_zerop (const_tree init)
10620 tree elt;
10622 STRIP_NOPS (init);
10624 switch (TREE_CODE (init))
10626 case INTEGER_CST:
10627 return integer_zerop (init);
10629 case REAL_CST:
10630 /* ??? Note that this is not correct for C4X float formats. There,
10631 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10632 negative exponent. */
10633 return real_zerop (init)
10634 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10636 case FIXED_CST:
10637 return fixed_zerop (init);
10639 case COMPLEX_CST:
10640 return integer_zerop (init)
10641 || (real_zerop (init)
10642 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10643 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10645 case VECTOR_CST:
10646 return (VECTOR_CST_NPATTERNS (init) == 1
10647 && VECTOR_CST_DUPLICATE_P (init)
10648 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)));
10650 case CONSTRUCTOR:
10652 unsigned HOST_WIDE_INT idx;
10654 if (TREE_CLOBBER_P (init))
10655 return false;
10656 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10657 if (!initializer_zerop (elt))
10658 return false;
10659 return true;
10662 case STRING_CST:
10664 int i;
10666 /* We need to loop through all elements to handle cases like
10667 "\0" and "\0foobar". */
10668 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10669 if (TREE_STRING_POINTER (init)[i] != '\0')
10670 return false;
10672 return true;
10675 default:
10676 return false;
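
/* A small sketch of the predicate above.  The helper below is hypothetical
   and only illustrates the behavior: a literal zero and an all-zero
   STRING_CST count as zero initializers, while a string whose first byte
   is nonzero does not, so the function returns true.  */

static bool ATTRIBUTE_UNUSED
initializer_zerop_example (void)
{
  return (initializer_zerop (build_int_cst (integer_type_node, 0))
	  && initializer_zerop (build_string (2, "\0"))
	  && !initializer_zerop (build_string (2, "a")));
}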
10680 /* Check if vector VEC consists of all equal elements and that the
10681 number of elements corresponds to the type of VEC. The function
10682 returns the first element of the vector, or NULL_TREE if the
10683 vector is not uniform. */
10684 tree
10685 uniform_vector_p (const_tree vec)
10687 tree first, t;
10688 unsigned HOST_WIDE_INT i, nelts;
10690 if (vec == NULL_TREE)
10691 return NULL_TREE;
10693 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10695 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10696 return TREE_OPERAND (vec, 0);
10698 else if (TREE_CODE (vec) == VECTOR_CST)
10700 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10701 return VECTOR_CST_ENCODED_ELT (vec, 0);
10702 return NULL_TREE;
10705 else if (TREE_CODE (vec) == CONSTRUCTOR
10706 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10708 first = error_mark_node;
10710 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10712 if (i == 0)
10714 first = t;
10715 continue;
10717 if (!operand_equal_p (first, t, 0))
10718 return NULL_TREE;
10720 if (i != nelts)
10721 return NULL_TREE;
10723 return first;
10726 return NULL_TREE;
10729 /* Build an empty statement at location LOC. */
10731 tree
10732 build_empty_stmt (location_t loc)
10734 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10735 SET_EXPR_LOCATION (t, loc);
10736 return t;
10740 /* Build an OpenMP clause with code CODE. LOC is the location of the
10741 clause. */
10743 tree
10744 build_omp_clause (location_t loc, enum omp_clause_code code)
10746 tree t;
10747 int size, length;
10749 length = omp_clause_num_ops[code];
10750 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10752 record_node_allocation_statistics (OMP_CLAUSE, size);
10754 t = (tree) ggc_internal_alloc (size);
10755 memset (t, 0, size);
10756 TREE_SET_CODE (t, OMP_CLAUSE);
10757 OMP_CLAUSE_SET_CODE (t, code);
10758 OMP_CLAUSE_LOCATION (t) = loc;
10760 return t;
10763 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10764 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10765 Except for the CODE and operand count field, other storage for the
10766 object is initialized to zeros. */
10768 tree
10769 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10771 tree t;
10772 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10774 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10775 gcc_assert (len >= 1);
10777 record_node_allocation_statistics (code, length);
10779 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10781 TREE_SET_CODE (t, code);
10783 /* Can't use TREE_OPERAND to store the length because if checking is
10784 enabled, it will try to check the length before we store it. :-P */
10785 t->exp.operands[0] = build_int_cst (sizetype, len);
10787 return t;
10790 /* Helper function for build_call_* functions; build a CALL_EXPR with
10791 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10792 the argument slots. */
10794 static tree
10795 build_call_1 (tree return_type, tree fn, int nargs)
10797 tree t;
10799 t = build_vl_exp (CALL_EXPR, nargs + 3);
10800 TREE_TYPE (t) = return_type;
10801 CALL_EXPR_FN (t) = fn;
10802 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10804 return t;
10807 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10808 FN and a null static chain slot. NARGS is the number of call arguments
10809 which are specified as "..." arguments. */
10811 tree
10812 build_call_nary (tree return_type, tree fn, int nargs, ...)
10814 tree ret;
10815 va_list args;
10816 va_start (args, nargs);
10817 ret = build_call_valist (return_type, fn, nargs, args);
10818 va_end (args);
10819 return ret;
10822 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10823 FN and a null static chain slot. NARGS is the number of call arguments
10824 which are specified as a va_list ARGS. */
10826 tree
10827 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10829 tree t;
10830 int i;
10832 t = build_call_1 (return_type, fn, nargs);
10833 for (i = 0; i < nargs; i++)
10834 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10835 process_call_operands (t);
10836 return t;
10839 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10840 FN and a null static chain slot. NARGS is the number of call arguments
10841 which are specified as a tree array ARGS. */
10843 tree
10844 build_call_array_loc (location_t loc, tree return_type, tree fn,
10845 int nargs, const tree *args)
10847 tree t;
10848 int i;
10850 t = build_call_1 (return_type, fn, nargs);
10851 for (i = 0; i < nargs; i++)
10852 CALL_EXPR_ARG (t, i) = args[i];
10853 process_call_operands (t);
10854 SET_EXPR_LOCATION (t, loc);
10855 return t;
10858 /* Like build_call_array, but takes a vec. */
10860 tree
10861 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10863 tree ret, t;
10864 unsigned int ix;
10866 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10867 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10868 CALL_EXPR_ARG (ret, ix) = t;
10869 process_call_operands (ret);
10870 return ret;
10873 /* Conveniently construct a function call expression. FNDECL names the
10874 function to be called and N arguments are passed in the array
10875 ARGARRAY. */
10877 tree
10878 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10880 tree fntype = TREE_TYPE (fndecl);
10881 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10883 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10886 /* Conveniently construct a function call expression. FNDECL names the
10887 function to be called and the arguments are passed in the vector
10888 VEC. */
10890 tree
10891 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10893 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10894 vec_safe_address (vec));
10898 /* Conveniently construct a function call expression. FNDECL names the
10899 function to be called, N is the number of arguments, and the "..."
10900 parameters are the argument expressions. */
10902 tree
10903 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10905 va_list ap;
10906 tree *argarray = XALLOCAVEC (tree, n);
10907 int i;
10909 va_start (ap, n);
10910 for (i = 0; i < n; i++)
10911 argarray[i] = va_arg (ap, tree);
10912 va_end (ap);
10913 return build_call_expr_loc_array (loc, fndecl, n, argarray);
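
/* A minimal usage sketch (an assumed example, not a call site in this
   file): build a located call to the abort builtin, which takes no
   arguments.  This assumes the abort builtin has already been
   registered, e.g. by build_common_builtin_nodes below.  */

static tree ATTRIBUTE_UNUSED
build_abort_call_example (location_t loc)
{
  tree abort_fn = builtin_decl_explicit (BUILT_IN_ABORT);
  return build_call_expr_loc (loc, abort_fn, 0);
}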
10916 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10917 varargs macros aren't supported by all bootstrap compilers. */
10919 tree
10920 build_call_expr (tree fndecl, int n, ...)
10922 va_list ap;
10923 tree *argarray = XALLOCAVEC (tree, n);
10924 int i;
10926 va_start (ap, n);
10927 for (i = 0; i < n; i++)
10928 argarray[i] = va_arg (ap, tree);
10929 va_end (ap);
10930 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10933 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10934 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10935 It will get gimplified later into an ordinary internal function. */
10937 tree
10938 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10939 tree type, int n, const tree *args)
10941 tree t = build_call_1 (type, NULL_TREE, n);
10942 for (int i = 0; i < n; ++i)
10943 CALL_EXPR_ARG (t, i) = args[i];
10944 SET_EXPR_LOCATION (t, loc);
10945 CALL_EXPR_IFN (t) = ifn;
10946 return t;
10949 /* Build an internal call expression. This is just like CALL_EXPR, except
10950 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10951 internal function. */
10953 tree
10954 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10955 tree type, int n, ...)
10957 va_list ap;
10958 tree *argarray = XALLOCAVEC (tree, n);
10959 int i;
10961 va_start (ap, n);
10962 for (i = 0; i < n; i++)
10963 argarray[i] = va_arg (ap, tree);
10964 va_end (ap);
10965 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10968 /* Return a function call to FN, if the target is guaranteed to support it,
10969 or null otherwise.
10971 N is the number of arguments, passed in the "...", and TYPE is the
10972 type of the return value. */
10974 tree
10975 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10976 int n, ...)
10978 va_list ap;
10979 tree *argarray = XALLOCAVEC (tree, n);
10980 int i;
10982 va_start (ap, n);
10983 for (i = 0; i < n; i++)
10984 argarray[i] = va_arg (ap, tree);
10985 va_end (ap);
10986 if (internal_fn_p (fn))
10988 internal_fn ifn = as_internal_fn (fn);
10989 if (direct_internal_fn_p (ifn))
10991 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10992 if (!direct_internal_fn_supported_p (ifn, types,
10993 OPTIMIZE_FOR_BOTH))
10994 return NULL_TREE;
10996 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10998 else
11000 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11001 if (!fndecl)
11002 return NULL_TREE;
11003 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11007 /* Return a function call to the appropriate builtin alloca variant.
11009 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11010 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11011 bound for SIZE in case it is not a fixed value. */
11013 tree
11014 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11016 if (max_size >= 0)
11018 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11019 return
11020 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11022 else if (align > 0)
11024 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11025 return build_call_expr (t, 2, size, size_int (align));
11027 else
11029 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11030 return build_call_expr (t, 1, size);
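
/* An illustrative sketch of the dispatch above.  The helper below is
   hypothetical: with no upper bound (MAX_SIZE < 0) and no extra alignment
   request the plain __builtin_alloca variant is selected.  */

static tree ATTRIBUTE_UNUSED
build_plain_alloca_example (tree size)
{
  return build_alloca_call_expr (size, 0, -1);
}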
11034 /* Create a new constant string literal and return a char* pointer to it.
11035 The STRING_CST value is the LEN characters at STR. */
11036 tree
11037 build_string_literal (int len, const char *str)
11039 tree t, elem, index, type;
11041 t = build_string (len, str);
11042 elem = build_type_variant (char_type_node, 1, 0);
11043 index = build_index_type (size_int (len - 1));
11044 type = build_array_type (elem, index);
11045 TREE_TYPE (t) = type;
11046 TREE_CONSTANT (t) = 1;
11047 TREE_READONLY (t) = 1;
11048 TREE_STATIC (t) = 1;
11050 type = build_pointer_type (elem);
11051 t = build1 (ADDR_EXPR, type,
11052 build4 (ARRAY_REF, elem,
11053 t, integer_zero_node, NULL_TREE, NULL_TREE));
11054 return t;
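
/* A small usage sketch of the routine above.  The helper below is
   hypothetical; it builds a "char *" pointer to the constant string
   "hello", with LEN counting the terminating NUL as the existing callers
   of this routine appear to do.  */

static tree ATTRIBUTE_UNUSED
build_hello_literal_example (void)
{
  return build_string_literal (sizeof "hello", "hello");
}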
11059 /* Return true if T (assumed to be a DECL) must be assigned a memory
11060 location. */
11062 bool
11063 needs_to_live_in_memory (const_tree t)
11065 return (TREE_ADDRESSABLE (t)
11066 || is_global_var (t)
11067 || (TREE_CODE (t) == RESULT_DECL
11068 && !DECL_BY_REFERENCE (t)
11069 && aggregate_value_p (t, current_function_decl)));
11072 /* Return the value of the constant X, sign-extended. */
11074 HOST_WIDE_INT
11075 int_cst_value (const_tree x)
11077 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11078 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11080 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11081 gcc_assert (cst_and_fits_in_hwi (x));
11083 if (bits < HOST_BITS_PER_WIDE_INT)
11085 bool negative = ((val >> (bits - 1)) & 1) != 0;
11086 if (negative)
11087 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11088 else
11089 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11092 return val;
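
/* A worked example of the sign extension above (illustrative only): for an
   8-bit constant whose low bits are 0xff, BITS is 8 and bit 7 is set, so
   the value is widened with ones and the function returns -1; with low
   bits 0x7f the high bits are cleared instead and it returns 127.  */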
11095 /* If TYPE is an integral or pointer type, return an integer type with
11096 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11097 if TYPE is already an integer type of signedness UNSIGNEDP. */
11099 tree
11100 signed_or_unsigned_type_for (int unsignedp, tree type)
11102 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11103 return type;
11105 if (TREE_CODE (type) == VECTOR_TYPE)
11107 tree inner = TREE_TYPE (type);
11108 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11109 if (!inner2)
11110 return NULL_TREE;
11111 if (inner == inner2)
11112 return type;
11113 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11116 if (!INTEGRAL_TYPE_P (type)
11117 && !POINTER_TYPE_P (type)
11118 && TREE_CODE (type) != OFFSET_TYPE)
11119 return NULL_TREE;
11121 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11124 /* If TYPE is an integral or pointer type, return an integer type with
11125 the same precision which is unsigned, or itself if TYPE is already an
11126 unsigned integer type. */
11128 tree
11129 unsigned_type_for (tree type)
11131 return signed_or_unsigned_type_for (1, type);
11134 /* If TYPE is an integral or pointer type, return an integer type with
11135 the same precision which is signed, or itself if TYPE is already a
11136 signed integer type. */
11138 tree
11139 signed_type_for (tree type)
11141 return signed_or_unsigned_type_for (0, type);
11144 /* If TYPE is a vector type, return a boolean vector type with the same
11145 number of subparts. Otherwise return boolean_type_node. */
11147 tree
11148 truth_type_for (tree type)
11150 if (TREE_CODE (type) == VECTOR_TYPE)
11152 if (VECTOR_BOOLEAN_TYPE_P (type))
11153 return type;
11154 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11155 GET_MODE_SIZE (TYPE_MODE (type)));
11157 else
11158 return boolean_type_node;
11161 /* Returns the largest value obtainable by casting something in INNER type to
11162 OUTER type. */
11164 tree
11165 upper_bound_in_type (tree outer, tree inner)
11167 unsigned int det = 0;
11168 unsigned oprec = TYPE_PRECISION (outer);
11169 unsigned iprec = TYPE_PRECISION (inner);
11170 unsigned prec;
11172 /* Compute a unique number for every combination. */
11173 det |= (oprec > iprec) ? 4 : 0;
11174 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11175 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11177 /* Determine the exponent to use. */
11178 switch (det)
11180 case 0:
11181 case 1:
11182 /* oprec <= iprec, outer: signed, inner: don't care. */
11183 prec = oprec - 1;
11184 break;
11185 case 2:
11186 case 3:
11187 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11188 prec = oprec;
11189 break;
11190 case 4:
11191 /* oprec > iprec, outer: signed, inner: signed. */
11192 prec = iprec - 1;
11193 break;
11194 case 5:
11195 /* oprec > iprec, outer: signed, inner: unsigned. */
11196 prec = iprec;
11197 break;
11198 case 6:
11199 /* oprec > iprec, outer: unsigned, inner: signed. */
11200 prec = oprec;
11201 break;
11202 case 7:
11203 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11204 prec = iprec;
11205 break;
11206 default:
11207 gcc_unreachable ();
11210 return wide_int_to_tree (outer,
11211 wi::mask (prec, false, TYPE_PRECISION (outer)));
11214 /* Returns the smallest value obtainable by casting something in INNER type to
11215 OUTER type. */
11217 tree
11218 lower_bound_in_type (tree outer, tree inner)
11220 unsigned oprec = TYPE_PRECISION (outer);
11221 unsigned iprec = TYPE_PRECISION (inner);
11223 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11224 and obtain 0. */
11225 if (TYPE_UNSIGNED (outer)
11226 /* If we are widening something of an unsigned type, OUTER type
11227 contains all values of INNER type. In particular, both INNER
11228 and OUTER types have zero in common. */
11229 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11230 return build_int_cst (outer, 0);
11231 else
11233 /* If we are widening a signed type to another signed type, we
11234 want to obtain -2^(iprec-1). If we are keeping the
11235 precision or narrowing to a signed type, we want to obtain
11236 -2^(oprec-1). */
11237 unsigned prec = oprec > iprec ? iprec : oprec;
11238 return wide_int_to_tree (outer,
11239 wi::mask (prec - 1, true,
11240 TYPE_PRECISION (outer)));
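
/* Two worked examples for the pair of routines above (illustrative only):
   casting an unsigned 8-bit INNER to a signed 32-bit OUTER gives det = 5,
   prec = 8 and an upper bound of 255; casting a signed 8-bit INNER to a
   signed 16-bit OUTER keeps prec = 8, so the lower bound is -2^7 = -128.  */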
11244 /* Return nonzero if two operands that are suitable for PHI nodes are
11245 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11246 SSA_NAME or invariant. Note that this is strictly an optimization.
11247 That is, callers of this function can directly call operand_equal_p
11248 and get the same result, only slower. */
11250 int
11251 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11253 if (arg0 == arg1)
11254 return 1;
11255 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11256 return 0;
11257 return operand_equal_p (arg0, arg1, 0);
11260 /* Returns the number of zeros at the end of the binary representation of X. */
11262 tree
11263 num_ending_zeros (const_tree x)
11265 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11269 #define WALK_SUBTREE(NODE) \
11270 do \
11272 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11273 if (result) \
11274 return result; \
11276 while (0)
11278 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11279 to be walked whenever a type is seen in the tree. The rest of the operands
11280 and the return value are as for walk_tree. */
11282 static tree
11283 walk_type_fields (tree type, walk_tree_fn func, void *data,
11284 hash_set<tree> *pset, walk_tree_lh lh)
11286 tree result = NULL_TREE;
11288 switch (TREE_CODE (type))
11290 case POINTER_TYPE:
11291 case REFERENCE_TYPE:
11292 case VECTOR_TYPE:
11293 /* We have to worry about mutually recursive pointers. These can't
11294 be written in C. They can in Ada. It's pathological, but
11295 there's an ACATS test (c38102a) that checks it. Deal with this
11296 by checking if we're pointing to another pointer, that one
11297 points to another pointer, that one does too, and we have no htab.
11298 If so, get a hash table. We check three levels deep to avoid
11299 the cost of the hash table if we don't need one. */
11300 if (POINTER_TYPE_P (TREE_TYPE (type))
11301 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11302 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11303 && !pset)
11305 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11306 func, data);
11307 if (result)
11308 return result;
11310 break;
11313 /* fall through */
11315 case COMPLEX_TYPE:
11316 WALK_SUBTREE (TREE_TYPE (type));
11317 break;
11319 case METHOD_TYPE:
11320 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11322 /* Fall through. */
11324 case FUNCTION_TYPE:
11325 WALK_SUBTREE (TREE_TYPE (type));
11327 tree arg;
11329 /* We never want to walk into default arguments. */
11330 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11331 WALK_SUBTREE (TREE_VALUE (arg));
11333 break;
11335 case ARRAY_TYPE:
11336 /* Don't follow this node's type if it is a pointer, for fear that
11337 we'll have infinite recursion. If we have a PSET, then we
11338 need not fear. */
11339 if (pset
11340 || (!POINTER_TYPE_P (TREE_TYPE (type))
11341 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11342 WALK_SUBTREE (TREE_TYPE (type));
11343 WALK_SUBTREE (TYPE_DOMAIN (type));
11344 break;
11346 case OFFSET_TYPE:
11347 WALK_SUBTREE (TREE_TYPE (type));
11348 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11349 break;
11351 default:
11352 break;
11355 return NULL_TREE;
11358 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11359 called with the DATA and the address of each sub-tree. If FUNC returns a
11360 non-NULL value, the traversal is stopped, and the value returned by FUNC
11361 is returned. If PSET is non-NULL it is used to record the nodes visited,
11362 and to avoid visiting a node more than once. */
11364 tree
11365 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11366 hash_set<tree> *pset, walk_tree_lh lh)
11368 enum tree_code code;
11369 int walk_subtrees;
11370 tree result;
11372 #define WALK_SUBTREE_TAIL(NODE) \
11373 do \
11375 tp = & (NODE); \
11376 goto tail_recurse; \
11378 while (0)
11380 tail_recurse:
11381 /* Skip empty subtrees. */
11382 if (!*tp)
11383 return NULL_TREE;
11385 /* Don't walk the same tree twice, if the user has requested
11386 that we avoid doing so. */
11387 if (pset && pset->add (*tp))
11388 return NULL_TREE;
11390 /* Call the function. */
11391 walk_subtrees = 1;
11392 result = (*func) (tp, &walk_subtrees, data);
11394 /* If we found something, return it. */
11395 if (result)
11396 return result;
11398 code = TREE_CODE (*tp);
11400 /* Even if we didn't, FUNC may have decided that there was nothing
11401 interesting below this point in the tree. */
11402 if (!walk_subtrees)
11404 /* But we still need to check our siblings. */
11405 if (code == TREE_LIST)
11406 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11407 else if (code == OMP_CLAUSE)
11408 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11409 else
11410 return NULL_TREE;
11413 if (lh)
11415 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11416 if (result || !walk_subtrees)
11417 return result;
11420 switch (code)
11422 case ERROR_MARK:
11423 case IDENTIFIER_NODE:
11424 case INTEGER_CST:
11425 case REAL_CST:
11426 case FIXED_CST:
11427 case VECTOR_CST:
11428 case STRING_CST:
11429 case BLOCK:
11430 case PLACEHOLDER_EXPR:
11431 case SSA_NAME:
11432 case FIELD_DECL:
11433 case RESULT_DECL:
11434 /* None of these have subtrees other than those already walked
11435 above. */
11436 break;
11438 case TREE_LIST:
11439 WALK_SUBTREE (TREE_VALUE (*tp));
11440 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11441 break;
11443 case TREE_VEC:
11445 int len = TREE_VEC_LENGTH (*tp);
11447 if (len == 0)
11448 break;
11450 /* Walk all elements but the first. */
11451 while (--len)
11452 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11454 /* Now walk the first one as a tail call. */
11455 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11458 case COMPLEX_CST:
11459 WALK_SUBTREE (TREE_REALPART (*tp));
11460 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11462 case CONSTRUCTOR:
11464 unsigned HOST_WIDE_INT idx;
11465 constructor_elt *ce;
11467 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11468 idx++)
11469 WALK_SUBTREE (ce->value);
11471 break;
11473 case SAVE_EXPR:
11474 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11476 case BIND_EXPR:
11478 tree decl;
11479 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11481 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11482 into declarations that are just mentioned, rather than
11483 declared; they don't really belong to this part of the tree.
11484 And, we can see cycles: the initializer for a declaration
11485 can refer to the declaration itself. */
11486 WALK_SUBTREE (DECL_INITIAL (decl));
11487 WALK_SUBTREE (DECL_SIZE (decl));
11488 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11490 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11493 case STATEMENT_LIST:
11495 tree_stmt_iterator i;
11496 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11497 WALK_SUBTREE (*tsi_stmt_ptr (i));
11499 break;
11501 case OMP_CLAUSE:
11502 switch (OMP_CLAUSE_CODE (*tp))
11504 case OMP_CLAUSE_GANG:
11505 case OMP_CLAUSE__GRIDDIM_:
11506 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11507 /* FALLTHRU */
11509 case OMP_CLAUSE_ASYNC:
11510 case OMP_CLAUSE_WAIT:
11511 case OMP_CLAUSE_WORKER:
11512 case OMP_CLAUSE_VECTOR:
11513 case OMP_CLAUSE_NUM_GANGS:
11514 case OMP_CLAUSE_NUM_WORKERS:
11515 case OMP_CLAUSE_VECTOR_LENGTH:
11516 case OMP_CLAUSE_PRIVATE:
11517 case OMP_CLAUSE_SHARED:
11518 case OMP_CLAUSE_FIRSTPRIVATE:
11519 case OMP_CLAUSE_COPYIN:
11520 case OMP_CLAUSE_COPYPRIVATE:
11521 case OMP_CLAUSE_FINAL:
11522 case OMP_CLAUSE_IF:
11523 case OMP_CLAUSE_NUM_THREADS:
11524 case OMP_CLAUSE_SCHEDULE:
11525 case OMP_CLAUSE_UNIFORM:
11526 case OMP_CLAUSE_DEPEND:
11527 case OMP_CLAUSE_NUM_TEAMS:
11528 case OMP_CLAUSE_THREAD_LIMIT:
11529 case OMP_CLAUSE_DEVICE:
11530 case OMP_CLAUSE_DIST_SCHEDULE:
11531 case OMP_CLAUSE_SAFELEN:
11532 case OMP_CLAUSE_SIMDLEN:
11533 case OMP_CLAUSE_ORDERED:
11534 case OMP_CLAUSE_PRIORITY:
11535 case OMP_CLAUSE_GRAINSIZE:
11536 case OMP_CLAUSE_NUM_TASKS:
11537 case OMP_CLAUSE_HINT:
11538 case OMP_CLAUSE_TO_DECLARE:
11539 case OMP_CLAUSE_LINK:
11540 case OMP_CLAUSE_USE_DEVICE_PTR:
11541 case OMP_CLAUSE_IS_DEVICE_PTR:
11542 case OMP_CLAUSE__LOOPTEMP_:
11543 case OMP_CLAUSE__SIMDUID_:
11544 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11545 /* FALLTHRU */
11547 case OMP_CLAUSE_INDEPENDENT:
11548 case OMP_CLAUSE_NOWAIT:
11549 case OMP_CLAUSE_DEFAULT:
11550 case OMP_CLAUSE_UNTIED:
11551 case OMP_CLAUSE_MERGEABLE:
11552 case OMP_CLAUSE_PROC_BIND:
11553 case OMP_CLAUSE_INBRANCH:
11554 case OMP_CLAUSE_NOTINBRANCH:
11555 case OMP_CLAUSE_FOR:
11556 case OMP_CLAUSE_PARALLEL:
11557 case OMP_CLAUSE_SECTIONS:
11558 case OMP_CLAUSE_TASKGROUP:
11559 case OMP_CLAUSE_NOGROUP:
11560 case OMP_CLAUSE_THREADS:
11561 case OMP_CLAUSE_SIMD:
11562 case OMP_CLAUSE_DEFAULTMAP:
11563 case OMP_CLAUSE_AUTO:
11564 case OMP_CLAUSE_SEQ:
11565 case OMP_CLAUSE_TILE:
11566 case OMP_CLAUSE__SIMT_:
11567 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11569 case OMP_CLAUSE_LASTPRIVATE:
11570 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11571 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11572 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11574 case OMP_CLAUSE_COLLAPSE:
11576 int i;
11577 for (i = 0; i < 3; i++)
11578 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11579 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11582 case OMP_CLAUSE_LINEAR:
11583 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11584 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11585 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11586 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11588 case OMP_CLAUSE_ALIGNED:
11589 case OMP_CLAUSE_FROM:
11590 case OMP_CLAUSE_TO:
11591 case OMP_CLAUSE_MAP:
11592 case OMP_CLAUSE__CACHE_:
11593 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11594 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11595 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11597 case OMP_CLAUSE_REDUCTION:
11599 int i;
11600 for (i = 0; i < 5; i++)
11601 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11602 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11605 default:
11606 gcc_unreachable ();
11608 break;
11610 case TARGET_EXPR:
11612 int i, len;
11614 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11615 But, we only want to walk once. */
11616 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11617 for (i = 0; i < len; ++i)
11618 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11619 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11622 case DECL_EXPR:
11623 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11624 defining. We only want to walk into these fields of a type in this
11625 case and not in the general case of a mere reference to the type.
11627 The criterion is as follows: if the field can be an expression, it
11628 must be walked only here. This should be in keeping with the fields
11629 that are directly gimplified in gimplify_type_sizes in order for the
11630 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11631 variable-sized types.
11633 Note that DECLs get walked as part of processing the BIND_EXPR. */
11634 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11636 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11637 if (TREE_CODE (*type_p) == ERROR_MARK)
11638 return NULL_TREE;
11640 /* Call the function for the type. See if it returns anything or
11641 doesn't want us to continue. If we are to continue, walk both
11642 the normal fields and those for the declaration case. */
11643 result = (*func) (type_p, &walk_subtrees, data);
11644 if (result || !walk_subtrees)
11645 return result;
11647 /* But do not walk a pointed-to type since it may itself need to
11648 be walked in the declaration case if it isn't anonymous. */
11649 if (!POINTER_TYPE_P (*type_p))
11651 result = walk_type_fields (*type_p, func, data, pset, lh);
11652 if (result)
11653 return result;
11656 /* If this is a record type, also walk the fields. */
11657 if (RECORD_OR_UNION_TYPE_P (*type_p))
11659 tree field;
11661 for (field = TYPE_FIELDS (*type_p); field;
11662 field = DECL_CHAIN (field))
11664 /* We'd like to look at the type of the field, but we can
11665 easily get infinite recursion. So assume it's pointed
11666 to elsewhere in the tree. Also, ignore things that
11667 aren't fields. */
11668 if (TREE_CODE (field) != FIELD_DECL)
11669 continue;
11671 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11672 WALK_SUBTREE (DECL_SIZE (field));
11673 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11674 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11675 WALK_SUBTREE (DECL_QUALIFIER (field));
11679 /* Same for scalar types. */
11680 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11681 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11682 || TREE_CODE (*type_p) == INTEGER_TYPE
11683 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11684 || TREE_CODE (*type_p) == REAL_TYPE)
11686 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11687 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11690 WALK_SUBTREE (TYPE_SIZE (*type_p));
11691 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11693 /* FALLTHRU */
11695 default:
11696 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11698 int i, len;
11700 /* Walk over all the sub-trees of this operand. */
11701 len = TREE_OPERAND_LENGTH (*tp);
11703 /* Go through the subtrees. We need to do this in forward order so
11704 that the scope of a FOR_EXPR is handled properly. */
11705 if (len)
11707 for (i = 0; i < len - 1; ++i)
11708 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11709 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11712 /* If this is a type, walk the needed fields in the type. */
11713 else if (TYPE_P (*tp))
11714 return walk_type_fields (*tp, func, data, pset, lh);
11715 break;
11718 /* We didn't find what we were looking for. */
11719 return NULL_TREE;
11721 #undef WALK_SUBTREE_TAIL
11723 #undef WALK_SUBTREE
11725 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11727 tree
11728 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11729 walk_tree_lh lh)
11731 tree result;
11733 hash_set<tree> pset;
11734 result = walk_tree_1 (tp, func, data, &pset, lh);
11735 return result;
11739 tree
11740 tree_block (tree t)
11742 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11744 if (IS_EXPR_CODE_CLASS (c))
11745 return LOCATION_BLOCK (t->exp.locus);
11746 gcc_unreachable ();
11747 return NULL;
11750 void
11751 tree_set_block (tree t, tree b)
11753 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11755 if (IS_EXPR_CODE_CLASS (c))
11757 t->exp.locus = set_block (t->exp.locus, b);
11759 else
11760 gcc_unreachable ();
11763 /* Create a nameless artificial label and put it in the current
11764 function context. The label has a location of LOC. Returns the
11765 newly created label. */
11767 tree
11768 create_artificial_label (location_t loc)
11770 tree lab = build_decl (loc,
11771 LABEL_DECL, NULL_TREE, void_type_node);
11773 DECL_ARTIFICIAL (lab) = 1;
11774 DECL_IGNORED_P (lab) = 1;
11775 DECL_CONTEXT (lab) = current_function_decl;
11776 return lab;
11779 /* Given a tree, try to return a useful variable name that we can use
11780 to prefix a temporary that is being assigned the value of the tree.
11781 I.e., given <temp> = &A, return A. */
11783 const char *
11784 get_name (tree t)
11786 tree stripped_decl;
11788 stripped_decl = t;
11789 STRIP_NOPS (stripped_decl);
11790 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11791 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11792 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11794 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11795 if (!name)
11796 return NULL;
11797 return IDENTIFIER_POINTER (name);
11799 else
11801 switch (TREE_CODE (stripped_decl))
11803 case ADDR_EXPR:
11804 return get_name (TREE_OPERAND (stripped_decl, 0));
11805 default:
11806 return NULL;
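/* Illustrative usage sketch (editorial addition): get_name applied to a
   freshly built VAR_DECL and to an ADDR_EXPR taking its address.  The
   identifier "foo" is an arbitrary example name.  */
#if 0
static void
get_name_example (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("foo"), integer_type_node);
  tree addr = build_fold_addr_expr (var);

  /* Both calls should yield "foo": the first sees DECL_NAME directly,
     the second recurses through the ADDR_EXPR.  */
  const char *n1 = get_name (var);
  const char *n2 = get_name (addr);
  gcc_assert (n1 && n2 && strcmp (n1, n2) == 0);
}
#endif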
11811 /* Return true if FNTYPE has a variable argument list. */
11813 bool
11814 stdarg_p (const_tree fntype)
11816 function_args_iterator args_iter;
11817 tree n = NULL_TREE, t;
11819 if (!fntype)
11820 return false;
11822 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11824 n = t;
11827 return n != NULL_TREE && n != void_type_node;
11830 /* Return true if FNTYPE has a prototype. */
11832 bool
11833 prototype_p (const_tree fntype)
11835 tree t;
11837 gcc_assert (fntype != NULL_TREE);
11839 t = TYPE_ARG_TYPES (fntype);
11840 return (t != NULL_TREE);
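/* Illustrative usage sketch (editorial addition): the difference between a
   varargs prototype, a fixed prototype and an unprototyped function type,
   as seen by stdarg_p and prototype_p.  */
#if 0
static void
stdarg_prototype_example (void)
{
  /* int f (int, ...)  */
  tree vararg_fn = build_varargs_function_type_list (integer_type_node,
						     integer_type_node,
						     NULL_TREE);
  /* int g (int)  */
  tree fixed_fn = build_function_type_list (integer_type_node,
					    integer_type_node, NULL_TREE);
  /* int h ()  -- no prototype.  */
  tree unproto_fn = build_function_type (integer_type_node, NULL_TREE);

  gcc_assert (stdarg_p (vararg_fn) && prototype_p (vararg_fn));
  gcc_assert (!stdarg_p (fixed_fn) && prototype_p (fixed_fn));
  gcc_assert (!prototype_p (unproto_fn));
}
#endif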
11843 /* If BLOCK is inlined from an __attribute__((__artificial__))
11844 routine, return a pointer to the location from which it has been
11845 called. */
11846 location_t *
11847 block_nonartificial_location (tree block)
11849 location_t *ret = NULL;
11851 while (block && TREE_CODE (block) == BLOCK
11852 && BLOCK_ABSTRACT_ORIGIN (block))
11854 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11856 while (TREE_CODE (ao) == BLOCK
11857 && BLOCK_ABSTRACT_ORIGIN (ao)
11858 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11859 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11861 if (TREE_CODE (ao) == FUNCTION_DECL)
11863 /* If AO is an artificial inline, point RET to the
11864 call site locus at which it has been inlined and continue
11865 the loop, in case AO's caller is also an artificial
11866 inline. */
11867 if (DECL_DECLARED_INLINE_P (ao)
11868 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11869 ret = &BLOCK_SOURCE_LOCATION (block);
11870 else
11871 break;
11873 else if (TREE_CODE (ao) != BLOCK)
11874 break;
11876 block = BLOCK_SUPERCONTEXT (block);
11878 return ret;
11882 /* If EXP is inlined from an __attribute__((__artificial__))
11883 function, return the location of the original call expression. */
11885 location_t
11886 tree_nonartificial_location (tree exp)
11888 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11890 if (loc)
11891 return *loc;
11892 else
11893 return EXPR_LOCATION (exp);
11897 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11898 nodes. */
11900 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11902 hashval_t
11903 cl_option_hasher::hash (tree x)
11905 const_tree const t = x;
11906 const char *p;
11907 size_t i;
11908 size_t len = 0;
11909 hashval_t hash = 0;
11911 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11913 p = (const char *)TREE_OPTIMIZATION (t);
11914 len = sizeof (struct cl_optimization);
11917 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11918 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11920 else
11921 gcc_unreachable ();
11923 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11924 something else. */
11925 for (i = 0; i < len; i++)
11926 if (p[i])
11927 hash = (hash << 4) ^ ((i << 2) | p[i]);
11929 return hash;
11932 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11933 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11934 node of the same kind. */
11936 bool
11937 cl_option_hasher::equal (tree x, tree y)
11939 const_tree const xt = x;
11940 const_tree const yt = y;
11941 const char *xp;
11942 const char *yp;
11943 size_t len;
11945 if (TREE_CODE (xt) != TREE_CODE (yt))
11946 return 0;
11948 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11950 xp = (const char *)TREE_OPTIMIZATION (xt);
11951 yp = (const char *)TREE_OPTIMIZATION (yt);
11952 len = sizeof (struct cl_optimization);
11955 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11957 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11958 TREE_TARGET_OPTION (yt));
11961 else
11962 gcc_unreachable ();
11964 return (memcmp (xp, yp, len) == 0);
11967 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11969 tree
11970 build_optimization_node (struct gcc_options *opts)
11972 tree t;
11974 /* Use the cache of optimization nodes. */
11976 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11977 opts);
11979 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11980 t = *slot;
11981 if (!t)
11983 /* Insert this one into the hash table. */
11984 t = cl_optimization_node;
11985 *slot = t;
11987 /* Make a new node for next time round. */
11988 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11991 return t;
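/* Illustrative usage sketch (editorial addition): OPTIMIZATION_NODEs are
   interned in cl_option_hash_table, so building a node twice from the same
   option state should yield the very same tree (pointer equality).
   global_options is used purely as an example option set.  */
#if 0
static void
optimization_node_sharing_example (void)
{
  tree n1 = build_optimization_node (&global_options);
  tree n2 = build_optimization_node (&global_options);
  gcc_assert (n1 == n2);
}
#endif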
11994 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11996 tree
11997 build_target_option_node (struct gcc_options *opts)
11999 tree t;
12001 /* Use the cache of optimization nodes. */
12003 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12004 opts);
12006 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12007 t = *slot;
12008 if (!t)
12010 /* Insert this one into the hash table. */
12011 t = cl_target_option_node;
12012 *slot = t;
12014 /* Make a new node for next time round. */
12015 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12018 return t;
12021 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12022 so that they aren't saved during PCH writing. */
12024 void
12025 prepare_target_option_nodes_for_pch (void)
12027 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12028 for (; iter != cl_option_hash_table->end (); ++iter)
12029 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12030 TREE_TARGET_GLOBALS (*iter) = NULL;
12033 /* Determine the "ultimate origin" of a block. The block may be an inlined
12034 instance of an inlined instance of a block which is local to an inline
12035 function, so we have to trace all of the way back through the origin chain
12036 to find out what sort of node actually served as the original seed for the
12037 given block. */
12039 tree
12040 block_ultimate_origin (const_tree block)
12042 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12044 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12045 we're trying to output the abstract instance of this function. */
12046 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12047 return NULL_TREE;
12049 if (immediate_origin == NULL_TREE)
12050 return NULL_TREE;
12051 else
12053 tree ret_val;
12054 tree lookahead = immediate_origin;
12058 ret_val = lookahead;
12059 lookahead = (TREE_CODE (ret_val) == BLOCK
12060 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12062 while (lookahead != NULL && lookahead != ret_val);
12064 /* The block's abstract origin chain may not be the *ultimate* origin of
12065 the block. It could lead to a DECL that has an abstract origin set.
12066 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12067 will give us if it has one). Note that DECL's abstract origins are
12068 supposed to be the most distant ancestor (or so decl_ultimate_origin
12069 claims), so we don't need to loop following the DECL origins. */
12070 if (DECL_P (ret_val))
12071 return DECL_ORIGIN (ret_val);
12073 return ret_val;
12077 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12078 no instruction. */
12080 bool
12081 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12083 /* Do not strip casts into or out of differing address spaces. */
12084 if (POINTER_TYPE_P (outer_type)
12085 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12087 if (!POINTER_TYPE_P (inner_type)
12088 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12089 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12090 return false;
12092 else if (POINTER_TYPE_P (inner_type)
12093 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12095 /* We already know that outer_type is not a pointer with
12096 a non-generic address space. */
12097 return false;
12100 /* Use precision rather than machine mode when we can, which gives
12101 the correct answer even for submode (bit-field) types. */
12102 if ((INTEGRAL_TYPE_P (outer_type)
12103 || POINTER_TYPE_P (outer_type)
12104 || TREE_CODE (outer_type) == OFFSET_TYPE)
12105 && (INTEGRAL_TYPE_P (inner_type)
12106 || POINTER_TYPE_P (inner_type)
12107 || TREE_CODE (inner_type) == OFFSET_TYPE))
12108 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12110 /* Otherwise fall back on comparing machine modes (e.g. for
12111 aggregate types, floats). */
12112 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
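/* Illustrative usage sketch (editorial addition): precision, not signedness,
   decides whether an integral conversion is a no-op.  The second assertion
   assumes a target where long is wider than int (e.g. LP64).  */
#if 0
static void
nop_conversion_example (void)
{
  /* int <-> unsigned int: same precision, hence no instruction.  */
  gcc_assert (tree_nop_conversion_p (unsigned_type_node, integer_type_node));

  /* int -> long: not a no-op when the precisions differ.  */
  if (TYPE_PRECISION (long_integer_type_node)
      != TYPE_PRECISION (integer_type_node))
    gcc_assert (!tree_nop_conversion_p (long_integer_type_node,
					integer_type_node));
}
#endif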
12115 /* Return true iff conversion in EXP generates no instruction. Mark
12116 it inline so that we fully inline into the stripping functions even
12117 though we have two uses of this function. */
12119 static inline bool
12120 tree_nop_conversion (const_tree exp)
12122 tree outer_type, inner_type;
12124 if (location_wrapper_p (exp))
12125 return true;
12126 if (!CONVERT_EXPR_P (exp)
12127 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12128 return false;
12129 if (TREE_OPERAND (exp, 0) == error_mark_node)
12130 return false;
12132 outer_type = TREE_TYPE (exp);
12133 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12135 if (!inner_type)
12136 return false;
12138 return tree_nop_conversion_p (outer_type, inner_type);
12141 /* Return true iff conversion in EXP generates no instruction. Don't
12142 consider conversions changing the signedness. */
12144 static bool
12145 tree_sign_nop_conversion (const_tree exp)
12147 tree outer_type, inner_type;
12149 if (!tree_nop_conversion (exp))
12150 return false;
12152 outer_type = TREE_TYPE (exp);
12153 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12155 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12156 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12159 /* Strip conversions from EXP according to tree_nop_conversion and
12160 return the resulting expression. */
12162 tree
12163 tree_strip_nop_conversions (tree exp)
12165 while (tree_nop_conversion (exp))
12166 exp = TREE_OPERAND (exp, 0);
12167 return exp;
12170 /* Strip conversions from EXP according to tree_sign_nop_conversion
12171 and return the resulting expression. */
12173 tree
12174 tree_strip_sign_nop_conversions (tree exp)
12176 while (tree_sign_nop_conversion (exp))
12177 exp = TREE_OPERAND (exp, 0);
12178 return exp;
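/* Illustrative usage sketch (editorial addition): a sign-changing NOP_EXPR is
   stripped by tree_strip_nop_conversions but kept by
   tree_strip_sign_nop_conversions.  */
#if 0
static void
strip_nop_example (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("i"), integer_type_node);
  tree cast = build1 (NOP_EXPR, unsigned_type_node, var);

  gcc_assert (tree_strip_nop_conversions (cast) == var);
  gcc_assert (tree_strip_sign_nop_conversions (cast) == cast);
}
#endif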
12181 /* Avoid any floating point extensions from EXP. */
12182 tree
12183 strip_float_extensions (tree exp)
12185 tree sub, expt, subt;
12187 /* For a floating point constant, look up the narrowest type that can hold
12188 it properly and handle it like (type)(narrowest_type)constant.
12189 This way we can optimize, for instance, a=a*2.0 where "a" is float
12190 but 2.0 is a double constant. */
12191 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12193 REAL_VALUE_TYPE orig;
12194 tree type = NULL;
12196 orig = TREE_REAL_CST (exp);
12197 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12198 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12199 type = float_type_node;
12200 else if (TYPE_PRECISION (TREE_TYPE (exp))
12201 > TYPE_PRECISION (double_type_node)
12202 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12203 type = double_type_node;
12204 if (type)
12205 return build_real_truncate (type, orig);
12208 if (!CONVERT_EXPR_P (exp))
12209 return exp;
12211 sub = TREE_OPERAND (exp, 0);
12212 subt = TREE_TYPE (sub);
12213 expt = TREE_TYPE (exp);
12215 if (!FLOAT_TYPE_P (subt))
12216 return exp;
12218 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12219 return exp;
12221 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12222 return exp;
12224 return strip_float_extensions (sub);
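/* Illustrative usage sketch (editorial addition): widening a float variable
   to double is peeled off again by strip_float_extensions.  The fallback
   disjunct merely allows for the conversion being folded away entirely.  */
#if 0
static void
strip_float_ext_example (void)
{
  tree f = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("f"), float_type_node);
  tree widened = fold_convert (double_type_node, f);

  gcc_assert (strip_float_extensions (widened) == f || widened == f);
}
#endif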
12227 /* Strip out all handled components that produce invariant
12228 offsets. */
12230 const_tree
12231 strip_invariant_refs (const_tree op)
12233 while (handled_component_p (op))
12235 switch (TREE_CODE (op))
12237 case ARRAY_REF:
12238 case ARRAY_RANGE_REF:
12239 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12240 || TREE_OPERAND (op, 2) != NULL_TREE
12241 || TREE_OPERAND (op, 3) != NULL_TREE)
12242 return NULL;
12243 break;
12245 case COMPONENT_REF:
12246 if (TREE_OPERAND (op, 2) != NULL_TREE)
12247 return NULL;
12248 break;
12250 default:;
12252 op = TREE_OPERAND (op, 0);
12255 return op;
12258 static GTY(()) tree gcc_eh_personality_decl;
12260 /* Return the GCC personality function decl. */
12262 tree
12263 lhd_gcc_personality (void)
12265 if (!gcc_eh_personality_decl)
12266 gcc_eh_personality_decl = build_personality_function ("gcc");
12267 return gcc_eh_personality_decl;
12270 /* TARGET is a call target of GIMPLE call statement
12271 (obtained by gimple_call_fn). Return true if it is
12272 OBJ_TYPE_REF representing a virtual call of a C++ method.
12273 (As opposed to OBJ_TYPE_REF representing objc calls
12274 through a cast where middle-end devirtualization machinery
12275 can't apply.) */
12277 bool
12278 virtual_method_call_p (const_tree target)
12280 if (TREE_CODE (target) != OBJ_TYPE_REF)
12281 return false;
12282 tree t = TREE_TYPE (target);
12283 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12284 t = TREE_TYPE (t);
12285 if (TREE_CODE (t) == FUNCTION_TYPE)
12286 return false;
12287 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12288 /* If we do not have BINFO associated, it means that type was built
12289 without devirtualization enabled. Do not consider this a virtual
12290 call. */
12291 if (!TYPE_BINFO (obj_type_ref_class (target)))
12292 return false;
12293 return true;
12296 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12298 tree
12299 obj_type_ref_class (const_tree ref)
12301 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12302 ref = TREE_TYPE (ref);
12303 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12304 ref = TREE_TYPE (ref);
12305 /* We look for the type THIS points to. ObjC also builds
12306 OBJ_TYPE_REF with non-method calls; their first parameter
12307 ID, however, also corresponds to the class type. */
12308 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12309 || TREE_CODE (ref) == FUNCTION_TYPE);
12310 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12311 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12312 return TREE_TYPE (ref);
12315 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12317 static tree
12318 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12320 unsigned int i;
12321 tree base_binfo, b;
12323 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12324 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12325 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12326 return base_binfo;
12327 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12328 return b;
12329 return NULL;
12332 /* Try to find a base info of BINFO that would have its field decl at offset
12333 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12334 found, return it; otherwise return NULL_TREE. */
12336 tree
12337 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12339 tree type = BINFO_TYPE (binfo);
12341 while (true)
12343 HOST_WIDE_INT pos, size;
12344 tree fld;
12345 int i;
12347 if (types_same_for_odr (type, expected_type))
12348 return binfo;
12349 if (maybe_lt (offset, 0))
12350 return NULL_TREE;
12352 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12354 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12355 continue;
12357 pos = int_bit_position (fld);
12358 size = tree_to_uhwi (DECL_SIZE (fld));
12359 if (known_in_range_p (offset, pos, size))
12360 break;
12362 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12363 return NULL_TREE;
12365 /* Offset 0 indicates the primary base, whose vtable contents are
12366 represented in the binfo for the derived class. */
12367 else if (maybe_ne (offset, 0))
12369 tree found_binfo = NULL, base_binfo;
12370 /* Offsets in BINFO are in bytes relative to the whole structure
12371 while POS is in bits relative to the containing field. */
12372 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12373 / BITS_PER_UNIT);
12375 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12376 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12377 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12379 found_binfo = base_binfo;
12380 break;
12382 if (found_binfo)
12383 binfo = found_binfo;
12384 else
12385 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12386 binfo_offset);
12389 type = TREE_TYPE (fld);
12390 offset -= pos;
12394 /* Returns true if X is a typedef decl. */
12396 bool
12397 is_typedef_decl (const_tree x)
12399 return (x && TREE_CODE (x) == TYPE_DECL
12400 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12403 /* Returns true iff TYPE is a type variant created for a typedef. */
12405 bool
12406 typedef_variant_p (const_tree type)
12408 return is_typedef_decl (TYPE_NAME (type));
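/* Illustrative sketch (editorial addition): front ends record a typedef such
   as "typedef int myint;" as a TYPE_DECL whose DECL_ORIGINAL_TYPE is the
   underlying type; is_typedef_decl and typedef_variant_p simply test for that
   shape.  The decl built here imitates what a front end would create.  */
#if 0
static void
typedef_predicates_example (void)
{
  tree tdef = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
			  get_identifier ("myint"), integer_type_node);
  DECL_ORIGINAL_TYPE (tdef) = integer_type_node;

  gcc_assert (is_typedef_decl (tdef));
}
#endif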
12411 /* Warn about a use of an identifier which was marked deprecated. */
12412 void
12413 warn_deprecated_use (tree node, tree attr)
12415 const char *msg;
12417 if (node == 0 || !warn_deprecated_decl)
12418 return;
12420 if (!attr)
12422 if (DECL_P (node))
12423 attr = DECL_ATTRIBUTES (node);
12424 else if (TYPE_P (node))
12426 tree decl = TYPE_STUB_DECL (node);
12427 if (decl)
12428 attr = lookup_attribute ("deprecated",
12429 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12433 if (attr)
12434 attr = lookup_attribute ("deprecated", attr);
12436 if (attr)
12437 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12438 else
12439 msg = NULL;
12441 bool w;
12442 if (DECL_P (node))
12444 if (msg)
12445 w = warning (OPT_Wdeprecated_declarations,
12446 "%qD is deprecated: %s", node, msg);
12447 else
12448 w = warning (OPT_Wdeprecated_declarations,
12449 "%qD is deprecated", node);
12450 if (w)
12451 inform (DECL_SOURCE_LOCATION (node), "declared here");
12453 else if (TYPE_P (node))
12455 tree what = NULL_TREE;
12456 tree decl = TYPE_STUB_DECL (node);
12458 if (TYPE_NAME (node))
12460 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12461 what = TYPE_NAME (node);
12462 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12463 && DECL_NAME (TYPE_NAME (node)))
12464 what = DECL_NAME (TYPE_NAME (node));
12467 if (decl)
12469 if (what)
12471 if (msg)
12472 w = warning (OPT_Wdeprecated_declarations,
12473 "%qE is deprecated: %s", what, msg);
12474 else
12475 w = warning (OPT_Wdeprecated_declarations,
12476 "%qE is deprecated", what);
12478 else
12480 if (msg)
12481 w = warning (OPT_Wdeprecated_declarations,
12482 "type is deprecated: %s", msg);
12483 else
12484 w = warning (OPT_Wdeprecated_declarations,
12485 "type is deprecated");
12487 if (w)
12488 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12490 else
12492 if (what)
12494 if (msg)
12495 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12496 what, msg);
12497 else
12498 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12500 else
12502 if (msg)
12503 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12504 msg);
12505 else
12506 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12512 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12513 somewhere in it. */
12515 bool
12516 contains_bitfld_component_ref_p (const_tree ref)
12518 while (handled_component_p (ref))
12520 if (TREE_CODE (ref) == COMPONENT_REF
12521 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12522 return true;
12523 ref = TREE_OPERAND (ref, 0);
12526 return false;
12529 /* Try to determine whether a TRY_CATCH expression can fall through.
12530 This is a subroutine of block_may_fallthru. */
12532 static bool
12533 try_catch_may_fallthru (const_tree stmt)
12535 tree_stmt_iterator i;
12537 /* If the TRY block can fall through, the whole TRY_CATCH can
12538 fall through. */
12539 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12540 return true;
12542 i = tsi_start (TREE_OPERAND (stmt, 1));
12543 switch (TREE_CODE (tsi_stmt (i)))
12545 case CATCH_EXPR:
12546 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12547 catch expression and a body. The whole TRY_CATCH may fall
12548 through iff any of the catch bodies falls through. */
12549 for (; !tsi_end_p (i); tsi_next (&i))
12551 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12552 return true;
12554 return false;
12556 case EH_FILTER_EXPR:
12557 /* The exception filter expression only matters if there is an
12558 exception. If the exception does not match EH_FILTER_TYPES,
12559 we will execute EH_FILTER_FAILURE, and we will fall through
12560 if that falls through. If the exception does match
12561 EH_FILTER_TYPES, the stack unwinder will continue up the
12562 stack, so we will not fall through. We don't know whether we
12563 will throw an exception which matches EH_FILTER_TYPES or not,
12564 so we just ignore EH_FILTER_TYPES and assume that we might
12565 throw an exception which doesn't match. */
12566 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12568 default:
12569 /* This case represents statements to be executed when an
12570 exception occurs. Those statements are implicitly followed
12571 by a RESX statement to resume execution after the exception.
12572 So in this case the TRY_CATCH never falls through. */
12573 return false;
12577 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12578 need not be 100% accurate; simply be conservative and return true if we
12579 don't know. This is used only to avoid stupidly generating extra code.
12580 If we're wrong, we'll just delete the extra code later. */
12582 bool
12583 block_may_fallthru (const_tree block)
12585 /* This CONST_CAST is okay because expr_last returns its argument
12586 unmodified and we assign it to a const_tree. */
12587 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12589 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12591 case GOTO_EXPR:
12592 case RETURN_EXPR:
12593 /* Easy cases. If the last statement of the block implies
12594 control transfer, then we can't fall through. */
12595 return false;
12597 case SWITCH_EXPR:
12598 /* If there is a default: label or case labels cover all possible
12599 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12600 to some case label in all cases and all we care is whether the
12601 SWITCH_BODY falls through. */
12602 if (SWITCH_ALL_CASES_P (stmt))
12603 return block_may_fallthru (SWITCH_BODY (stmt));
12604 return true;
12606 case COND_EXPR:
12607 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12608 return true;
12609 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12611 case BIND_EXPR:
12612 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12614 case TRY_CATCH_EXPR:
12615 return try_catch_may_fallthru (stmt);
12617 case TRY_FINALLY_EXPR:
12618 /* The finally clause is always executed after the try clause,
12619 so if it does not fall through, then the try-finally will not
12620 fall through. Otherwise, if the try clause does not fall
12621 through, then when the finally clause falls through it will
12622 resume execution wherever the try clause was going. So the
12623 whole try-finally will only fall through if both the try
12624 clause and the finally clause fall through. */
12625 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12626 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12628 case MODIFY_EXPR:
12629 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12630 stmt = TREE_OPERAND (stmt, 1);
12631 else
12632 return true;
12633 /* FALLTHRU */
12635 case CALL_EXPR:
12636 /* Functions that do not return do not fall through. */
12637 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12639 case CLEANUP_POINT_EXPR:
12640 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12642 case TARGET_EXPR:
12643 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12645 case ERROR_MARK:
12646 return true;
12648 default:
12649 return lang_hooks.block_may_fallthru (stmt);
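/* Illustrative usage sketch (editorial addition): a block ending in a
   RETURN_EXPR cannot fall through, whereas a simple assignment can.  */
#if 0
static void
block_may_fallthru_example (void)
{
  tree ret = build1 (RETURN_EXPR, void_type_node, NULL_TREE);
  gcc_assert (!block_may_fallthru (ret));

  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("x"), integer_type_node);
  tree assign = build2 (MODIFY_EXPR, integer_type_node, var,
			integer_zero_node);
  gcc_assert (block_may_fallthru (assign));
}
#endif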
12653 /* True if we are using EH to handle cleanups. */
12654 static bool using_eh_for_cleanups_flag = false;
12656 /* This routine is called from front ends to indicate eh should be used for
12657 cleanups. */
12658 void
12659 using_eh_for_cleanups (void)
12661 using_eh_for_cleanups_flag = true;
12664 /* Query whether EH is used for cleanups. */
12665 bool
12666 using_eh_for_cleanups_p (void)
12668 return using_eh_for_cleanups_flag;
12671 /* Wrapper for tree_code_name to ensure that tree code is valid */
12672 const char *
12673 get_tree_code_name (enum tree_code code)
12675 const char *invalid = "<invalid tree code>";
12677 if (code >= MAX_TREE_CODES)
12678 return invalid;
12680 return tree_code_name[code];
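/* Illustrative usage sketch (editorial addition): valid codes map to the
   names from all-tree.def; anything out of range yields the fallback
   string.  */
#if 0
static void
tree_code_name_example (void)
{
  gcc_assert (strcmp (get_tree_code_name (INTEGER_CST), "integer_cst") == 0);
  gcc_assert (strcmp (get_tree_code_name ((enum tree_code) MAX_TREE_CODES),
		      "<invalid tree code>") == 0);
}
#endif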
12683 /* Drops the TREE_OVERFLOW flag from T. */
12685 tree
12686 drop_tree_overflow (tree t)
12688 gcc_checking_assert (TREE_OVERFLOW (t));
12690 /* For tree codes with a sharing machinery re-build the result. */
12691 if (poly_int_tree_p (t))
12692 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12694 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12695 and canonicalize the result. */
12696 if (TREE_CODE (t) == VECTOR_CST)
12698 tree_vector_builder builder;
12699 builder.new_unary_operation (TREE_TYPE (t), t, true);
12700 unsigned int count = builder.encoded_nelts ();
12701 for (unsigned int i = 0; i < count; ++i)
12703 tree elt = VECTOR_CST_ELT (t, i);
12704 if (TREE_OVERFLOW (elt))
12705 elt = drop_tree_overflow (elt);
12706 builder.quick_push (elt);
12708 return builder.build ();
12711 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12712 and drop the flag. */
12713 t = copy_node (t);
12714 TREE_OVERFLOW (t) = 0;
12716 /* For constants that contain nested constants, drop the flag
12717 from those as well. */
12718 if (TREE_CODE (t) == COMPLEX_CST)
12720 if (TREE_OVERFLOW (TREE_REALPART (t)))
12721 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12722 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12723 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12726 return t;
12729 /* Given a memory reference expression T, return its base address.
12730 The base address of a memory reference expression is the main
12731 object being referenced. For instance, the base address for
12732 'array[i].fld[j]' is 'array'. You can think of this as stripping
12733 away the offset part from a memory address.
12735 This function calls handled_component_p to strip away all the inner
12736 parts of the memory reference until it reaches the base object. */
12738 tree
12739 get_base_address (tree t)
12741 while (handled_component_p (t))
12742 t = TREE_OPERAND (t, 0);
12744 if ((TREE_CODE (t) == MEM_REF
12745 || TREE_CODE (t) == TARGET_MEM_REF)
12746 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12747 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12749 /* ??? Either the alias oracle or all callers need to properly deal
12750 with WITH_SIZE_EXPRs before we can look through those. */
12751 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12752 return NULL_TREE;
12754 return t;
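/* Illustrative usage sketch (editorial addition): stripping an ARRAY_REF down
   to the underlying array declaration.  The operand types are simplified for
   illustration; only handled_component_p stripping matters here.  */
#if 0
static void
get_base_address_example (void)
{
  tree arr_type = build_array_type (integer_type_node,
				    build_index_type (size_int (9)));
  tree arr = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("a"), arr_type);
  tree ref = build4 (ARRAY_REF, integer_type_node, arr,
		     integer_one_node, NULL_TREE, NULL_TREE);

  /* 'a[1]' is based on 'a'.  */
  gcc_assert (get_base_address (ref) == arr);
}
#endif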
12757 /* Return a tree of sizetype representing the size, in bytes, of the element
12758 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12760 tree
12761 array_ref_element_size (tree exp)
12763 tree aligned_size = TREE_OPERAND (exp, 3);
12764 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12765 location_t loc = EXPR_LOCATION (exp);
12767 /* If a size was specified in the ARRAY_REF, it's the size measured
12768 in alignment units of the element type. So multiply by that value. */
12769 if (aligned_size)
12771 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12772 sizetype from another type of the same width and signedness. */
12773 if (TREE_TYPE (aligned_size) != sizetype)
12774 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12775 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12776 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12779 /* Otherwise, take the size from that of the element type. Substitute
12780 any PLACEHOLDER_EXPR that we have. */
12781 else
12782 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12785 /* Return a tree representing the lower bound of the array mentioned in
12786 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12788 tree
12789 array_ref_low_bound (tree exp)
12791 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12793 /* If a lower bound is specified in EXP, use it. */
12794 if (TREE_OPERAND (exp, 2))
12795 return TREE_OPERAND (exp, 2);
12797 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12798 substituting for a PLACEHOLDER_EXPR as needed. */
12799 if (domain_type && TYPE_MIN_VALUE (domain_type))
12800 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12802 /* Otherwise, return a zero of the appropriate type. */
12803 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12806 /* Return a tree representing the upper bound of the array mentioned in
12807 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12809 tree
12810 array_ref_up_bound (tree exp)
12812 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12814 /* If there is a domain type and it has an upper bound, use it, substituting
12815 for a PLACEHOLDER_EXPR as needed. */
12816 if (domain_type && TYPE_MAX_VALUE (domain_type))
12817 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12819 /* Otherwise fail. */
12820 return NULL_TREE;
12823 /* Returns true if REF is an array reference or a component reference
12824 to an array at the end of a structure.
12825 If this is the case, the array may be allocated larger
12826 than its upper bound implies. */
12828 bool
12829 array_at_struct_end_p (tree ref)
12831 tree atype;
12833 if (TREE_CODE (ref) == ARRAY_REF
12834 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12836 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12837 ref = TREE_OPERAND (ref, 0);
12839 else if (TREE_CODE (ref) == COMPONENT_REF
12840 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12841 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12842 else
12843 return false;
12845 if (TREE_CODE (ref) == STRING_CST)
12846 return false;
12848 tree ref_to_array = ref;
12849 while (handled_component_p (ref))
12851 /* If the reference chain contains a component reference to a
12852 non-union type and another field follows, the reference
12853 is not at the end of a structure. */
12854 if (TREE_CODE (ref) == COMPONENT_REF)
12856 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12858 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12859 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12860 nextf = DECL_CHAIN (nextf);
12861 if (nextf)
12862 return false;
12865 /* If we have a multi-dimensional array we do not consider
12866 a non-innermost dimension as a flex array if the whole
12867 multi-dimensional array is at struct end.
12868 Same for an array of aggregates with a trailing array
12869 member. */
12870 else if (TREE_CODE (ref) == ARRAY_REF)
12871 return false;
12872 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12874 /* If we view an underlying object as something else, then what we
12875 gathered up to now is what we have to rely on. */
12876 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12877 break;
12878 else
12879 gcc_unreachable ();
12881 ref = TREE_OPERAND (ref, 0);
12884 /* The array is now at struct end. Treat flexible arrays as
12885 always subject to extension, even into mere padding constrained by
12886 an underlying decl. */
12887 if (! TYPE_SIZE (atype)
12888 || ! TYPE_DOMAIN (atype)
12889 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12890 return true;
12892 if (TREE_CODE (ref) == MEM_REF
12893 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
12894 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
12896 /* If the reference is based on a declared entity, the size of the array
12897 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12898 if (DECL_P (ref)
12899 && !(flag_unconstrained_commons
12900 && VAR_P (ref) && DECL_COMMON (ref))
12901 && DECL_SIZE_UNIT (ref)
12902 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12904 /* Check whether the array domain covers all of the available
12905 padding. */
12906 poly_int64 offset;
12907 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12908 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12909 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12910 return true;
12911 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12912 return true;
12914 /* If at least one extra element fits it is a flexarray. */
12915 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12916 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12917 + 2)
12918 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12919 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12920 return true;
12922 return false;
12925 return true;
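/* Illustrative example (editorial addition): the source-level layout this
   predicate is designed for is the classic "struct hack", where the trailing
   array may be allocated larger than its declared bound.  */
#if 0
struct str
{
  int len;
  char data[1];	/* References to str->data[i] are treated as being at
		   struct end, so i may validly exceed 0.  */
};
#endif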
12928 /* Return a tree representing the offset, in bytes, of the field referenced
12929 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12931 tree
12932 component_ref_field_offset (tree exp)
12934 tree aligned_offset = TREE_OPERAND (exp, 2);
12935 tree field = TREE_OPERAND (exp, 1);
12936 location_t loc = EXPR_LOCATION (exp);
12938 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12939 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12940 value. */
12941 if (aligned_offset)
12943 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12944 sizetype from another type of the same width and signedness. */
12945 if (TREE_TYPE (aligned_offset) != sizetype)
12946 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12947 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12948 size_int (DECL_OFFSET_ALIGN (field)
12949 / BITS_PER_UNIT));
12952 /* Otherwise, take the offset from that of the field. Substitute
12953 any PLACEHOLDER_EXPR that we have. */
12954 else
12955 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12958 /* Return the machine mode of T. For vectors, returns the mode of the
12959 inner type. The main use case is to feed the result to HONOR_NANS,
12960 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12962 machine_mode
12963 element_mode (const_tree t)
12965 if (!TYPE_P (t))
12966 t = TREE_TYPE (t);
12967 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12968 t = TREE_TYPE (t);
12969 return TYPE_MODE (t);
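/* Illustrative usage sketch (editorial addition): element_mode looks through
   complex (and vector) types, which is exactly what HONOR_NANS needs.  */
#if 0
static void
element_mode_example (void)
{
  gcc_assert (element_mode (double_type_node)
	      == element_mode (complex_double_type_node));
}
#endif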
12972 /* Vector types need to re-check the target flags each time we report
12973 the machine mode. We need to do this because attribute target can
12974 change the result of vector_mode_supported_p and have_regs_of_mode
12975 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12976 change on a per-function basis. */
12977 /* ??? Possibly a better solution is to run through all the types
12978 referenced by a function and re-compute the TYPE_MODE once, rather
12979 than make the TYPE_MODE macro call a function. */
12981 machine_mode
12982 vector_type_mode (const_tree t)
12984 machine_mode mode;
12986 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12988 mode = t->type_common.mode;
12989 if (VECTOR_MODE_P (mode)
12990 && (!targetm.vector_mode_supported_p (mode)
12991 || !have_regs_of_mode[mode]))
12993 scalar_int_mode innermode;
12995 /* For integers, try mapping it to a same-sized scalar mode. */
12996 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12998 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12999 * GET_MODE_BITSIZE (innermode));
13000 scalar_int_mode mode;
13001 if (int_mode_for_size (size, 0).exists (&mode)
13002 && have_regs_of_mode[mode])
13003 return mode;
13006 return BLKmode;
13009 return mode;
13012 /* Verify that basic properties of T match TV and thus T can be a variant of
13013 TV. TV should be the more specified variant (i.e. the main variant). */
13015 static bool
13016 verify_type_variant (const_tree t, tree tv)
13018 /* Type variants can differ by:
13020 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13021 ENCODE_QUAL_ADDR_SPACE.
13022 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13023 in this case some values may not be set in the variant types
13024 (see TYPE_COMPLETE_P checks).
13025 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13026 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13027 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13028 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13029 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13030 this is necessary to make it possible to merge types from different TUs
13031 - arrays, pointers and references may have TREE_TYPE that is a variant
13032 of TREE_TYPE of their main variants.
13033 - aggregates may have a new TYPE_FIELDS list that lists variants of
13034 the main variant TYPE_FIELDS.
13035 - vector types may differ by TYPE_VECTOR_OPAQUE
13038 /* Convenience macro for matching individual fields. */
13039 #define verify_variant_match(flag) \
13040 do { \
13041 if (flag (tv) != flag (t)) \
13043 error ("type variant differs by %s", #flag); \
13044 debug_tree (tv); \
13045 return false; \
13047 } while (false)
13049 /* tree_base checks. */
13051 verify_variant_match (TREE_CODE);
13052 /* FIXME: Ada builds non-artificial variants of artificial types. */
13053 if (TYPE_ARTIFICIAL (tv) && 0)
13054 verify_variant_match (TYPE_ARTIFICIAL);
13055 if (POINTER_TYPE_P (tv))
13056 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13057 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13058 verify_variant_match (TYPE_UNSIGNED);
13059 verify_variant_match (TYPE_PACKED);
13060 if (TREE_CODE (t) == REFERENCE_TYPE)
13061 verify_variant_match (TYPE_REF_IS_RVALUE);
13062 if (AGGREGATE_TYPE_P (t))
13063 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13064 else
13065 verify_variant_match (TYPE_SATURATING);
13066 /* FIXME: This check triggers during libstdc++ builds. */
13067 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13068 verify_variant_match (TYPE_FINAL_P);
13070 /* tree_type_common checks. */
13072 if (COMPLETE_TYPE_P (t))
13074 verify_variant_match (TYPE_MODE);
13075 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13076 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13077 verify_variant_match (TYPE_SIZE);
13078 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13079 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13080 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13082 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13083 TYPE_SIZE_UNIT (tv), 0));
13084 error ("type variant has different TYPE_SIZE_UNIT");
13085 debug_tree (tv);
13086 error ("type variant's TYPE_SIZE_UNIT");
13087 debug_tree (TYPE_SIZE_UNIT (tv));
13088 error ("type's TYPE_SIZE_UNIT");
13089 debug_tree (TYPE_SIZE_UNIT (t));
13090 return false;
13093 verify_variant_match (TYPE_PRECISION);
13094 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13095 if (RECORD_OR_UNION_TYPE_P (t))
13096 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13097 else if (TREE_CODE (t) == ARRAY_TYPE)
13098 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13099 /* During LTO we merge variant lists from different translation units
13100 that may differ by TYPE_CONTEXT, which in turn may point
13101 to TRANSLATION_UNIT_DECL.
13102 Ada also builds variants of types with different TYPE_CONTEXT. */
13103 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13104 verify_variant_match (TYPE_CONTEXT);
13105 verify_variant_match (TYPE_STRING_FLAG);
13106 if (TYPE_ALIAS_SET_KNOWN_P (t))
13108 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13109 debug_tree (tv);
13110 return false;
13113 /* tree_type_non_common checks. */
13115 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13116 and dangles the pointer from time to time. */
13117 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13118 && (in_lto_p || !TYPE_VFIELD (tv)
13119 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13121 error ("type variant has different TYPE_VFIELD");
13122 debug_tree (tv);
13123 return false;
13125 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13126 || TREE_CODE (t) == INTEGER_TYPE
13127 || TREE_CODE (t) == BOOLEAN_TYPE
13128 || TREE_CODE (t) == REAL_TYPE
13129 || TREE_CODE (t) == FIXED_POINT_TYPE)
13131 verify_variant_match (TYPE_MAX_VALUE);
13132 verify_variant_match (TYPE_MIN_VALUE);
13134 if (TREE_CODE (t) == METHOD_TYPE)
13135 verify_variant_match (TYPE_METHOD_BASETYPE);
13136 if (TREE_CODE (t) == OFFSET_TYPE)
13137 verify_variant_match (TYPE_OFFSET_BASETYPE);
13138 if (TREE_CODE (t) == ARRAY_TYPE)
13139 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13140 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13141 or even type's main variant. This is needed to make bootstrap pass
13142 and the bug seems new in GCC 5.
13143 C++ FE should be updated to make this consistent and we should check
13144 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13145 is a match with main variant.
13147 Also disable the check for Java for now because of a parser hack that builds
13148 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
13149 of the copies. */
13150 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13151 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13152 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13153 Since there is no cheap way to tell a C++ type from a Java type without LTO, do checking
13154 at LTO time only. */
13155 && (in_lto_p && odr_type_p (t)))
13157 error ("type variant has different TYPE_BINFO");
13158 debug_tree (tv);
13159 error ("type variant's TYPE_BINFO");
13160 debug_tree (TYPE_BINFO (tv));
13161 error ("type's TYPE_BINFO");
13162 debug_tree (TYPE_BINFO (t));
13163 return false;
13166 /* Check various uses of TYPE_VALUES_RAW. */
13167 if (TREE_CODE (t) == ENUMERAL_TYPE)
13168 verify_variant_match (TYPE_VALUES);
13169 else if (TREE_CODE (t) == ARRAY_TYPE)
13170 verify_variant_match (TYPE_DOMAIN);
13171 /* Permit incomplete variants of complete type. While FEs may complete
13172 all variants, this does not happen for C++ templates in all cases. */
13173 else if (RECORD_OR_UNION_TYPE_P (t)
13174 && COMPLETE_TYPE_P (t)
13175 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13177 tree f1, f2;
13179 /* Fortran builds qualified variants as new records with items of
13180 qualified type. Verify that they look the same. */
13181 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13182 f1 && f2;
13183 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13184 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13185 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13186 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13187 /* FIXME: gfc_nonrestricted_type builds all types as variants
13188 with exception of pointer types. It deeply copies the type
13189 which means that we may end up with a variant type
13190 referring to a non-variant pointer. We may change it to
13191 produce types as variants, too, like
13192 objc_get_protocol_qualified_type does. */
13193 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13194 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13195 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13196 break;
13197 if (f1 || f2)
13199 error ("type variant has different TYPE_FIELDS");
13200 debug_tree (tv);
13201 error ("first mismatch is field");
13202 debug_tree (f1);
13203 error ("and field");
13204 debug_tree (f2);
13205 return false;
13208 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13209 verify_variant_match (TYPE_ARG_TYPES);
13210 /* For C++ the qualified variant of an array type is really an array type
13211 of the qualified TREE_TYPE.
13212 ObjC builds variants of pointer types where the pointed-to type is a variant,
13213 too, in objc_get_protocol_qualified_type. */
13214 if (TREE_TYPE (t) != TREE_TYPE (tv)
13215 && ((TREE_CODE (t) != ARRAY_TYPE
13216 && !POINTER_TYPE_P (t))
13217 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13218 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13220 error ("type variant has different TREE_TYPE");
13221 debug_tree (tv);
13222 error ("type variant's TREE_TYPE");
13223 debug_tree (TREE_TYPE (tv));
13224 error ("type's TREE_TYPE");
13225 debug_tree (TREE_TYPE (t));
13226 return false;
13228 if (type_with_alias_set_p (t)
13229 && !gimple_canonical_types_compatible_p (t, tv, false))
13231 error ("type is not compatible with its variant");
13232 debug_tree (tv);
13233 error ("type variant's TREE_TYPE");
13234 debug_tree (TREE_TYPE (tv));
13235 error ("type's TREE_TYPE");
13236 debug_tree (TREE_TYPE (t));
13237 return false;
13239 return true;
13240 #undef verify_variant_match
13244 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13245 the middle-end types_compatible_p function. It needs to avoid
13246 claiming types are different for types that should be treated
13247 the same with respect to TBAA. Canonical types are also used
13248 for IL consistency checks via the useless_type_conversion_p
13249 predicate which does not handle all type kinds itself but falls
13250 back to pointer-comparison of TYPE_CANONICAL for aggregates
13251 for example. */
13253 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13254 type calculation because we need to allow inter-operability between signed
13255 and unsigned variants. */
13257 bool
13258 type_with_interoperable_signedness (const_tree type)
13260 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13261 signed char and unsigned char. Similarly, the Fortran FE builds
13262 C_SIZE_T as a signed type, while C defines it unsigned. */
13264 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13265 == INTEGER_TYPE
13266 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13267 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13270 /* Return true iff T1 and T2 are structurally identical for what
13271 TBAA is concerned.
13272 This function is used both by lto.c canonical type merging and by the
13273 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13274 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13275 only for LTO because only in these cases TYPE_CANONICAL equivalence
13276 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13278 bool
13279 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13280 bool trust_type_canonical)
13282 /* Type variants should be same as the main variant. When not doing sanity
13283 checking to verify this fact, go to main variants and save some work. */
13284 if (trust_type_canonical)
13286 t1 = TYPE_MAIN_VARIANT (t1);
13287 t2 = TYPE_MAIN_VARIANT (t2);
13290 /* Check first for the obvious case of pointer identity. */
13291 if (t1 == t2)
13292 return true;
13294 /* Check that we have two types to compare. */
13295 if (t1 == NULL_TREE || t2 == NULL_TREE)
13296 return false;
13298 /* We consider complete types always compatible with incomplete types.
13299 This does not make sense for canonical type calculation and thus we
13300 need to ensure that we are never called on it.
13302 FIXME: For more correctness the function probably should have three modes
13303 1) mode assuming that types are complete matching their structure
13304 2) mode allowing incomplete types but producing equivalence classes
13305 and thus ignoring all info from complete types
13306 3) mode allowing incomplete types to match complete but checking
13307 compatibility between complete types.
13309 1 and 2 can be used for canonical type calculation. 3 is the real
13310 definition of type compatibility that can be used e.g. for warnings during
13311 declaration merging. */
13313 gcc_assert (!trust_type_canonical
13314 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13315 /* If the types have been previously registered and found equal
13316 they still are. */
13318 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13319 && trust_type_canonical)
13321 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13322 they are always NULL, but they are set to non-NULL for types
13323 constructed by build_pointer_type and variants. In this case the
13324 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13325 all pointers are considered equal). Be sure not to return false
13326 negatives. */
13327 gcc_checking_assert (canonical_type_used_p (t1)
13328 && canonical_type_used_p (t2));
13329 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13332 /* Can't be the same type if the types don't have the same code. */
13333 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13334 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13335 return false;
13337 /* Qualifiers do not matter for canonical type comparison purposes. */
13339 /* Void types and nullptr types are always the same. */
13340 if (TREE_CODE (t1) == VOID_TYPE
13341 || TREE_CODE (t1) == NULLPTR_TYPE)
13342 return true;
13344 /* Can't be the same type if they have different mode. */
13345 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13346 return false;
13348 /* Non-aggregate types can be handled cheaply. */
13349 if (INTEGRAL_TYPE_P (t1)
13350 || SCALAR_FLOAT_TYPE_P (t1)
13351 || FIXED_POINT_TYPE_P (t1)
13352 || TREE_CODE (t1) == VECTOR_TYPE
13353 || TREE_CODE (t1) == COMPLEX_TYPE
13354 || TREE_CODE (t1) == OFFSET_TYPE
13355 || POINTER_TYPE_P (t1))
13357 /* Can't be the same type if they have different precision. */
13358 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13359 return false;
13361 /* In some cases the signed and unsigned types are required to be
13362 inter-operable. */
13363 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13364 && !type_with_interoperable_signedness (t1))
13365 return false;
13367 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13368 interoperable with "signed char". Unless all frontends are revisited
13369 to agree on these types, we must ignore the flag completely. */
13371 /* The Fortran standard defines the C_PTR type, which is compatible with every
13372 C pointer. For this reason we need to glob all pointers into one.
13373 Still, pointers in different address spaces are not compatible. */
13374 if (POINTER_TYPE_P (t1))
13376 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13377 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13378 return false;
13381 /* Tail-recurse to components. */
13382 if (TREE_CODE (t1) == VECTOR_TYPE
13383 || TREE_CODE (t1) == COMPLEX_TYPE)
13384 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13385 TREE_TYPE (t2),
13386 trust_type_canonical);
13388 return true;
13391 /* Do type-specific comparisons. */
13392 switch (TREE_CODE (t1))
13394 case ARRAY_TYPE:
13395 /* Array types are the same if the element types are the same and
13396 the number of elements is the same. */
13397 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13398 trust_type_canonical)
13399 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13400 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13401 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13402 return false;
13403 else
13405 tree i1 = TYPE_DOMAIN (t1);
13406 tree i2 = TYPE_DOMAIN (t2);
13408 /* For an incomplete external array, the type domain can be
13409 NULL_TREE. Check this condition also. */
13410 if (i1 == NULL_TREE && i2 == NULL_TREE)
13411 return true;
13412 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13413 return false;
13414 else
13416 tree min1 = TYPE_MIN_VALUE (i1);
13417 tree min2 = TYPE_MIN_VALUE (i2);
13418 tree max1 = TYPE_MAX_VALUE (i1);
13419 tree max2 = TYPE_MAX_VALUE (i2);
13421 /* The minimum/maximum values have to be the same. */
13422 if ((min1 == min2
13423 || (min1 && min2
13424 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13425 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13426 || operand_equal_p (min1, min2, 0))))
13427 && (max1 == max2
13428 || (max1 && max2
13429 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13430 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13431 || operand_equal_p (max1, max2, 0)))))
13432 return true;
13433 else
13434 return false;
13438 case METHOD_TYPE:
13439 case FUNCTION_TYPE:
13440 /* Function types are the same if the return type and argument types
13441 are the same. */
13442 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13443 trust_type_canonical))
13444 return false;
13446 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13447 return true;
13448 else
13450 tree parms1, parms2;
13452 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13453 parms1 && parms2;
13454 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13456 if (!gimple_canonical_types_compatible_p
13457 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13458 trust_type_canonical))
13459 return false;
13462 if (parms1 || parms2)
13463 return false;
13465 return true;
13468 case RECORD_TYPE:
13469 case UNION_TYPE:
13470 case QUAL_UNION_TYPE:
13472 tree f1, f2;
13474 /* Don't try to compare variants of an incomplete type, before
13475 TYPE_FIELDS has been copied around. */
13476 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13477 return true;
13480 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13481 return false;
13483 /* For aggregate types, all the fields must be the same. */
13484 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13485 f1 || f2;
13486 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13488 /* Skip non-fields and zero-sized fields. */
13489 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13490 || (DECL_SIZE (f1)
13491 && integer_zerop (DECL_SIZE (f1)))))
13492 f1 = TREE_CHAIN (f1);
13493 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13494 || (DECL_SIZE (f2)
13495 && integer_zerop (DECL_SIZE (f2)))))
13496 f2 = TREE_CHAIN (f2);
13497 if (!f1 || !f2)
13498 break;
13499 /* The fields must have the same offset, type and nonaddressable flag. */
13500 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13501 || !gimple_compare_field_offset (f1, f2)
13502 || !gimple_canonical_types_compatible_p
13503 (TREE_TYPE (f1), TREE_TYPE (f2),
13504 trust_type_canonical))
13505 return false;
13508 /* If one aggregate has more fields than the other, they
13509 are not the same. */
13510 if (f1 || f2)
13511 return false;
13513 return true;
13516 default:
13517 /* Consider all types with language specific trees in them mutually
13518 compatible. This is executed only from verify_type and false
13519 positives can be tolerated. */
13520 gcc_assert (!in_lto_p);
13521 return true;
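/* Illustrative sketch, not part of the original source: callers that want a
   purely structural answer, ignoring TYPE_CANONICAL, pass
   trust_type_canonical = false, e.g. (t1 and t2 being hypothetical types):

     if (gimple_canonical_types_compatible_p (t1, t2, false))
       ;  // t1 and t2 are structurally equivalent for canonical-type purposes

   verify_type below uses this mode to cross-check a type against its
   TYPE_CANONICAL.  */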
13525 /* Verify type T. */
13527 void
13528 verify_type (const_tree t)
13530 bool error_found = false;
13531 tree mv = TYPE_MAIN_VARIANT (t);
13532 if (!mv)
13534 error ("Main variant is not defined");
13535 error_found = true;
13537 else if (mv != TYPE_MAIN_VARIANT (mv))
13539 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13540 debug_tree (mv);
13541 error_found = true;
13543 else if (t != mv && !verify_type_variant (t, mv))
13544 error_found = true;
13546 tree ct = TYPE_CANONICAL (t);
13547 if (!ct)
13549 else if (TYPE_CANONICAL (t) != ct)
13551 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13552 debug_tree (ct);
13553 error_found = true;
13555 /* Method and function types cannot be used to address memory and thus
13556 TYPE_CANONICAL really matters only for determining useless conversions.
13558 FIXME: the C++ FE produces declarations of builtin functions that are not
13559 compatible with main variants. */
13560 else if (TREE_CODE (t) == FUNCTION_TYPE)
13562 else if (t != ct
13563 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13564 with variably sized arrays because their sizes may have been
13565 gimplified to different variables. */
13566 && !variably_modified_type_p (ct, NULL)
13567 && !gimple_canonical_types_compatible_p (t, ct, false))
13569 error ("TYPE_CANONICAL is not compatible");
13570 debug_tree (ct);
13571 error_found = true;
13574 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13575 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13577 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13578 debug_tree (ct);
13579 error_found = true;
13581 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13583 error ("TYPE_CANONICAL of main variant is not main variant");
13584 debug_tree (ct);
13585 debug_tree (TYPE_MAIN_VARIANT (ct));
13586 error_found = true;
13590 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13591 if (RECORD_OR_UNION_TYPE_P (t))
13593 /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13594 and dangles the pointer from time to time. */
13595 if (TYPE_VFIELD (t)
13596 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13597 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13599 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13600 debug_tree (TYPE_VFIELD (t));
13601 error_found = true;
13604 else if (TREE_CODE (t) == POINTER_TYPE)
13606 if (TYPE_NEXT_PTR_TO (t)
13607 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13609 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13610 debug_tree (TYPE_NEXT_PTR_TO (t));
13611 error_found = true;
13614 else if (TREE_CODE (t) == REFERENCE_TYPE)
13616 if (TYPE_NEXT_REF_TO (t)
13617 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13619 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13620 debug_tree (TYPE_NEXT_REF_TO (t));
13621 error_found = true;
13624 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13625 || TREE_CODE (t) == FIXED_POINT_TYPE)
13627 /* FIXME: The following check should pass:
13628 useless_type_conversion_p (const_cast <tree> (t),
13629 TREE_TYPE (TYPE_MIN_VALUE (t)))
13630 but does not for C sizetypes in LTO. */
13633 /* Check various uses of TYPE_MAX_VALUE_RAW. */
13634 if (RECORD_OR_UNION_TYPE_P (t))
13636 if (!TYPE_BINFO (t))
13638 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13640 error ("TYPE_BINFO is not TREE_BINFO");
13641 debug_tree (TYPE_BINFO (t));
13642 error_found = true;
13644 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13646 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13647 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13648 error_found = true;
13651 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13653 if (TYPE_METHOD_BASETYPE (t)
13654 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13655 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13657 error ("TYPE_METHOD_BASETYPE is not record nor union");
13658 debug_tree (TYPE_METHOD_BASETYPE (t));
13659 error_found = true;
13662 else if (TREE_CODE (t) == OFFSET_TYPE)
13664 if (TYPE_OFFSET_BASETYPE (t)
13665 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13666 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13668 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13669 debug_tree (TYPE_OFFSET_BASETYPE (t));
13670 error_found = true;
13673 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13674 || TREE_CODE (t) == FIXED_POINT_TYPE)
13676 /* FIXME: The following check should pass:
13677 useless_type_conversion_p (const_cast <tree> (t),
13678 TREE_TYPE (TYPE_MAX_VALUE (t)))
13679 but does not for C sizetypes in LTO. */
13681 else if (TREE_CODE (t) == ARRAY_TYPE)
13683 if (TYPE_ARRAY_MAX_SIZE (t)
13684 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13686 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13687 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13688 error_found = true;
13691 else if (TYPE_MAX_VALUE_RAW (t))
13693 error ("TYPE_MAX_VALUE_RAW non-NULL");
13694 debug_tree (TYPE_MAX_VALUE_RAW (t));
13695 error_found = true;
13698 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13700 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13701 debug_tree (TYPE_LANG_SLOT_1 (t));
13702 error_found = true;
13705 /* Check various uses of TYPE_VALUES_RAW. */
13706 if (TREE_CODE (t) == ENUMERAL_TYPE)
13707 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13709 tree value = TREE_VALUE (l);
13710 tree name = TREE_PURPOSE (l);
13712 /* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13713 CONST_DECL of ENUMERAL_TYPE. */
13714 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13716 error ("Enum value is not CONST_DECL or INTEGER_CST");
13717 debug_tree (value);
13718 debug_tree (name);
13719 error_found = true;
13721 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13722 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13724 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13725 debug_tree (value);
13726 debug_tree (name);
13727 error_found = true;
13729 if (TREE_CODE (name) != IDENTIFIER_NODE)
13731 error ("Enum value name is not IDENTIFIER_NODE");
13732 debug_tree (value);
13733 debug_tree (name);
13734 error_found = true;
13737 else if (TREE_CODE (t) == ARRAY_TYPE)
13739 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13741 error ("Array TYPE_DOMAIN is not integer type");
13742 debug_tree (TYPE_DOMAIN (t));
13743 error_found = true;
13746 else if (RECORD_OR_UNION_TYPE_P (t))
13748 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13750 error ("TYPE_FIELDS defined in incomplete type");
13751 error_found = true;
13753 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13755 /* TODO: verify properties of decls. */
13756 if (TREE_CODE (fld) == FIELD_DECL)
13758 else if (TREE_CODE (fld) == TYPE_DECL)
13760 else if (TREE_CODE (fld) == CONST_DECL)
13762 else if (VAR_P (fld))
13764 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13766 else if (TREE_CODE (fld) == USING_DECL)
13768 else if (TREE_CODE (fld) == FUNCTION_DECL)
13770 else
13772 error ("Wrong tree in TYPE_FIELDS list");
13773 debug_tree (fld);
13774 error_found = true;
13778 else if (TREE_CODE (t) == INTEGER_TYPE
13779 || TREE_CODE (t) == BOOLEAN_TYPE
13780 || TREE_CODE (t) == OFFSET_TYPE
13781 || TREE_CODE (t) == REFERENCE_TYPE
13782 || TREE_CODE (t) == NULLPTR_TYPE
13783 || TREE_CODE (t) == POINTER_TYPE)
13785 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13787 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13788 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13789 error_found = true;
13791 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13793 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13794 debug_tree (TYPE_CACHED_VALUES (t));
13795 error_found = true;
13797 /* Verify just enough of the cache to ensure that no one copied it to a new
13798 type. All copying should go through copy_node, which should clear it. */
13799 else if (TYPE_CACHED_VALUES_P (t))
13801 int i;
13802 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13803 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13804 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13806 error ("wrong TYPE_CACHED_VALUES entry");
13807 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13808 error_found = true;
13809 break;
13813 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13814 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13816 /* C++ FE uses TREE_PURPOSE to store initial values. */
13817 if (TREE_PURPOSE (l) && in_lto_p)
13819 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13820 debug_tree (l);
13821 error_found = true;
13823 if (!TYPE_P (TREE_VALUE (l)))
13825 error ("Wrong entry in TYPE_ARG_TYPES list");
13826 debug_tree (l);
13827 error_found = true;
13830 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13832 error ("TYPE_VALUES_RAW field is non-NULL");
13833 debug_tree (TYPE_VALUES_RAW (t));
13834 error_found = true;
13836 if (TREE_CODE (t) != INTEGER_TYPE
13837 && TREE_CODE (t) != BOOLEAN_TYPE
13838 && TREE_CODE (t) != OFFSET_TYPE
13839 && TREE_CODE (t) != REFERENCE_TYPE
13840 && TREE_CODE (t) != NULLPTR_TYPE
13841 && TREE_CODE (t) != POINTER_TYPE
13842 && TYPE_CACHED_VALUES_P (t))
13844 error ("TYPE_CACHED_VALUES_P is set while it should not");
13845 error_found = true;
13847 if (TYPE_STRING_FLAG (t)
13848 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13850 error ("TYPE_STRING_FLAG is set on wrong type code");
13851 error_found = true;
13854 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13855 the TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13856 of a type. */
13857 if (TREE_CODE (t) == METHOD_TYPE
13858 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13860 error ("TYPE_METHOD_BASETYPE is not main variant");
13861 error_found = true;
13864 if (error_found)
13866 debug_tree (const_cast <tree> (t));
13867 internal_error ("verify_type failed");
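/* Illustrative sketch, not part of the original source: verify_type is meant
   to be called from self-checking code on a single suspect type, e.g.

     verify_type (TREE_TYPE (decl));   // decl is a hypothetical declaration

   On failure it prints the offending trees via debug_tree and aborts through
   internal_error, so it is intended for checking and debugging rather than
   normal compilation.  */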
13872 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
13873 always positive or zero, 2 if ARG is known to be always negative, or 3 if
13874 ARG may be positive or negative. */
13877 get_range_pos_neg (tree arg)
13879 if (arg == error_mark_node)
13880 return 3;
13882 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13883 int cnt = 0;
13884 if (TREE_CODE (arg) == INTEGER_CST)
13886 wide_int w = wi::sext (wi::to_wide (arg), prec);
13887 if (wi::neg_p (w))
13888 return 2;
13889 else
13890 return 1;
13892 while (CONVERT_EXPR_P (arg)
13893 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13894 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13896 arg = TREE_OPERAND (arg, 0);
13897 /* A narrower value zero-extended into a wider type
13898 will always result in a non-negative value. */
13899 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13900 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13901 return 1;
13902 prec = TYPE_PRECISION (TREE_TYPE (arg));
13903 if (++cnt > 30)
13904 return 3;
13907 if (TREE_CODE (arg) != SSA_NAME)
13908 return 3;
13909 wide_int arg_min, arg_max;
13910 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
13912 gimple *g = SSA_NAME_DEF_STMT (arg);
13913 if (is_gimple_assign (g)
13914 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13916 tree t = gimple_assign_rhs1 (g);
13917 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13918 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13920 if (TYPE_UNSIGNED (TREE_TYPE (t))
13921 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13922 return 1;
13923 prec = TYPE_PRECISION (TREE_TYPE (t));
13924 arg = t;
13925 if (++cnt > 30)
13926 return 3;
13927 continue;
13930 return 3;
13932 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13934 /* For unsigned values, the "positive" range comes
13935 below the "negative" range. */
13936 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13937 return 1;
13938 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13939 return 2;
13941 else
13943 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13944 return 1;
13945 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13946 return 2;
13948 return 3;
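/* Illustrative sketch, not part of the original source: for constant
   arguments the classification is immediate, e.g.

     get_range_pos_neg (build_int_cst (integer_type_node, 7));    // 1
     get_range_pos_neg (build_int_cst (integer_type_node, -5));   // 2

   For SSA names the result additionally depends on the recorded value
   range and on any conversions feeding the name.  */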
13954 /* Return true if ARG is marked with the nonnull attribute in the
13955 current function signature. */
13957 bool
13958 nonnull_arg_p (const_tree arg)
13960 tree t, attrs, fntype;
13961 unsigned HOST_WIDE_INT arg_num;
13963 gcc_assert (TREE_CODE (arg) == PARM_DECL
13964 && (POINTER_TYPE_P (TREE_TYPE (arg))
13965 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13967 /* The static chain decl is always non-NULL. */
13968 if (arg == cfun->static_chain_decl)
13969 return true;
13971 /* The THIS argument of a method is always non-NULL. */
13972 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13973 && arg == DECL_ARGUMENTS (cfun->decl)
13974 && flag_delete_null_pointer_checks)
13975 return true;
13977 /* Values passed by reference are always non-NULL. */
13978 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13979 && flag_delete_null_pointer_checks)
13980 return true;
13982 fntype = TREE_TYPE (cfun->decl);
13983 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13985 attrs = lookup_attribute ("nonnull", attrs);
13987 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13988 if (attrs == NULL_TREE)
13989 return false;
13991 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13992 if (TREE_VALUE (attrs) == NULL_TREE)
13993 return true;
13995 /* Get the position number for ARG in the function signature. */
13996 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13998 t = DECL_CHAIN (t), arg_num++)
14000 if (t == arg)
14001 break;
14004 gcc_assert (t == arg);
14006 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14007 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14009 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14010 return true;
14014 return false;
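/* Illustrative sketch, not part of the original source: a pass scanning the
   current function's parameters could use nonnull_arg_p roughly as follows
   (PARM is just a loop variable here):

     for (tree parm = DECL_ARGUMENTS (cfun->decl);
          parm; parm = DECL_CHAIN (parm))
       if (POINTER_TYPE_P (TREE_TYPE (parm)) && nonnull_arg_p (parm))
         ;  // dereferences of PARM need no NULL check

   The gcc_assert above requires ARG to be a PARM_DECL of pointer or offset
   type, hence the POINTER_TYPE_P filter in this sketch.  */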
14017 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14018 information. */
14020 location_t
14021 set_block (location_t loc, tree block)
14023 location_t pure_loc = get_pure_location (loc);
14024 source_range src_range = get_range_from_loc (line_table, loc);
14025 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14028 location_t
14029 set_source_range (tree expr, location_t start, location_t finish)
14031 source_range src_range;
14032 src_range.m_start = start;
14033 src_range.m_finish = finish;
14034 return set_source_range (expr, src_range);
14037 location_t
14038 set_source_range (tree expr, source_range src_range)
14040 if (!EXPR_P (expr))
14041 return UNKNOWN_LOCATION;
14043 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14044 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14045 pure_loc,
14046 src_range,
14047 NULL);
14048 SET_EXPR_LOCATION (expr, adhoc);
14049 return adhoc;
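/* Illustrative sketch, not part of the original source: a front end that has
   just parsed an expression EXP spanning locations START and FINISH (all
   three hypothetical) can record the full range with

     set_source_range (exp, start, finish);

   which folds the range into an ad-hoc location on EXP, as implemented
   above.  */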
14052 /* Return EXPR, potentially wrapped in a node that records location LOC,
14053 if !CAN_HAVE_LOCATION_P (expr).
14055 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14056 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14058 Wrapper nodes can be identified using location_wrapper_p. */
14060 tree
14061 maybe_wrap_with_location (tree expr, location_t loc)
14063 if (expr == NULL)
14064 return NULL;
14065 if (loc == UNKNOWN_LOCATION)
14066 return expr;
14067 if (CAN_HAVE_LOCATION_P (expr))
14068 return expr;
14069 /* We should only be adding wrappers for constants and for decls,
14070 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14071 gcc_assert (CONSTANT_CLASS_P (expr)
14072 || DECL_P (expr)
14073 || EXCEPTIONAL_CLASS_P (expr));
14075 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14076 any impact of the wrapper nodes. */
14077 if (EXCEPTIONAL_CLASS_P (expr))
14078 return expr;
14080 tree_code code
14081 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14082 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14083 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14084 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14085 /* Mark this node as being a wrapper. */
14086 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14087 return wrapper;
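/* Illustrative sketch, not part of the original source: wrapping an integer
   constant produces a NON_LVALUE_EXPR wrapper that carries the location but
   is transparent to STRIP_NOPS, e.g. (LOC being a hypothetical location):

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, loc);
     // location_wrapper_p (wrapped) is true
     // EXPR_LOCATION (wrapped) == loc
     // tree_strip_any_location_wrapper (wrapped) == cst

   The selftests in test_location_wrappers below exercise exactly these
   properties.  */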
14090 /* Return the name of combined function FN, for debugging purposes. */
14092 const char *
14093 combined_fn_name (combined_fn fn)
14095 if (builtin_fn_p (fn))
14097 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14098 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14100 else
14101 return internal_fn_name (as_internal_fn (fn));
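/* Illustrative sketch, not part of the original source: combined_fn folds
   built-in and internal functions into a single enum, so diagnostics can
   name either kind uniformly, e.g.

     combined_fn_name (as_combined_fn (BUILT_IN_MEMCPY));   // "memcpy"
     combined_fn_name (as_combined_fn (IFN_SQRT));          // internal fn name

   as_combined_fn accepts both built_in_function and internal_fn values.  */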
14104 /* Return a bitmap with a bit set corresponding to each argument in
14105 a function call type FNTYPE declared with attribute nonnull,
14106 or null if none of the function's arguments are nonnull. The caller
14107 must free the bitmap. */
14109 bitmap
14110 get_nonnull_args (const_tree fntype)
14112 if (fntype == NULL_TREE)
14113 return NULL;
14115 tree attrs = TYPE_ATTRIBUTES (fntype);
14116 if (!attrs)
14117 return NULL;
14119 bitmap argmap = NULL;
14121 /* A function declaration can specify multiple nonnull attributes, each
14122 with zero or more arguments. The loop below creates a bitmap
14123 representing the union of all their arguments. An empty (but non-null)
14124 bitmap means that all arguments have been declared nonnull. */
14125 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14127 attrs = lookup_attribute ("nonnull", attrs);
14128 if (!attrs)
14129 break;
14131 if (!argmap)
14132 argmap = BITMAP_ALLOC (NULL);
14134 if (!TREE_VALUE (attrs))
14136 /* Clear the bitmap in case a previous attribute nonnull
14137 set it and this one overrides it for all arguments. */
14138 bitmap_clear (argmap);
14139 return argmap;
14142 /* Iterate over the indices of the arguments declared nonnull
14143 and set a bit for each. */
14144 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14146 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14147 bitmap_set_bit (argmap, val);
14151 return argmap;
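/* Illustrative sketch, not part of the original source: a caller checking
   whether the zero-based argument I of a function with type FNTYPE (both
   hypothetical) is declared nonnull could do

     bitmap nonnull = get_nonnull_args (fntype);
     if (nonnull
         && (bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, i)))
       ;  // passing a NULL pointer as argument I is undefined
     BITMAP_FREE (nonnull);

   remembering that an empty but non-null bitmap means every argument is
   nonnull.  */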
14154 /* Returns true if TYPE is an empty type, i.e. a structure, union, or array
14155 type with no members other than (recursively) empty ones and padding. */
14157 static bool
14158 default_is_empty_type (tree type)
14160 if (RECORD_OR_UNION_TYPE_P (type))
14162 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14163 if (TREE_CODE (field) == FIELD_DECL
14164 && !DECL_PADDING_P (field)
14165 && !default_is_empty_type (TREE_TYPE (field)))
14166 return false;
14167 return true;
14169 else if (TREE_CODE (type) == ARRAY_TYPE)
14170 return (integer_minus_onep (array_type_nelts (type))
14171 || TYPE_DOMAIN (type) == NULL_TREE
14172 || default_is_empty_type (TREE_TYPE (type)));
14173 return false;
14176 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14177 that shouldn't be passed via the stack. */
14179 bool
14180 default_is_empty_record (const_tree type)
14182 if (!abi_version_at_least (12))
14183 return false;
14185 if (type == error_mark_node)
14186 return false;
14188 if (TREE_ADDRESSABLE (type))
14189 return false;
14191 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
14194 /* Like int_size_in_bytes, but handle empty records specially. */
14196 HOST_WIDE_INT
14197 arg_int_size_in_bytes (const_tree type)
14199 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14202 /* Like size_in_bytes, but handle empty records specially. */
14204 tree
14205 arg_size_in_bytes (const_tree type)
14207 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
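/* Illustrative sketch, not part of the original source: back ends are
   expected to use the two wrappers above instead of int_size_in_bytes /
   size_in_bytes when laying out call arguments, e.g.

     HOST_WIDE_INT bytes = arg_int_size_in_bytes (type);   // 0 if TYPE_EMPTY_P

   so that empty records, as identified by default_is_empty_record under the
   -fabi-version >= 12 rules, occupy no argument space.  */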
14210 /* Return true if an expression with CODE has to have the same result type as
14211 its first operand. */
14213 bool
14214 expr_type_first_operand_type_p (tree_code code)
14216 switch (code)
14218 case NEGATE_EXPR:
14219 case ABS_EXPR:
14220 case BIT_NOT_EXPR:
14221 case PAREN_EXPR:
14222 case CONJ_EXPR:
14224 case PLUS_EXPR:
14225 case MINUS_EXPR:
14226 case MULT_EXPR:
14227 case TRUNC_DIV_EXPR:
14228 case CEIL_DIV_EXPR:
14229 case FLOOR_DIV_EXPR:
14230 case ROUND_DIV_EXPR:
14231 case TRUNC_MOD_EXPR:
14232 case CEIL_MOD_EXPR:
14233 case FLOOR_MOD_EXPR:
14234 case ROUND_MOD_EXPR:
14235 case RDIV_EXPR:
14236 case EXACT_DIV_EXPR:
14237 case MIN_EXPR:
14238 case MAX_EXPR:
14239 case BIT_IOR_EXPR:
14240 case BIT_XOR_EXPR:
14241 case BIT_AND_EXPR:
14243 case LSHIFT_EXPR:
14244 case RSHIFT_EXPR:
14245 case LROTATE_EXPR:
14246 case RROTATE_EXPR:
14247 return true;
14249 default:
14250 return false;
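/* Illustrative sketch, not part of the original source: verification code can
   use this predicate to check that a rebuilt expression kept its operand's
   type, e.g. (CODE, TYPE and OP0 being hypothetical):

     if (expr_type_first_operand_type_p (code))
       gcc_checking_assert (useless_type_conversion_p (type, TREE_TYPE (op0)));

   The switch above lists the unary and binary codes with this property.  */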
14254 /* List of pointer types used to declare builtins before we have seen their
14255 real declaration.
14257 Keep the size up to date in tree.h ! */
14258 const builtin_structptr_type builtin_structptr_types[6] =
14260 { fileptr_type_node, ptr_type_node, "FILE" },
14261 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14262 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14263 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14264 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14265 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14268 #if CHECKING_P
14270 namespace selftest {
14272 /* Selftests for tree. */
14274 /* Verify that integer constants are sane. */
14276 static void
14277 test_integer_constants ()
14279 ASSERT_TRUE (integer_type_node != NULL);
14280 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14282 tree type = integer_type_node;
14284 tree zero = build_zero_cst (type);
14285 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14286 ASSERT_EQ (type, TREE_TYPE (zero));
14288 tree one = build_int_cst (type, 1);
14289 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14290 ASSERT_EQ (type, TREE_TYPE (one));
14293 /* Verify identifiers. */
14295 static void
14296 test_identifiers ()
14298 tree identifier = get_identifier ("foo");
14299 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14300 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14303 /* Verify LABEL_DECL. */
14305 static void
14306 test_labels ()
14308 tree identifier = get_identifier ("err");
14309 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14310 identifier, void_type_node);
14311 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14312 ASSERT_FALSE (FORCED_LABEL (label_decl));
14315 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14316 are given by VALS. */
14318 static tree
14319 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
14321 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14322 tree_vector_builder builder (type, vals.length (), 1);
14323 builder.splice (vals);
14324 return builder.build ();
14327 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14329 static void
14330 check_vector_cst (vec<tree> expected, tree actual)
14332 ASSERT_KNOWN_EQ (expected.length (),
14333 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14334 for (unsigned int i = 0; i < expected.length (); ++i)
14335 ASSERT_EQ (wi::to_wide (expected[i]),
14336 wi::to_wide (vector_cst_elt (actual, i)));
14339 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14340 and that its elements match EXPECTED. */
14342 static void
14343 check_vector_cst_duplicate (vec<tree> expected, tree actual,
14344 unsigned int npatterns)
14346 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14347 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14348 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14349 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14350 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14351 check_vector_cst (expected, actual);
14354 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14355 and NPATTERNS background elements, and that its elements match
14356 EXPECTED. */
14358 static void
14359 check_vector_cst_fill (vec<tree> expected, tree actual,
14360 unsigned int npatterns)
14362 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14363 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14364 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14365 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14366 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14367 check_vector_cst (expected, actual);
14370 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14371 and that its elements match EXPECTED. */
14373 static void
14374 check_vector_cst_stepped (vec<tree> expected, tree actual,
14375 unsigned int npatterns)
14377 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14378 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14379 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14380 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14381 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14382 check_vector_cst (expected, actual);
14385 /* Test the creation of VECTOR_CSTs. */
14387 static void
14388 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
14390 auto_vec<tree, 8> elements (8);
14391 elements.quick_grow (8);
14392 tree element_type = build_nonstandard_integer_type (16, true);
14393 tree vector_type = build_vector_type (element_type, 8);
14395 /* Test a simple linear series with a base of 0 and a step of 1:
14396 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14397 for (unsigned int i = 0; i < 8; ++i)
14398 elements[i] = build_int_cst (element_type, i);
14399 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
14400 check_vector_cst_stepped (elements, vector, 1);
14402 /* Try the same with the first element replaced by 100:
14403 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14404 elements[0] = build_int_cst (element_type, 100);
14405 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14406 check_vector_cst_stepped (elements, vector, 1);
14408 /* Try a series that wraps around.
14409 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14410 for (unsigned int i = 1; i < 8; ++i)
14411 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14412 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14413 check_vector_cst_stepped (elements, vector, 1);
14415 /* Try a downward series:
14416 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14417 for (unsigned int i = 1; i < 8; ++i)
14418 elements[i] = build_int_cst (element_type, 80 - i);
14419 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14420 check_vector_cst_stepped (elements, vector, 1);
14422 /* Try two interleaved series with different bases and steps:
14423 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14424 elements[1] = build_int_cst (element_type, 53);
14425 for (unsigned int i = 2; i < 8; i += 2)
14427 elements[i] = build_int_cst (element_type, 70 - i * 2);
14428 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14430 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14431 check_vector_cst_stepped (elements, vector, 2);
14433 /* Try a duplicated value:
14434 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14435 for (unsigned int i = 1; i < 8; ++i)
14436 elements[i] = elements[0];
14437 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14438 check_vector_cst_duplicate (elements, vector, 1);
14440 /* Try an interleaved duplicated value:
14441 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14442 elements[1] = build_int_cst (element_type, 55);
14443 for (unsigned int i = 2; i < 8; ++i)
14444 elements[i] = elements[i - 2];
14445 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14446 check_vector_cst_duplicate (elements, vector, 2);
14448 /* Try a duplicated value with 2 exceptions
14449 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14450 elements[0] = build_int_cst (element_type, 41);
14451 elements[1] = build_int_cst (element_type, 97);
14452 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14453 check_vector_cst_fill (elements, vector, 2);
14455 /* Try with and without a step
14456 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14457 for (unsigned int i = 3; i < 8; i += 2)
14458 elements[i] = build_int_cst (element_type, i * 7);
14459 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14460 check_vector_cst_stepped (elements, vector, 2);
14462 /* Try a fully-general constant:
14463 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14464 elements[5] = build_int_cst (element_type, 9990);
14465 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14466 check_vector_cst_fill (elements, vector, 4);
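/* Illustrative sketch, not part of the original source: the checks above rely
   on the compressed VECTOR_CST encoding, in which each of NPATTERNS patterns
   stores at most three elements (two leading values plus a step).  For
   example, the stepped vector { 0, 1, 2, 3, 4, 5, 6, 7 } from the first test
   can equivalently be built from its encoding alone:

     tree_vector_builder builder (vector_type, 1, 3);   // 1 pattern, 3 elts
     for (unsigned int i = 0; i < 3; ++i)
       builder.quick_push (build_int_cst (element_type, i));
     tree v = builder.build ();   // VECTOR_CST_STEPPED_P (v) is true

   vector_type and element_type here refer to the types defined in
   test_vector_cst_patterns.  */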
14469 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14470 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14471 modifying its argument in-place. */
14473 static void
14474 check_strip_nops (tree node, tree expected)
14476 STRIP_NOPS (node);
14477 ASSERT_EQ (expected, node);
14480 /* Verify location wrappers. */
14482 static void
14483 test_location_wrappers ()
14485 location_t loc = BUILTINS_LOCATION;
14487 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
14489 /* Wrapping a constant. */
14490 tree int_cst = build_int_cst (integer_type_node, 42);
14491 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
14492 ASSERT_FALSE (location_wrapper_p (int_cst));
14494 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
14495 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
14496 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
14497 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
14499 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14500 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
14502 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14503 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
14504 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
14505 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
14507 /* Wrapping a STRING_CST. */
14508 tree string_cst = build_string (4, "foo");
14509 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
14510 ASSERT_FALSE (location_wrapper_p (string_cst));
14512 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
14513 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
14514 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
14515 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
14516 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
14519 /* Wrapping a variable. */
14520 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
14521 get_identifier ("some_int_var"),
14522 integer_type_node);
14523 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
14524 ASSERT_FALSE (location_wrapper_p (int_var));
14526 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
14527 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
14528 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
14529 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
14531 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
14532 wrapper. */
14533 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
14534 ASSERT_FALSE (location_wrapper_p (r_cast));
14535 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
14537 /* Verify that STRIP_NOPS removes wrappers. */
14538 check_strip_nops (wrapped_int_cst, int_cst);
14539 check_strip_nops (wrapped_string_cst, string_cst);
14540 check_strip_nops (wrapped_int_var, int_var);
14543 /* Run all of the selftests within this file. */
14545 void
14546 tree_c_tests ()
14548 test_integer_constants ();
14549 test_identifiers ();
14550 test_labels ();
14551 test_vector_cst_patterns ();
14552 test_location_wrappers ();
14555 } // namespace selftest
14557 #endif /* CHECKING_P */
14559 #include "gt-tree.h"