1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
70 /* Tree code classes. */
72 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
73 #define END_OF_BASE_TREE_CODES tcc_exceptional,
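/* Including all-tree.def with the definitions above expands DEFTREECODE once
   per tree code, so this table (and the length and name tables below) is
   indexed directly by enum tree_code. */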
75 const enum tree_code_class tree_code_type[] = {
76 #include "all-tree.def"
79 #undef DEFTREECODE
80 #undef END_OF_BASE_TREE_CODES
82 /* Table indexed by tree code giving number of expression
83 operands beyond the fixed part of the node structure.
84 Not used for types or decls. */
86 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
87 #define END_OF_BASE_TREE_CODES 0,
89 const unsigned char tree_code_length[] = {
90 #include "all-tree.def"
93 #undef DEFTREECODE
94 #undef END_OF_BASE_TREE_CODES
96 /* Names of tree components.
97 Used for printing out the tree and error messages. */
98 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
99 #define END_OF_BASE_TREE_CODES "@dummy",
101 static const char *const tree_code_name[] = {
102 #include "all-tree.def"
105 #undef DEFTREECODE
106 #undef END_OF_BASE_TREE_CODES
108 /* Each tree code class has an associated string representation.
109 These must correspond to the tree_code_class entries. */
111 const char *const tree_code_class_strings[] =
113 "exceptional",
114 "constant",
115 "type",
116 "declaration",
117 "reference",
118 "comparison",
119 "unary",
120 "binary",
121 "statement",
122 "vl_exp",
123 "expression"
126 /* obstack.[ch] explicitly declined to prototype this. */
127 extern int _obstack_allocated_p (struct obstack *h, void *obj);
129 /* Statistics-gathering stuff. */
131 static int tree_code_counts[MAX_TREE_CODES];
132 int tree_node_counts[(int) all_kinds];
133 int tree_node_sizes[(int) all_kinds];
135 /* Keep in sync with tree.h:enum tree_node_kind. */
136 static const char * const tree_node_kind_names[] = {
137 "decls",
138 "types",
139 "blocks",
140 "stmts",
141 "refs",
142 "exprs",
143 "constants",
144 "identifiers",
145 "vecs",
146 "binfos",
147 "ssa names",
148 "constructors",
149 "random kinds",
150 "lang_decl kinds",
151 "lang_type kinds",
152 "omp clauses",
155 /* Unique id for next decl created. */
156 static GTY(()) int next_decl_uid;
157 /* Unique id for next type created. */
158 static GTY(()) unsigned next_type_uid = 1;
159 /* Unique id for next debug decl created. Use negative numbers,
160 to catch erroneous uses. */
161 static GTY(()) int next_debug_decl_uid;
163 /* Since we cannot rehash a type after it is in the table, we have to
164 keep the hash code. */
166 struct GTY((for_user)) type_hash {
167 unsigned long hash;
168 tree type;
171 /* Initial size of the hash table (rounded to next prime). */
172 #define TYPE_HASH_INITIAL_SIZE 1000
174 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
176 static hashval_t hash (type_hash *t) { return t->hash; }
177 static bool equal (type_hash *a, type_hash *b);
179 static int
180 keep_cache_entry (type_hash *&t)
182 return ggc_marked_p (t->type);
186 /* Now here is the hash table. When recording a type, it is added to
187 the slot whose index is the hash code. Note that the hash table is
188 used for several kinds of types (function types, array types and
189 array index range types, for now). While all these live in the
190 same table, they are completely independent, and the hash code is
191 computed differently for each of these. */
193 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
198 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
200 static hashval_t hash (tree t);
201 static bool equal (tree x, tree y);
204 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
206 /* Hash table for optimization flags and target option flags. Use the same
207 hash table for both sets of options. Nodes for building the current
208 optimization and target option nodes. The assumption is most of the time
209 the options created will already be in the hash table, so we avoid
210 allocating and freeing up a node repeatedly. */
211 static GTY (()) tree cl_optimization_node;
212 static GTY (()) tree cl_target_option_node;
214 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
216 static hashval_t hash (tree t);
217 static bool equal (tree x, tree y);
220 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
222 /* General tree->tree mapping structure for use in hash tables. */
225 static GTY ((cache))
226 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
228 static GTY ((cache))
229 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
231 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
233 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
235 static bool
236 equal (tree_vec_map *a, tree_vec_map *b)
238 return a->base.from == b->base.from;
241 static int
242 keep_cache_entry (tree_vec_map *&m)
244 return ggc_marked_p (m->base.from);
248 static GTY ((cache))
249 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
251 static void set_type_quals (tree, int);
252 static void print_type_hash_statistics (void);
253 static void print_debug_expr_statistics (void);
254 static void print_value_expr_statistics (void);
256 tree global_trees[TI_MAX];
257 tree integer_types[itk_none];
259 bool int_n_enabled_p[NUM_INT_N_ENTS];
260 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
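/* tree_contains_struct[CODE][TS] is nonzero if a tree node with code CODE
   contains the tree structure TS; it is filled in by
   initialize_tree_contains_struct below and queried via CODE_CONTAINS_STRUCT. */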
262 bool tree_contains_struct[MAX_TREE_CODES][64];
264 /* Number of operands for each OpenMP clause. */
265 unsigned const char omp_clause_num_ops[] =
267 0, /* OMP_CLAUSE_ERROR */
268 1, /* OMP_CLAUSE_PRIVATE */
269 1, /* OMP_CLAUSE_SHARED */
270 1, /* OMP_CLAUSE_FIRSTPRIVATE */
271 2, /* OMP_CLAUSE_LASTPRIVATE */
272 5, /* OMP_CLAUSE_REDUCTION */
273 1, /* OMP_CLAUSE_COPYIN */
274 1, /* OMP_CLAUSE_COPYPRIVATE */
275 3, /* OMP_CLAUSE_LINEAR */
276 2, /* OMP_CLAUSE_ALIGNED */
277 1, /* OMP_CLAUSE_DEPEND */
278 1, /* OMP_CLAUSE_UNIFORM */
279 1, /* OMP_CLAUSE_TO_DECLARE */
280 1, /* OMP_CLAUSE_LINK */
281 2, /* OMP_CLAUSE_FROM */
282 2, /* OMP_CLAUSE_TO */
283 2, /* OMP_CLAUSE_MAP */
284 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
285 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
286 2, /* OMP_CLAUSE__CACHE_ */
287 2, /* OMP_CLAUSE_GANG */
288 1, /* OMP_CLAUSE_ASYNC */
289 1, /* OMP_CLAUSE_WAIT */
290 0, /* OMP_CLAUSE_AUTO */
291 0, /* OMP_CLAUSE_SEQ */
292 1, /* OMP_CLAUSE__LOOPTEMP_ */
293 1, /* OMP_CLAUSE_IF */
294 1, /* OMP_CLAUSE_NUM_THREADS */
295 1, /* OMP_CLAUSE_SCHEDULE */
296 0, /* OMP_CLAUSE_NOWAIT */
297 1, /* OMP_CLAUSE_ORDERED */
298 0, /* OMP_CLAUSE_DEFAULT */
299 3, /* OMP_CLAUSE_COLLAPSE */
300 0, /* OMP_CLAUSE_UNTIED */
301 1, /* OMP_CLAUSE_FINAL */
302 0, /* OMP_CLAUSE_MERGEABLE */
303 1, /* OMP_CLAUSE_DEVICE */
304 1, /* OMP_CLAUSE_DIST_SCHEDULE */
305 0, /* OMP_CLAUSE_INBRANCH */
306 0, /* OMP_CLAUSE_NOTINBRANCH */
307 1, /* OMP_CLAUSE_NUM_TEAMS */
308 1, /* OMP_CLAUSE_THREAD_LIMIT */
309 0, /* OMP_CLAUSE_PROC_BIND */
310 1, /* OMP_CLAUSE_SAFELEN */
311 1, /* OMP_CLAUSE_SIMDLEN */
312 0, /* OMP_CLAUSE_FOR */
313 0, /* OMP_CLAUSE_PARALLEL */
314 0, /* OMP_CLAUSE_SECTIONS */
315 0, /* OMP_CLAUSE_TASKGROUP */
316 1, /* OMP_CLAUSE_PRIORITY */
317 1, /* OMP_CLAUSE_GRAINSIZE */
318 1, /* OMP_CLAUSE_NUM_TASKS */
319 0, /* OMP_CLAUSE_NOGROUP */
320 0, /* OMP_CLAUSE_THREADS */
321 0, /* OMP_CLAUSE_SIMD */
322 1, /* OMP_CLAUSE_HINT */
323 0, /* OMP_CLAUSE_DEFAULTMAP */
324 1, /* OMP_CLAUSE__SIMDUID_ */
325 0, /* OMP_CLAUSE__SIMT_ */
326 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
327 0, /* OMP_CLAUSE_INDEPENDENT */
328 1, /* OMP_CLAUSE_WORKER */
329 1, /* OMP_CLAUSE_VECTOR */
330 1, /* OMP_CLAUSE_NUM_GANGS */
331 1, /* OMP_CLAUSE_NUM_WORKERS */
332 1, /* OMP_CLAUSE_VECTOR_LENGTH */
333 3, /* OMP_CLAUSE_TILE */
334 2, /* OMP_CLAUSE__GRIDDIM_ */
337 const char * const omp_clause_code_name[] =
339 "error_clause",
340 "private",
341 "shared",
342 "firstprivate",
343 "lastprivate",
344 "reduction",
345 "copyin",
346 "copyprivate",
347 "linear",
348 "aligned",
349 "depend",
350 "uniform",
351 "to",
352 "link",
353 "from",
354 "to",
355 "map",
356 "use_device_ptr",
357 "is_device_ptr",
358 "_cache_",
359 "gang",
360 "async",
361 "wait",
362 "auto",
363 "seq",
364 "_looptemp_",
365 "if",
366 "num_threads",
367 "schedule",
368 "nowait",
369 "ordered",
370 "default",
371 "collapse",
372 "untied",
373 "final",
374 "mergeable",
375 "device",
376 "dist_schedule",
377 "inbranch",
378 "notinbranch",
379 "num_teams",
380 "thread_limit",
381 "proc_bind",
382 "safelen",
383 "simdlen",
384 "for",
385 "parallel",
386 "sections",
387 "taskgroup",
388 "priority",
389 "grainsize",
390 "num_tasks",
391 "nogroup",
392 "threads",
393 "simd",
394 "hint",
395 "defaultmap",
396 "_simduid_",
397 "_simt_",
398 "_Cilk_for_count_",
399 "independent",
400 "worker",
401 "vector",
402 "num_gangs",
403 "num_workers",
404 "vector_length",
405 "tile",
406 "_griddim_"
410 /* Return the tree node structure used by tree code CODE. */
412 static inline enum tree_node_structure_enum
413 tree_node_structure_for_code (enum tree_code code)
415 switch (TREE_CODE_CLASS (code))
417 case tcc_declaration:
419 switch (code)
421 case FIELD_DECL:
422 return TS_FIELD_DECL;
423 case PARM_DECL:
424 return TS_PARM_DECL;
425 case VAR_DECL:
426 return TS_VAR_DECL;
427 case LABEL_DECL:
428 return TS_LABEL_DECL;
429 case RESULT_DECL:
430 return TS_RESULT_DECL;
431 case DEBUG_EXPR_DECL:
432 return TS_DECL_WRTL;
433 case CONST_DECL:
434 return TS_CONST_DECL;
435 case TYPE_DECL:
436 return TS_TYPE_DECL;
437 case FUNCTION_DECL:
438 return TS_FUNCTION_DECL;
439 case TRANSLATION_UNIT_DECL:
440 return TS_TRANSLATION_UNIT_DECL;
441 default:
442 return TS_DECL_NON_COMMON;
445 case tcc_type:
446 return TS_TYPE_NON_COMMON;
447 case tcc_reference:
448 case tcc_comparison:
449 case tcc_unary:
450 case tcc_binary:
451 case tcc_expression:
452 case tcc_statement:
453 case tcc_vl_exp:
454 return TS_EXP;
455 default: /* tcc_constant and tcc_exceptional */
456 break;
458 switch (code)
460 /* tcc_constant cases. */
461 case VOID_CST: return TS_TYPED;
462 case INTEGER_CST: return TS_INT_CST;
463 case REAL_CST: return TS_REAL_CST;
464 case FIXED_CST: return TS_FIXED_CST;
465 case COMPLEX_CST: return TS_COMPLEX;
466 case VECTOR_CST: return TS_VECTOR;
467 case STRING_CST: return TS_STRING;
468 /* tcc_exceptional cases. */
469 case ERROR_MARK: return TS_COMMON;
470 case IDENTIFIER_NODE: return TS_IDENTIFIER;
471 case TREE_LIST: return TS_LIST;
472 case TREE_VEC: return TS_VEC;
473 case SSA_NAME: return TS_SSA_NAME;
474 case PLACEHOLDER_EXPR: return TS_COMMON;
475 case STATEMENT_LIST: return TS_STATEMENT_LIST;
476 case BLOCK: return TS_BLOCK;
477 case CONSTRUCTOR: return TS_CONSTRUCTOR;
478 case TREE_BINFO: return TS_BINFO;
479 case OMP_CLAUSE: return TS_OMP_CLAUSE;
480 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
481 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
483 default:
484 gcc_unreachable ();
489 /* Initialize tree_contains_struct to describe the hierarchy of tree
490 nodes. */
492 static void
493 initialize_tree_contains_struct (void)
495 unsigned i;
497 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
499 enum tree_code code;
500 enum tree_node_structure_enum ts_code;
502 code = (enum tree_code) i;
503 ts_code = tree_node_structure_for_code (code);
505 /* Mark the TS structure itself. */
506 tree_contains_struct[code][ts_code] = 1;
508 /* Mark all the structures that TS is derived from. */
509 switch (ts_code)
511 case TS_TYPED:
512 case TS_BLOCK:
513 case TS_OPTIMIZATION:
514 case TS_TARGET_OPTION:
515 MARK_TS_BASE (code);
516 break;
518 case TS_COMMON:
519 case TS_INT_CST:
520 case TS_REAL_CST:
521 case TS_FIXED_CST:
522 case TS_VECTOR:
523 case TS_STRING:
524 case TS_COMPLEX:
525 case TS_SSA_NAME:
526 case TS_CONSTRUCTOR:
527 case TS_EXP:
528 case TS_STATEMENT_LIST:
529 MARK_TS_TYPED (code);
530 break;
532 case TS_IDENTIFIER:
533 case TS_DECL_MINIMAL:
534 case TS_TYPE_COMMON:
535 case TS_LIST:
536 case TS_VEC:
537 case TS_BINFO:
538 case TS_OMP_CLAUSE:
539 MARK_TS_COMMON (code);
540 break;
542 case TS_TYPE_WITH_LANG_SPECIFIC:
543 MARK_TS_TYPE_COMMON (code);
544 break;
546 case TS_TYPE_NON_COMMON:
547 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
548 break;
550 case TS_DECL_COMMON:
551 MARK_TS_DECL_MINIMAL (code);
552 break;
554 case TS_DECL_WRTL:
555 case TS_CONST_DECL:
556 MARK_TS_DECL_COMMON (code);
557 break;
559 case TS_DECL_NON_COMMON:
560 MARK_TS_DECL_WITH_VIS (code);
561 break;
563 case TS_DECL_WITH_VIS:
564 case TS_PARM_DECL:
565 case TS_LABEL_DECL:
566 case TS_RESULT_DECL:
567 MARK_TS_DECL_WRTL (code);
568 break;
570 case TS_FIELD_DECL:
571 MARK_TS_DECL_COMMON (code);
572 break;
574 case TS_VAR_DECL:
575 MARK_TS_DECL_WITH_VIS (code);
576 break;
578 case TS_TYPE_DECL:
579 case TS_FUNCTION_DECL:
580 MARK_TS_DECL_NON_COMMON (code);
581 break;
583 case TS_TRANSLATION_UNIT_DECL:
584 MARK_TS_DECL_COMMON (code);
585 break;
587 default:
588 gcc_unreachable ();
592 /* Basic consistency checks for attributes used in fold. */
593 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
594 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
595 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
601 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
602 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
603 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
604 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
606 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
607 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
608 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
609 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
616 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
617 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
618 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
619 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
620 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
621 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
622 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
623 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
624 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
625 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
626 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
627 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
628 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
629 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
630 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
631 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
632 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
636 /* Init tree.c. */
638 void
639 init_ttree (void)
641 /* Initialize the hash table of types. */
642 type_hash_table
643 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
645 debug_expr_for_decl
646 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
648 value_expr_for_decl
649 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
651 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
653 int_cst_node = make_int_cst (1, 1);
655 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
657 cl_optimization_node = make_node (OPTIMIZATION_NODE);
658 cl_target_option_node = make_node (TARGET_OPTION_NODE);
660 /* Initialize the tree_contains_struct array. */
661 initialize_tree_contains_struct ();
662 lang_hooks.init_ts ();
666 /* The name of the object as the assembler will see it (but before any
667 translations made by ASM_OUTPUT_LABELREF). Often this is the same
668 as DECL_NAME. It is an IDENTIFIER_NODE. */
669 tree
670 decl_assembler_name (tree decl)
672 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
673 lang_hooks.set_decl_assembler_name (decl);
674 return DECL_ASSEMBLER_NAME_RAW (decl);
677 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
678 (either of which may be NULL). Inform the FE if this changes the
679 name. */
681 void
682 overwrite_decl_assembler_name (tree decl, tree name)
684 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
685 lang_hooks.overwrite_decl_assembler_name (decl, name);
688 /* When the target supports COMDAT groups, this indicates which group the
689 DECL is associated with. This can be either an IDENTIFIER_NODE or a
690 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
691 tree
692 decl_comdat_group (const_tree node)
694 struct symtab_node *snode = symtab_node::get (node);
695 if (!snode)
696 return NULL;
697 return snode->get_comdat_group ();
700 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
701 tree
702 decl_comdat_group_id (const_tree node)
704 struct symtab_node *snode = symtab_node::get (node);
705 if (!snode)
706 return NULL;
707 return snode->get_comdat_group_id ();
710 /* When the target supports named sections, return the name of the section
711 NODE is placed in (as a string), or NULL if it is in no section. */
712 const char *
713 decl_section_name (const_tree node)
715 struct symtab_node *snode = symtab_node::get (node);
716 if (!snode)
717 return NULL;
718 return snode->get_section ();
721 /* Set section name of NODE to VALUE (that is expected to be a section
722 name string), or clear it when VALUE is NULL. */
723 void
724 set_decl_section_name (tree node, const char *value)
726 struct symtab_node *snode;
728 if (value == NULL)
730 snode = symtab_node::get (node);
731 if (!snode)
732 return;
734 else if (VAR_P (node))
735 snode = varpool_node::get_create (node);
736 else
737 snode = cgraph_node::get_create (node);
738 snode->set_section (value);
741 /* Return TLS model of a variable NODE. */
742 enum tls_model
743 decl_tls_model (const_tree node)
745 struct varpool_node *snode = varpool_node::get (node);
746 if (!snode)
747 return TLS_MODEL_NONE;
748 return snode->tls_model;
751 /* Set TLS model of variable NODE to MODEL. */
752 void
753 set_decl_tls_model (tree node, enum tls_model model)
755 struct varpool_node *vnode;
757 if (model == TLS_MODEL_NONE)
759 vnode = varpool_node::get (node);
760 if (!vnode)
761 return;
763 else
764 vnode = varpool_node::get_create (node);
765 vnode->tls_model = model;
768 /* Compute the number of bytes occupied by a tree with code CODE.
769 This function cannot be used for nodes that have variable sizes,
770 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
771 size_t
772 tree_code_size (enum tree_code code)
774 switch (TREE_CODE_CLASS (code))
776 case tcc_declaration: /* A decl node */
777 switch (code)
779 case FIELD_DECL: return sizeof (tree_field_decl);
780 case PARM_DECL: return sizeof (tree_parm_decl);
781 case VAR_DECL: return sizeof (tree_var_decl);
782 case LABEL_DECL: return sizeof (tree_label_decl);
783 case RESULT_DECL: return sizeof (tree_result_decl);
784 case CONST_DECL: return sizeof (tree_const_decl);
785 case TYPE_DECL: return sizeof (tree_type_decl);
786 case FUNCTION_DECL: return sizeof (tree_function_decl);
787 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
788 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
789 case NAMESPACE_DECL:
790 case IMPORTED_DECL:
791 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
792 default:
793 gcc_checking_assert (code >= NUM_TREE_CODES);
794 return lang_hooks.tree_size (code);
797 case tcc_type: /* a type node */
798 switch (code)
800 case OFFSET_TYPE:
801 case ENUMERAL_TYPE:
802 case BOOLEAN_TYPE:
803 case INTEGER_TYPE:
804 case REAL_TYPE:
805 case POINTER_TYPE:
806 case REFERENCE_TYPE:
807 case NULLPTR_TYPE:
808 case FIXED_POINT_TYPE:
809 case COMPLEX_TYPE:
810 case VECTOR_TYPE:
811 case ARRAY_TYPE:
812 case RECORD_TYPE:
813 case UNION_TYPE:
814 case QUAL_UNION_TYPE:
815 case VOID_TYPE:
816 case POINTER_BOUNDS_TYPE:
817 case FUNCTION_TYPE:
818 case METHOD_TYPE:
819 case LANG_TYPE: return sizeof (tree_type_non_common);
820 default:
821 gcc_checking_assert (code >= NUM_TREE_CODES);
822 return lang_hooks.tree_size (code);
825 case tcc_reference: /* a reference */
826 case tcc_expression: /* an expression */
827 case tcc_statement: /* an expression with side effects */
828 case tcc_comparison: /* a comparison expression */
829 case tcc_unary: /* a unary arithmetic expression */
830 case tcc_binary: /* a binary arithmetic expression */
831 return (sizeof (struct tree_exp)
832 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
834 case tcc_constant: /* a constant */
835 switch (code)
837 case VOID_CST: return sizeof (tree_typed);
838 case INTEGER_CST: gcc_unreachable ();
839 case REAL_CST: return sizeof (tree_real_cst);
840 case FIXED_CST: return sizeof (tree_fixed_cst);
841 case COMPLEX_CST: return sizeof (tree_complex);
842 case VECTOR_CST: return sizeof (tree_vector);
843 case STRING_CST: gcc_unreachable ();
844 default:
845 gcc_checking_assert (code >= NUM_TREE_CODES);
846 return lang_hooks.tree_size (code);
849 case tcc_exceptional: /* something random, like an identifier. */
850 switch (code)
852 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
853 case TREE_LIST: return sizeof (tree_list);
855 case ERROR_MARK:
856 case PLACEHOLDER_EXPR: return sizeof (tree_common);
858 case TREE_VEC: gcc_unreachable ();
859 case OMP_CLAUSE: gcc_unreachable ();
861 case SSA_NAME: return sizeof (tree_ssa_name);
863 case STATEMENT_LIST: return sizeof (tree_statement_list);
864 case BLOCK: return sizeof (struct tree_block);
865 case CONSTRUCTOR: return sizeof (tree_constructor);
866 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
867 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
869 default:
870 gcc_checking_assert (code >= NUM_TREE_CODES);
871 return lang_hooks.tree_size (code);
874 default:
875 gcc_unreachable ();
879 /* Compute the number of bytes occupied by NODE. This routine only
880 looks at TREE_CODE, except for those nodes that have variable sizes. */
881 size_t
882 tree_size (const_tree node)
884 const enum tree_code code = TREE_CODE (node);
885 switch (code)
887 case INTEGER_CST:
888 return (sizeof (struct tree_int_cst)
889 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
891 case TREE_BINFO:
892 return (offsetof (struct tree_binfo, base_binfos)
893 + vec<tree, va_gc>
894 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
896 case TREE_VEC:
897 return (sizeof (struct tree_vec)
898 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
900 case VECTOR_CST:
901 return (sizeof (struct tree_vector)
902 + (VECTOR_CST_NELTS (node) - 1) * sizeof (tree));
904 case STRING_CST:
905 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
907 case OMP_CLAUSE:
908 return (sizeof (struct tree_omp_clause)
909 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
910 * sizeof (tree));
912 default:
913 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
914 return (sizeof (struct tree_exp)
915 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
916 else
917 return tree_code_size (code);
921 /* Record interesting allocation statistics for a tree node with CODE
922 and LENGTH. */
924 static void
925 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
926 size_t length ATTRIBUTE_UNUSED)
928 enum tree_code_class type = TREE_CODE_CLASS (code);
929 tree_node_kind kind;
931 if (!GATHER_STATISTICS)
932 return;
934 switch (type)
936 case tcc_declaration: /* A decl node */
937 kind = d_kind;
938 break;
940 case tcc_type: /* a type node */
941 kind = t_kind;
942 break;
944 case tcc_statement: /* an expression with side effects */
945 kind = s_kind;
946 break;
948 case tcc_reference: /* a reference */
949 kind = r_kind;
950 break;
952 case tcc_expression: /* an expression */
953 case tcc_comparison: /* a comparison expression */
954 case tcc_unary: /* a unary arithmetic expression */
955 case tcc_binary: /* a binary arithmetic expression */
956 kind = e_kind;
957 break;
959 case tcc_constant: /* a constant */
960 kind = c_kind;
961 break;
963 case tcc_exceptional: /* something random, like an identifier. */
964 switch (code)
966 case IDENTIFIER_NODE:
967 kind = id_kind;
968 break;
970 case TREE_VEC:
971 kind = vec_kind;
972 break;
974 case TREE_BINFO:
975 kind = binfo_kind;
976 break;
978 case SSA_NAME:
979 kind = ssa_name_kind;
980 break;
982 case BLOCK:
983 kind = b_kind;
984 break;
986 case CONSTRUCTOR:
987 kind = constr_kind;
988 break;
990 case OMP_CLAUSE:
991 kind = omp_clause_kind;
992 break;
994 default:
995 kind = x_kind;
996 break;
998 break;
1000 case tcc_vl_exp:
1001 kind = e_kind;
1002 break;
1004 default:
1005 gcc_unreachable ();
1008 tree_code_counts[(int) code]++;
1009 tree_node_counts[(int) kind]++;
1010 tree_node_sizes[(int) kind] += length;
1013 /* Allocate and return a new UID from the DECL_UID namespace. */
1015 int
1016 allocate_decl_uid (void)
1018 return next_decl_uid++;
1021 /* Return a newly allocated node of code CODE. For decl and type
1022 nodes, some other fields are initialized. The rest of the node is
1023 initialized to zero. This function cannot be used for TREE_VEC,
1024 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1025 tree_code_size.
1027 Achoo! I got a code in the node. */
1029 tree
1030 make_node (enum tree_code code MEM_STAT_DECL)
1032 tree t;
1033 enum tree_code_class type = TREE_CODE_CLASS (code);
1034 size_t length = tree_code_size (code);
1036 record_node_allocation_statistics (code, length);
1038 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1039 TREE_SET_CODE (t, code);
1041 switch (type)
1043 case tcc_statement:
1044 TREE_SIDE_EFFECTS (t) = 1;
1045 break;
1047 case tcc_declaration:
1048 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1050 if (code == FUNCTION_DECL)
1052 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1053 SET_DECL_MODE (t, FUNCTION_MODE);
1055 else
1056 SET_DECL_ALIGN (t, 1);
1058 DECL_SOURCE_LOCATION (t) = input_location;
1059 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1060 DECL_UID (t) = --next_debug_decl_uid;
1061 else
1063 DECL_UID (t) = allocate_decl_uid ();
1064 SET_DECL_PT_UID (t, -1);
1066 if (TREE_CODE (t) == LABEL_DECL)
1067 LABEL_DECL_UID (t) = -1;
1069 break;
1071 case tcc_type:
1072 TYPE_UID (t) = next_type_uid++;
1073 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1074 TYPE_USER_ALIGN (t) = 0;
1075 TYPE_MAIN_VARIANT (t) = t;
1076 TYPE_CANONICAL (t) = t;
1078 /* Default to no attributes for type, but let target change that. */
1079 TYPE_ATTRIBUTES (t) = NULL_TREE;
1080 targetm.set_default_type_attributes (t);
1082 /* We have not yet computed the alias set for this type. */
1083 TYPE_ALIAS_SET (t) = -1;
1084 break;
1086 case tcc_constant:
1087 TREE_CONSTANT (t) = 1;
1088 break;
1090 case tcc_expression:
1091 switch (code)
1093 case INIT_EXPR:
1094 case MODIFY_EXPR:
1095 case VA_ARG_EXPR:
1096 case PREDECREMENT_EXPR:
1097 case PREINCREMENT_EXPR:
1098 case POSTDECREMENT_EXPR:
1099 case POSTINCREMENT_EXPR:
1100 /* All of these have side-effects, no matter what their
1101 operands are. */
1102 TREE_SIDE_EFFECTS (t) = 1;
1103 break;
1105 default:
1106 break;
1108 break;
1110 case tcc_exceptional:
1111 switch (code)
1113 case TARGET_OPTION_NODE:
1114 TREE_TARGET_OPTION(t)
1115 = ggc_cleared_alloc<struct cl_target_option> ();
1116 break;
1118 case OPTIMIZATION_NODE:
1119 TREE_OPTIMIZATION (t)
1120 = ggc_cleared_alloc<struct cl_optimization> ();
1121 break;
1123 default:
1124 break;
1126 break;
1128 default:
1129 /* Other classes need no special treatment. */
1130 break;
1133 return t;
1136 /* Free tree node. */
1138 void
1139 free_node (tree node)
1141 enum tree_code code = TREE_CODE (node);
1142 if (GATHER_STATISTICS)
1144 tree_code_counts[(int) TREE_CODE (node)]--;
1145 tree_node_counts[(int) t_kind]--;
1146 tree_node_sizes[(int) t_kind] -= tree_size (node);
1148 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1149 vec_free (CONSTRUCTOR_ELTS (node));
1150 else if (code == BLOCK)
1151 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1152 else if (code == TREE_BINFO)
1153 vec_free (BINFO_BASE_ACCESSES (node));
1154 ggc_free (node);
1157 /* Return a new node with the same contents as NODE except that its
1158 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1160 tree
1161 copy_node (tree node MEM_STAT_DECL)
1163 tree t;
1164 enum tree_code code = TREE_CODE (node);
1165 size_t length;
1167 gcc_assert (code != STATEMENT_LIST);
1169 length = tree_size (node);
1170 record_node_allocation_statistics (code, length);
1171 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1172 memcpy (t, node, length);
1174 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1175 TREE_CHAIN (t) = 0;
1176 TREE_ASM_WRITTEN (t) = 0;
1177 TREE_VISITED (t) = 0;
1179 if (TREE_CODE_CLASS (code) == tcc_declaration)
1181 if (code == DEBUG_EXPR_DECL)
1182 DECL_UID (t) = --next_debug_decl_uid;
1183 else
1185 DECL_UID (t) = allocate_decl_uid ();
1186 if (DECL_PT_UID_SET_P (node))
1187 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1189 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1190 && DECL_HAS_VALUE_EXPR_P (node))
1192 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1193 DECL_HAS_VALUE_EXPR_P (t) = 1;
1195 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1196 if (VAR_P (node))
1198 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1199 t->decl_with_vis.symtab_node = NULL;
1201 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1203 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1204 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1206 if (TREE_CODE (node) == FUNCTION_DECL)
1208 DECL_STRUCT_FUNCTION (t) = NULL;
1209 t->decl_with_vis.symtab_node = NULL;
1212 else if (TREE_CODE_CLASS (code) == tcc_type)
1214 TYPE_UID (t) = next_type_uid++;
1215 /* The following is so that the debug code for
1216 the copy is different from the original type.
1217 The two statements usually duplicate each other
1218 (because they clear fields of the same union),
1219 but the optimizer should catch that. */
1220 TYPE_SYMTAB_ADDRESS (t) = 0;
1221 TYPE_SYMTAB_DIE (t) = 0;
1223 /* Do not copy the values cache. */
1224 if (TYPE_CACHED_VALUES_P (t))
1226 TYPE_CACHED_VALUES_P (t) = 0;
1227 TYPE_CACHED_VALUES (t) = NULL_TREE;
1230 else if (code == TARGET_OPTION_NODE)
1232 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1233 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1234 sizeof (struct cl_target_option));
1236 else if (code == OPTIMIZATION_NODE)
1238 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1239 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1240 sizeof (struct cl_optimization));
1243 return t;
1246 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1247 For example, this can copy a list made of TREE_LIST nodes. */
1249 tree
1250 copy_list (tree list)
1252 tree head;
1253 tree prev, next;
1255 if (list == 0)
1256 return 0;
1258 head = prev = copy_node (list);
1259 next = TREE_CHAIN (list);
1260 while (next)
1262 TREE_CHAIN (prev) = copy_node (next);
1263 prev = TREE_CHAIN (prev);
1264 next = TREE_CHAIN (next);
1266 return head;
1270 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1271 INTEGER_CST with value CST and type TYPE. */
1273 static unsigned int
1274 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1276 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1277 /* We need extra HWIs if CST is an unsigned integer with its
1278 upper bit set. */
1279 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1280 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1281 return cst.get_len ();
1284 /* Return a new INTEGER_CST with value CST and type TYPE. */
1286 static tree
1287 build_new_int_cst (tree type, const wide_int &cst)
1289 unsigned int len = cst.get_len ();
1290 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1291 tree nt = make_int_cst (len, ext_len);
1293 if (len < ext_len)
1295 --ext_len;
1296 TREE_INT_CST_ELT (nt, ext_len)
1297 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1298 for (unsigned int i = len; i < ext_len; ++i)
1299 TREE_INT_CST_ELT (nt, i) = -1;
1301 else if (TYPE_UNSIGNED (type)
1302 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1304 len--;
1305 TREE_INT_CST_ELT (nt, len)
1306 = zext_hwi (cst.elt (len),
1307 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1310 for (unsigned int i = 0; i < len; i++)
1311 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1312 TREE_TYPE (nt) = type;
1313 return nt;
1316 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1318 tree
1319 build_int_cst (tree type, HOST_WIDE_INT low)
1321 /* Support legacy code. */
1322 if (!type)
1323 type = integer_type_node;
1325 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
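/* Create an INT_CST node with value CST zero extended to TYPE (CST is
   treated as unsigned). */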
1328 tree
1329 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1331 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1334 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1336 tree
1337 build_int_cst_type (tree type, HOST_WIDE_INT low)
1339 gcc_assert (type);
1340 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1343 /* Constructs tree in type TYPE with value given by CST. Signedness
1344 of CST is assumed to be the same as the signedness of TYPE. */
1346 tree
1347 double_int_to_tree (tree type, double_int cst)
1349 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1352 /* We force the wide_int CST to the range of the type TYPE by sign or
1353 zero extending it. OVERFLOWABLE indicates if we are interested in
1354 overflow of the value: when >0 we are only interested in signed
1355 overflow, and for <0 we are interested in any overflow. OVERFLOWED
1356 indicates whether overflow has already occurred. We force
1357 the value to be within range of TYPE (by setting to 0 or 1 all
1358 the bits outside the type's range). We set TREE_OVERFLOW if
1359 OVERFLOWED is nonzero,
1360 or OVERFLOWABLE is >0 and signed overflow occurs,
1361 or OVERFLOWABLE is <0 and any overflow occurs.
1363 We return a new tree node for the extended wide_int. The node
1364 is shared if no overflow flags are set. */
1367 tree
1368 force_fit_type (tree type, const wide_int_ref &cst,
1369 int overflowable, bool overflowed)
1371 signop sign = TYPE_SIGN (type);
1373 /* If we need to set overflow flags, return a new unshared node. */
1374 if (overflowed || !wi::fits_to_tree_p (cst, type))
1376 if (overflowed
1377 || overflowable < 0
1378 || (overflowable > 0 && sign == SIGNED))
1380 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1381 tree t = build_new_int_cst (type, tmp);
1382 TREE_OVERFLOW (t) = 1;
1383 return t;
1387 /* Else build a shared node. */
1388 return wide_int_to_tree (type, cst);
1391 /* These are the hash table functions for the hash table of INTEGER_CST
1392 nodes of a sizetype. */
1394 /* Return the hash code of X, an INTEGER_CST. */
1396 hashval_t
1397 int_cst_hasher::hash (tree x)
1399 const_tree const t = x;
1400 hashval_t code = TYPE_UID (TREE_TYPE (t));
1401 int i;
1403 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1404 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1406 return code;
1409 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1410 is the same as that given by Y, which is also an INTEGER_CST tree node. */
1412 bool
1413 int_cst_hasher::equal (tree x, tree y)
1415 const_tree const xt = x;
1416 const_tree const yt = y;
1418 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1419 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1420 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1421 return false;
1423 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1424 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1425 return false;
1427 return true;
1430 /* Create an INT_CST node of TYPE and value CST.
1431 The returned node is always shared. For small integers we use a
1432 per-type vector cache, for larger ones we use a single hash table.
1433 The value is extended from its precision according to the sign of
1434 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1435 the upper bits and ensures that hashing and value equality based
1436 upon the underlying HOST_WIDE_INTs works without masking. */
1438 tree
1439 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1441 tree t;
1442 int ix = -1;
1443 int limit = 0;
1445 gcc_assert (type);
1446 unsigned int prec = TYPE_PRECISION (type);
1447 signop sgn = TYPE_SIGN (type);
1449 /* Verify that everything is canonical. */
1450 int l = pcst.get_len ();
1451 if (l > 1)
1453 if (pcst.elt (l - 1) == 0)
1454 gcc_checking_assert (pcst.elt (l - 2) < 0);
1455 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1456 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1459 wide_int cst = wide_int::from (pcst, prec, sgn);
1460 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1462 if (ext_len == 1)
1464 /* We just need to store a single HOST_WIDE_INT. */
1465 HOST_WIDE_INT hwi;
1466 if (TYPE_UNSIGNED (type))
1467 hwi = cst.to_uhwi ();
1468 else
1469 hwi = cst.to_shwi ();
1471 switch (TREE_CODE (type))
1473 case NULLPTR_TYPE:
1474 gcc_assert (hwi == 0);
1475 /* Fallthru. */
1477 case POINTER_TYPE:
1478 case REFERENCE_TYPE:
1479 case POINTER_BOUNDS_TYPE:
1480 /* Cache NULL pointer and zero bounds. */
1481 if (hwi == 0)
1483 limit = 1;
1484 ix = 0;
1486 break;
1488 case BOOLEAN_TYPE:
1489 /* Cache false or true. */
1490 limit = 2;
1491 if (IN_RANGE (hwi, 0, 1))
1492 ix = hwi;
1493 break;
1495 case INTEGER_TYPE:
1496 case OFFSET_TYPE:
1497 if (TYPE_SIGN (type) == UNSIGNED)
1499 /* Cache [0, N). */
1500 limit = INTEGER_SHARE_LIMIT;
1501 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1502 ix = hwi;
1504 else
1506 /* Cache [-1, N). */
1507 limit = INTEGER_SHARE_LIMIT + 1;
1508 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1509 ix = hwi + 1;
1511 break;
1513 case ENUMERAL_TYPE:
1514 break;
1516 default:
1517 gcc_unreachable ();
1520 if (ix >= 0)
1522 /* Look for it in the type's vector of small shared ints. */
1523 if (!TYPE_CACHED_VALUES_P (type))
1525 TYPE_CACHED_VALUES_P (type) = 1;
1526 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1529 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1530 if (t)
1531 /* Make sure no one is clobbering the shared constant. */
1532 gcc_checking_assert (TREE_TYPE (t) == type
1533 && TREE_INT_CST_NUNITS (t) == 1
1534 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1535 && TREE_INT_CST_EXT_NUNITS (t) == 1
1536 && TREE_INT_CST_ELT (t, 0) == hwi);
1537 else
1539 /* Create a new shared int. */
1540 t = build_new_int_cst (type, cst);
1541 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1544 else
1546 /* Use the cache of larger shared ints, using int_cst_node as
1547 a temporary. */
1549 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1550 TREE_TYPE (int_cst_node) = type;
1552 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1553 t = *slot;
1554 if (!t)
1556 /* Insert this one into the hash table. */
1557 t = int_cst_node;
1558 *slot = t;
1559 /* Make a new node for next time round. */
1560 int_cst_node = make_int_cst (1, 1);
1564 else
1566 /* The value either hashes properly or we drop it on the floor
1567 for the gc to take care of. There will not be enough of them
1568 to worry about. */
1570 tree nt = build_new_int_cst (type, cst);
1571 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1572 t = *slot;
1573 if (!t)
1575 /* Insert this one into the hash table. */
1576 t = nt;
1577 *slot = t;
1581 return t;
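/* Record the INTEGER_CST T in the appropriate sharing cache for its type:
   the per-type vector of small values or the hash table of larger shared
   constants. */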
1584 void
1585 cache_integer_cst (tree t)
1587 tree type = TREE_TYPE (t);
1588 int ix = -1;
1589 int limit = 0;
1590 int prec = TYPE_PRECISION (type);
1592 gcc_assert (!TREE_OVERFLOW (t));
1594 switch (TREE_CODE (type))
1596 case NULLPTR_TYPE:
1597 gcc_assert (integer_zerop (t));
1598 /* Fallthru. */
1600 case POINTER_TYPE:
1601 case REFERENCE_TYPE:
1602 /* Cache NULL pointer. */
1603 if (integer_zerop (t))
1605 limit = 1;
1606 ix = 0;
1608 break;
1610 case BOOLEAN_TYPE:
1611 /* Cache false or true. */
1612 limit = 2;
1613 if (wi::ltu_p (wi::to_wide (t), 2))
1614 ix = TREE_INT_CST_ELT (t, 0);
1615 break;
1617 case INTEGER_TYPE:
1618 case OFFSET_TYPE:
1619 if (TYPE_UNSIGNED (type))
1621 /* Cache 0..N */
1622 limit = INTEGER_SHARE_LIMIT;
1624 /* This is a little hokie, but if the prec is smaller than
1625 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1626 obvious test will not get the correct answer. */
1627 if (prec < HOST_BITS_PER_WIDE_INT)
1629 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1630 ix = tree_to_uhwi (t);
1632 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1633 ix = tree_to_uhwi (t);
1635 else
1637 /* Cache -1..N */
1638 limit = INTEGER_SHARE_LIMIT + 1;
1640 if (integer_minus_onep (t))
1641 ix = 0;
1642 else if (!wi::neg_p (wi::to_wide (t)))
1644 if (prec < HOST_BITS_PER_WIDE_INT)
1646 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1647 ix = tree_to_shwi (t) + 1;
1649 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1650 ix = tree_to_shwi (t) + 1;
1653 break;
1655 case ENUMERAL_TYPE:
1656 break;
1658 default:
1659 gcc_unreachable ();
1662 if (ix >= 0)
1664 /* Look for it in the type's vector of small shared ints. */
1665 if (!TYPE_CACHED_VALUES_P (type))
1667 TYPE_CACHED_VALUES_P (type) = 1;
1668 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1671 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1672 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1674 else
1676 /* Use the cache of larger shared ints. */
1677 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1678 /* If there is already an entry for the number verify it's the
1679 same. */
1680 if (*slot)
1681 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1682 else
1683 /* Otherwise insert this one into the hash table. */
1684 *slot = t;
1689 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1690 and the rest are zeros. */
1692 tree
1693 build_low_bits_mask (tree type, unsigned bits)
1695 gcc_assert (bits <= TYPE_PRECISION (type));
1697 return wide_int_to_tree (type, wi::mask (bits, false,
1698 TYPE_PRECISION (type)));
1701 /* Checks that X is an integer constant that can be expressed in (unsigned)
1702 HOST_WIDE_INT without loss of precision. */
1704 bool
1705 cst_and_fits_in_hwi (const_tree x)
1707 return (TREE_CODE (x) == INTEGER_CST
1708 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1711 /* Build a newly constructed VECTOR_CST node of length LEN. */
1713 tree
1714 make_vector (unsigned len MEM_STAT_DECL)
1716 tree t;
1717 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1719 record_node_allocation_statistics (VECTOR_CST, length);
1721 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1723 TREE_SET_CODE (t, VECTOR_CST);
1724 TREE_CONSTANT (t) = 1;
1725 VECTOR_CST_NELTS (t) = len;
1727 return t;
1730 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1731 are given by VALS. */
1733 tree
1734 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
1736 unsigned int nelts = vals.length ();
1737 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
1738 int over = 0;
1739 unsigned cnt = 0;
1740 tree v = make_vector (nelts);
1741 TREE_TYPE (v) = type;
1743 /* Iterate through elements and check for overflow. */
1744 for (cnt = 0; cnt < nelts; ++cnt)
1746 tree value = vals[cnt];
1748 VECTOR_CST_ELT (v, cnt) = value;
1750 /* Don't crash if we get an address constant. */
1751 if (!CONSTANT_CLASS_P (value))
1752 continue;
1754 over |= TREE_OVERFLOW (value);
1757 TREE_OVERFLOW (v) = over;
1758 return v;
1761 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1762 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1764 tree
1765 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1767 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1768 unsigned HOST_WIDE_INT idx;
1769 tree value;
1771 auto_vec<tree, 32> vec (nelts);
1772 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1774 if (TREE_CODE (value) == VECTOR_CST)
1775 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1776 vec.quick_push (VECTOR_CST_ELT (value, i));
1777 else
1778 vec.quick_push (value);
1780 while (vec.length () < nelts)
1781 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1783 return build_vector (type, vec);
1786 /* Build a vector of type VECTYPE where all the elements are SCs. */
1787 tree
1788 build_vector_from_val (tree vectype, tree sc)
1790 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1792 if (sc == error_mark_node)
1793 return sc;
1795 /* Verify that the vector type is suitable for SC. Note that there
1796 is some inconsistency in the type-system with respect to restrict
1797 qualifications of pointers. Vector types always have a main-variant
1798 element type and the qualification is applied to the vector-type.
1799 So TREE_TYPE (vector-type) does not return a properly qualified
1800 vector element-type. */
1801 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1802 TREE_TYPE (vectype)));
1804 if (CONSTANT_CLASS_P (sc))
1806 auto_vec<tree, 32> v (nunits);
1807 for (i = 0; i < nunits; ++i)
1808 v.quick_push (sc);
1809 return build_vector (vectype, v);
1811 else
1813 vec<constructor_elt, va_gc> *v;
1814 vec_alloc (v, nunits);
1815 for (i = 0; i < nunits; ++i)
1816 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1817 return build_constructor (vectype, v);
1821 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1822 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1824 void
1825 recompute_constructor_flags (tree c)
1827 unsigned int i;
1828 tree val;
1829 bool constant_p = true;
1830 bool side_effects_p = false;
1831 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1833 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1835 /* Mostly ctors will have elts that don't have side-effects, so
1836 the usual case is to scan all the elements. Hence a single
1837 loop for both const and side effects, rather than one loop
1838 each (with early outs). */
1839 if (!TREE_CONSTANT (val))
1840 constant_p = false;
1841 if (TREE_SIDE_EFFECTS (val))
1842 side_effects_p = true;
1845 TREE_SIDE_EFFECTS (c) = side_effects_p;
1846 TREE_CONSTANT (c) = constant_p;
1849 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1850 CONSTRUCTOR C. */
1852 void
1853 verify_constructor_flags (tree c)
1855 unsigned int i;
1856 tree val;
1857 bool constant_p = TREE_CONSTANT (c);
1858 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1859 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1861 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1863 if (constant_p && !TREE_CONSTANT (val))
1864 internal_error ("non-constant element in constant CONSTRUCTOR");
1865 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1866 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1870 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1871 are in the vec pointed to by VALS. */
1872 tree
1873 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1875 tree c = make_node (CONSTRUCTOR);
1877 TREE_TYPE (c) = type;
1878 CONSTRUCTOR_ELTS (c) = vals;
1880 recompute_constructor_flags (c);
1882 return c;
1885 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1886 INDEX and VALUE. */
1887 tree
1888 build_constructor_single (tree type, tree index, tree value)
1890 vec<constructor_elt, va_gc> *v;
1891 constructor_elt elt = {index, value};
1893 vec_alloc (v, 1);
1894 v->quick_push (elt);
1896 return build_constructor (type, v);
1900 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1901 are in a list pointed to by VALS. */
1902 tree
1903 build_constructor_from_list (tree type, tree vals)
1905 tree t;
1906 vec<constructor_elt, va_gc> *v = NULL;
1908 if (vals)
1910 vec_alloc (v, list_length (vals));
1911 for (t = vals; t; t = TREE_CHAIN (t))
1912 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1915 return build_constructor (type, v);
1918 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1919 of elements, provided as index/value pairs. */
1921 tree
1922 build_constructor_va (tree type, int nelts, ...)
1924 vec<constructor_elt, va_gc> *v = NULL;
1925 va_list p;
1927 va_start (p, nelts);
1928 vec_alloc (v, nelts);
1929 while (nelts--)
1931 tree index = va_arg (p, tree);
1932 tree value = va_arg (p, tree);
1933 CONSTRUCTOR_APPEND_ELT (v, index, value);
1935 va_end (p);
1936 return build_constructor (type, v);
1939 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1941 tree
1942 build_fixed (tree type, FIXED_VALUE_TYPE f)
1944 tree v;
1945 FIXED_VALUE_TYPE *fp;
1947 v = make_node (FIXED_CST);
1948 fp = ggc_alloc<fixed_value> ();
1949 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1951 TREE_TYPE (v) = type;
1952 TREE_FIXED_CST_PTR (v) = fp;
1953 return v;
1956 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1958 tree
1959 build_real (tree type, REAL_VALUE_TYPE d)
1961 tree v;
1962 REAL_VALUE_TYPE *dp;
1963 int overflow = 0;
1965 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1966 Consider doing it via real_convert now. */
1968 v = make_node (REAL_CST);
1969 dp = ggc_alloc<real_value> ();
1970 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1972 TREE_TYPE (v) = type;
1973 TREE_REAL_CST_PTR (v) = dp;
1974 TREE_OVERFLOW (v) = overflow;
1975 return v;
1978 /* Like build_real, but first truncate D to the type. */
1980 tree
1981 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1983 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1986 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1987 converted to the floating-point format of TYPE. */
1989 REAL_VALUE_TYPE
1990 real_value_from_int_cst (const_tree type, const_tree i)
1992 REAL_VALUE_TYPE d;
1994 /* Clear all bits of the real value type so that we can later do
1995 bitwise comparisons to see if two values are the same. */
1996 memset (&d, 0, sizeof d);
1998 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
1999 TYPE_SIGN (TREE_TYPE (i)));
2000 return d;
2003 /* Given a tree representing an integer constant I, return a tree
2004 representing the same value as a floating-point constant of type TYPE. */
2006 tree
2007 build_real_from_int_cst (tree type, const_tree i)
2009 tree v;
2010 int overflow = TREE_OVERFLOW (i);
2012 v = build_real (type, real_value_from_int_cst (type, i));
2014 TREE_OVERFLOW (v) |= overflow;
2015 return v;
2018 /* Return a newly constructed STRING_CST node whose value is
2019 the LEN characters at STR.
2020 Note that for a C string literal, LEN should include the trailing NUL.
2021 The TREE_TYPE is not initialized. */
2023 tree
2024 build_string (int len, const char *str)
2026 tree s;
2027 size_t length;
2029 /* Do not waste bytes provided by padding of struct tree_string. */
2030 length = len + offsetof (struct tree_string, str) + 1;
2032 record_node_allocation_statistics (STRING_CST, length);
2034 s = (tree) ggc_internal_alloc (length);
2036 memset (s, 0, sizeof (struct tree_typed));
2037 TREE_SET_CODE (s, STRING_CST);
2038 TREE_CONSTANT (s) = 1;
2039 TREE_STRING_LENGTH (s) = len;
2040 memcpy (s->string.str, str, len);
2041 s->string.str[len] = '\0';
2043 return s;
2046 /* Return a newly constructed COMPLEX_CST node whose value is
2047 specified by the real and imaginary parts REAL and IMAG.
2048 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2049 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2051 tree
2052 build_complex (tree type, tree real, tree imag)
2054 tree t = make_node (COMPLEX_CST);
2056 TREE_REALPART (t) = real;
2057 TREE_IMAGPART (t) = imag;
2058 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2059 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2060 return t;
2063 /* Build a complex (inf +- 0i), such as for the result of cproj.
2064 TYPE is the complex tree type of the result. If NEG is true, the
2065 imaginary zero is negative. */
2067 tree
2068 build_complex_inf (tree type, bool neg)
2070 REAL_VALUE_TYPE rinf, rzero = dconst0;
2072 real_inf (&rinf);
2073 rzero.sign = neg;
2074 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2075 build_real (TREE_TYPE (type), rzero));
2078 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2079 element is set to 1. In particular, this is 1 + i for complex types. */
2081 tree
2082 build_each_one_cst (tree type)
2084 if (TREE_CODE (type) == COMPLEX_TYPE)
2086 tree scalar = build_one_cst (TREE_TYPE (type));
2087 return build_complex (type, scalar, scalar);
2089 else
2090 return build_one_cst (type);
2093 /* Return a constant of arithmetic type TYPE which is the
2094 multiplicative identity of the set TYPE. */
2096 tree
2097 build_one_cst (tree type)
2099 switch (TREE_CODE (type))
2101 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2102 case POINTER_TYPE: case REFERENCE_TYPE:
2103 case OFFSET_TYPE:
2104 return build_int_cst (type, 1);
2106 case REAL_TYPE:
2107 return build_real (type, dconst1);
2109 case FIXED_POINT_TYPE:
2110 /* We can only generate 1 for accum types. */
2111 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2112 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2114 case VECTOR_TYPE:
2116 tree scalar = build_one_cst (TREE_TYPE (type));
2118 return build_vector_from_val (type, scalar);
2121 case COMPLEX_TYPE:
2122 return build_complex (type,
2123 build_one_cst (TREE_TYPE (type)),
2124 build_zero_cst (TREE_TYPE (type)));
2126 default:
2127 gcc_unreachable ();
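/* For example, assuming the global complex_double_type_node is available,
   build_one_cst (complex_double_type_node) yields the multiplicative
   identity 1.0 + 0.0i, whereas build_each_one_cst on the same type
   yields 1.0 + 1.0i.  */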
2131 /* Return an integer of type TYPE containing all 1's in as much precision as
2132 it contains, or a complex or vector whose subparts are such integers. */
2134 tree
2135 build_all_ones_cst (tree type)
2137 if (TREE_CODE (type) == COMPLEX_TYPE)
2139 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2140 return build_complex (type, scalar, scalar);
2142 else
2143 return build_minus_one_cst (type);
2146 /* Return a constant of arithmetic type TYPE which is the
2147 opposite of the multiplicative identity of the set TYPE. */
2149 tree
2150 build_minus_one_cst (tree type)
2152 switch (TREE_CODE (type))
2154 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2155 case POINTER_TYPE: case REFERENCE_TYPE:
2156 case OFFSET_TYPE:
2157 return build_int_cst (type, -1);
2159 case REAL_TYPE:
2160 return build_real (type, dconstm1);
2162 case FIXED_POINT_TYPE:
2163 /* We can only generate -1 for accum types. */
2164 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2165 return build_fixed (type,
2166 fixed_from_double_int (double_int_minus_one,
2167 SCALAR_TYPE_MODE (type)));
2169 case VECTOR_TYPE:
2171 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2173 return build_vector_from_val (type, scalar);
2176 case COMPLEX_TYPE:
2177 return build_complex (type,
2178 build_minus_one_cst (TREE_TYPE (type)),
2179 build_zero_cst (TREE_TYPE (type)));
2181 default:
2182 gcc_unreachable ();
2186 /* Build 0 constant of type TYPE. This is used by constructor folding
2187 and thus the constant should be represented in memory by
2188 zero(es). */
2190 tree
2191 build_zero_cst (tree type)
2193 switch (TREE_CODE (type))
2195 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2196 case POINTER_TYPE: case REFERENCE_TYPE:
2197 case OFFSET_TYPE: case NULLPTR_TYPE:
2198 return build_int_cst (type, 0);
2200 case REAL_TYPE:
2201 return build_real (type, dconst0);
2203 case FIXED_POINT_TYPE:
2204 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2206 case VECTOR_TYPE:
2208 tree scalar = build_zero_cst (TREE_TYPE (type));
2210 return build_vector_from_val (type, scalar);
2213 case COMPLEX_TYPE:
2215 tree zero = build_zero_cst (TREE_TYPE (type));
2217 return build_complex (type, zero, zero);
2220 default:
2221 if (!AGGREGATE_TYPE_P (type))
2222 return fold_convert (type, integer_zero_node);
2223 return build_constructor (type, NULL);
2228 /* Build a BINFO with LEN language slots. */
2230 tree
2231 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2233 tree t;
2234 size_t length = (offsetof (struct tree_binfo, base_binfos)
2235 + vec<tree, va_gc>::embedded_size (base_binfos));
2237 record_node_allocation_statistics (TREE_BINFO, length);
2239 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2241 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2243 TREE_SET_CODE (t, TREE_BINFO);
2245 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2247 return t;
2250 /* Create a CASE_LABEL_EXPR tree node and return it. */
2252 tree
2253 build_case_label (tree low_value, tree high_value, tree label_decl)
2255 tree t = make_node (CASE_LABEL_EXPR);
2257 TREE_TYPE (t) = void_type_node;
2258 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2260 CASE_LOW (t) = low_value;
2261 CASE_HIGH (t) = high_value;
2262 CASE_LABEL (t) = label_decl;
2263 CASE_CHAIN (t) = NULL_TREE;
2265 return t;
2268 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2269 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2270 The latter determines the length of the HOST_WIDE_INT vector. */
2272 tree
2273 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2275 tree t;
2276 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2277 + sizeof (struct tree_int_cst));
2279 gcc_assert (len);
2280 record_node_allocation_statistics (INTEGER_CST, length);
2282 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2284 TREE_SET_CODE (t, INTEGER_CST);
2285 TREE_INT_CST_NUNITS (t) = len;
2286 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2287 /* to_offset can only be applied to trees that are offset_int-sized
2288 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2289 must be exactly the precision of offset_int and so LEN is correct. */
2290 if (ext_len <= OFFSET_INT_ELTS)
2291 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2292 else
2293 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2295 TREE_CONSTANT (t) = 1;
2297 return t;
2300 /* Build a newly constructed TREE_VEC node of length LEN. */
2302 tree
2303 make_tree_vec (int len MEM_STAT_DECL)
2305 tree t;
2306 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2308 record_node_allocation_statistics (TREE_VEC, length);
2310 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2312 TREE_SET_CODE (t, TREE_VEC);
2313 TREE_VEC_LENGTH (t) = len;
2315 return t;
2318 /* Grow a TREE_VEC node to new length LEN. */
2320 tree
2321 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2323 gcc_assert (TREE_CODE (v) == TREE_VEC);
2325 int oldlen = TREE_VEC_LENGTH (v);
2326 gcc_assert (len > oldlen);
2328 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2329 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2331 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2333 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2335 TREE_VEC_LENGTH (v) = len;
2337 return v;
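/* A minimal sketch of building and then growing a TREE_VEC; the caller
   is assumed to fill any new slots before reading them:

     tree v = make_tree_vec (2);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;
     v = grow_tree_vec (v, 4);

   Afterwards TREE_VEC_LENGTH (v) is 4 and the first two elements are
   preserved by the reallocation.  */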
2340 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2341 fixed, and scalar, complex or vector. */
2344 zerop (const_tree expr)
2346 return (integer_zerop (expr)
2347 || real_zerop (expr)
2348 || fixed_zerop (expr));
2351 /* Return 1 if EXPR is the integer constant zero or a complex constant
2352 of zero. */
2355 integer_zerop (const_tree expr)
2357 switch (TREE_CODE (expr))
2359 case INTEGER_CST:
2360 return wi::to_wide (expr) == 0;
2361 case COMPLEX_CST:
2362 return (integer_zerop (TREE_REALPART (expr))
2363 && integer_zerop (TREE_IMAGPART (expr)));
2364 case VECTOR_CST:
2366 unsigned i;
2367 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2368 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2369 return false;
2370 return true;
2372 default:
2373 return false;
2377 /* Return 1 if EXPR is the integer constant one or the corresponding
2378 complex constant. */
2381 integer_onep (const_tree expr)
2383 switch (TREE_CODE (expr))
2385 case INTEGER_CST:
2386 return wi::eq_p (wi::to_widest (expr), 1);
2387 case COMPLEX_CST:
2388 return (integer_onep (TREE_REALPART (expr))
2389 && integer_zerop (TREE_IMAGPART (expr)));
2390 case VECTOR_CST:
2392 unsigned i;
2393 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2394 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2395 return false;
2396 return true;
2398 default:
2399 return false;
2403 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2404 return 1 if every piece is the integer constant one. */
2407 integer_each_onep (const_tree expr)
2409 if (TREE_CODE (expr) == COMPLEX_CST)
2410 return (integer_onep (TREE_REALPART (expr))
2411 && integer_onep (TREE_IMAGPART (expr)));
2412 else
2413 return integer_onep (expr);
2416 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2417 it contains, or a complex or vector whose subparts are such integers. */
2420 integer_all_onesp (const_tree expr)
2422 if (TREE_CODE (expr) == COMPLEX_CST
2423 && integer_all_onesp (TREE_REALPART (expr))
2424 && integer_all_onesp (TREE_IMAGPART (expr)))
2425 return 1;
2427 else if (TREE_CODE (expr) == VECTOR_CST)
2429 unsigned i;
2430 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2431 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2432 return 0;
2433 return 1;
2436 else if (TREE_CODE (expr) != INTEGER_CST)
2437 return 0;
2439 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2440 == wi::to_wide (expr));
2443 /* Return 1 if EXPR is the integer constant minus one. */
2446 integer_minus_onep (const_tree expr)
2448 if (TREE_CODE (expr) == COMPLEX_CST)
2449 return (integer_all_onesp (TREE_REALPART (expr))
2450 && integer_zerop (TREE_IMAGPART (expr)));
2451 else
2452 return integer_all_onesp (expr);
2455 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2456 one bit on). */
2459 integer_pow2p (const_tree expr)
2461 if (TREE_CODE (expr) == COMPLEX_CST
2462 && integer_pow2p (TREE_REALPART (expr))
2463 && integer_zerop (TREE_IMAGPART (expr)))
2464 return 1;
2466 if (TREE_CODE (expr) != INTEGER_CST)
2467 return 0;
2469 return wi::popcount (wi::to_wide (expr)) == 1;
2472 /* Return 1 if EXPR is an integer constant other than zero or a
2473 complex constant other than zero. */
2476 integer_nonzerop (const_tree expr)
2478 return ((TREE_CODE (expr) == INTEGER_CST
2479 && wi::to_wide (expr) != 0)
2480 || (TREE_CODE (expr) == COMPLEX_CST
2481 && (integer_nonzerop (TREE_REALPART (expr))
2482 || integer_nonzerop (TREE_IMAGPART (expr)))));
2485 /* Return 1 if EXPR is the integer constant one. For vector,
2486 return 1 if every piece is the integer constant minus one
2487 (representing the value TRUE). */
2490 integer_truep (const_tree expr)
2492 if (TREE_CODE (expr) == VECTOR_CST)
2493 return integer_all_onesp (expr);
2494 return integer_onep (expr);
2497 /* Return 1 if EXPR is the fixed-point constant zero. */
2500 fixed_zerop (const_tree expr)
2502 return (TREE_CODE (expr) == FIXED_CST
2503 && TREE_FIXED_CST (expr).data.is_zero ());
2506 /* Return the power of two represented by a tree node known to be a
2507 power of two. */
2510 tree_log2 (const_tree expr)
2512 if (TREE_CODE (expr) == COMPLEX_CST)
2513 return tree_log2 (TREE_REALPART (expr));
2515 return wi::exact_log2 (wi::to_wide (expr));
2518 /* Similar, but return the largest integer Y such that 2 ** Y is less
2519 than or equal to EXPR. */
2522 tree_floor_log2 (const_tree expr)
2524 if (TREE_CODE (expr) == COMPLEX_CST)
2525 return tree_log2 (TREE_REALPART (expr));
2527 return wi::floor_log2 (wi::to_wide (expr));
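/* Worked examples: for an INTEGER_CST of value 8, tree_log2 and
   tree_floor_log2 both return 3; for value 10, only tree_floor_log2 is
   meaningful and it also returns 3, since 2**3 <= 10 < 2**4.  */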
2530 /* Return number of known trailing zero bits in EXPR, or, if the value of
2531 EXPR is known to be zero, the precision of its type. */
2533 unsigned int
2534 tree_ctz (const_tree expr)
2536 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2537 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2538 return 0;
2540 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2541 switch (TREE_CODE (expr))
2543 case INTEGER_CST:
2544 ret1 = wi::ctz (wi::to_wide (expr));
2545 return MIN (ret1, prec);
2546 case SSA_NAME:
2547 ret1 = wi::ctz (get_nonzero_bits (expr));
2548 return MIN (ret1, prec);
2549 case PLUS_EXPR:
2550 case MINUS_EXPR:
2551 case BIT_IOR_EXPR:
2552 case BIT_XOR_EXPR:
2553 case MIN_EXPR:
2554 case MAX_EXPR:
2555 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2556 if (ret1 == 0)
2557 return ret1;
2558 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2559 return MIN (ret1, ret2);
2560 case POINTER_PLUS_EXPR:
2561 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2562 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2563 /* Second operand is sizetype, which could in theory be
2564 wider than the pointer's precision. Make sure we never
2565 return more than prec. */
2566 ret2 = MIN (ret2, prec);
2567 return MIN (ret1, ret2);
2568 case BIT_AND_EXPR:
2569 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2570 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2571 return MAX (ret1, ret2);
2572 case MULT_EXPR:
2573 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2574 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2575 return MIN (ret1 + ret2, prec);
2576 case LSHIFT_EXPR:
2577 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2578 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2579 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2581 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2582 return MIN (ret1 + ret2, prec);
2584 return ret1;
2585 case RSHIFT_EXPR:
2586 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2587 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2589 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2590 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2591 if (ret1 > ret2)
2592 return ret1 - ret2;
2594 return 0;
2595 case TRUNC_DIV_EXPR:
2596 case CEIL_DIV_EXPR:
2597 case FLOOR_DIV_EXPR:
2598 case ROUND_DIV_EXPR:
2599 case EXACT_DIV_EXPR:
2600 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2601 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2603 int l = tree_log2 (TREE_OPERAND (expr, 1));
2604 if (l >= 0)
2606 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2607 ret2 = l;
2608 if (ret1 > ret2)
2609 return ret1 - ret2;
2612 return 0;
2613 CASE_CONVERT:
2614 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2615 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2616 ret1 = prec;
2617 return MIN (ret1, prec);
2618 case SAVE_EXPR:
2619 return tree_ctz (TREE_OPERAND (expr, 0));
2620 case COND_EXPR:
2621 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2622 if (ret1 == 0)
2623 return 0;
2624 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2625 return MIN (ret1, ret2);
2626 case COMPOUND_EXPR:
2627 return tree_ctz (TREE_OPERAND (expr, 1));
2628 case ADDR_EXPR:
2629 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2630 if (ret1 > BITS_PER_UNIT)
2632 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2633 return MIN (ret1, prec);
2635 return 0;
2636 default:
2637 return 0;
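/* Worked examples of the recursion above: the INTEGER_CST 40 (binary
   101000) has 3 trailing zero bits; a MULT_EXPR whose operands are known
   to have 2 and 3 trailing zeros yields 5, capped at the precision; and
   a BIT_AND_EXPR yields the maximum of its operands' counts, since
   masking can only clear additional low bits.  */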
2641 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2642 decimal float constants, so don't return 1 for them. */
2645 real_zerop (const_tree expr)
2647 switch (TREE_CODE (expr))
2649 case REAL_CST:
2650 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2651 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2652 case COMPLEX_CST:
2653 return real_zerop (TREE_REALPART (expr))
2654 && real_zerop (TREE_IMAGPART (expr));
2655 case VECTOR_CST:
2657 unsigned i;
2658 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2659 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2660 return false;
2661 return true;
2663 default:
2664 return false;
2668 /* Return 1 if EXPR is the real constant one in real or complex form.
2669 Trailing zeroes matter for decimal float constants, so don't return
2670 1 for them. */
2673 real_onep (const_tree expr)
2675 switch (TREE_CODE (expr))
2677 case REAL_CST:
2678 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2679 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2680 case COMPLEX_CST:
2681 return real_onep (TREE_REALPART (expr))
2682 && real_zerop (TREE_IMAGPART (expr));
2683 case VECTOR_CST:
2685 unsigned i;
2686 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2687 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2688 return false;
2689 return true;
2691 default:
2692 return false;
2696 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2697 matter for decimal float constants, so don't return 1 for them. */
2700 real_minus_onep (const_tree expr)
2702 switch (TREE_CODE (expr))
2704 case REAL_CST:
2705 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2706 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2707 case COMPLEX_CST:
2708 return real_minus_onep (TREE_REALPART (expr))
2709 && real_zerop (TREE_IMAGPART (expr));
2710 case VECTOR_CST:
2712 unsigned i;
2713 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2714 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2715 return false;
2716 return true;
2718 default:
2719 return false;
2723 /* Nonzero if EXP is a constant or a cast of a constant. */
2726 really_constant_p (const_tree exp)
2728 /* This is not quite the same as STRIP_NOPS. It does more. */
2729 while (CONVERT_EXPR_P (exp)
2730 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2731 exp = TREE_OPERAND (exp, 0);
2732 return TREE_CONSTANT (exp);
2735 /* Return first list element whose TREE_VALUE is ELEM.
2736 Return 0 if ELEM is not in LIST. */
2738 tree
2739 value_member (tree elem, tree list)
2741 while (list)
2743 if (elem == TREE_VALUE (list))
2744 return list;
2745 list = TREE_CHAIN (list);
2747 return NULL_TREE;
2750 /* Return first list element whose TREE_PURPOSE is ELEM.
2751 Return 0 if ELEM is not in LIST. */
2753 tree
2754 purpose_member (const_tree elem, tree list)
2756 while (list)
2758 if (elem == TREE_PURPOSE (list))
2759 return list;
2760 list = TREE_CHAIN (list);
2762 return NULL_TREE;
2765 /* Return true if ELEM is in V. */
2767 bool
2768 vec_member (const_tree elem, vec<tree, va_gc> *v)
2770 unsigned ix;
2771 tree t;
2772 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2773 if (elem == t)
2774 return true;
2775 return false;
2778 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2779 NULL_TREE. */
2781 tree
2782 chain_index (int idx, tree chain)
2784 for (; chain && idx > 0; --idx)
2785 chain = TREE_CHAIN (chain);
2786 return chain;
2789 /* Return nonzero if ELEM is part of the chain CHAIN. */
2792 chain_member (const_tree elem, const_tree chain)
2794 while (chain)
2796 if (elem == chain)
2797 return 1;
2798 chain = DECL_CHAIN (chain);
2801 return 0;
2804 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2805 We expect a null pointer to mark the end of the chain.
2806 This is the Lisp primitive `length'. */
2809 list_length (const_tree t)
2811 const_tree p = t;
2812 #ifdef ENABLE_TREE_CHECKING
2813 const_tree q = t;
2814 #endif
2815 int len = 0;
2817 while (p)
2819 p = TREE_CHAIN (p);
2820 #ifdef ENABLE_TREE_CHECKING
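/* Q advances half as fast as P, so if the chain is circular P
   eventually catches up with Q and the assertion below fires instead
   of the loop running forever.  */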
2821 if (len % 2)
2822 q = TREE_CHAIN (q);
2823 gcc_assert (p != q);
2824 #endif
2825 len++;
2828 return len;
2831 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2832 UNION_TYPE TYPE, or NULL_TREE if none. */
2834 tree
2835 first_field (const_tree type)
2837 tree t = TYPE_FIELDS (type);
2838 while (t && TREE_CODE (t) != FIELD_DECL)
2839 t = TREE_CHAIN (t);
2840 return t;
2843 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2844 by modifying the last node in chain 1 to point to chain 2.
2845 This is the Lisp primitive `nconc'. */
2847 tree
2848 chainon (tree op1, tree op2)
2850 tree t1;
2852 if (!op1)
2853 return op2;
2854 if (!op2)
2855 return op1;
2857 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2858 continue;
2859 TREE_CHAIN (t1) = op2;
2861 #ifdef ENABLE_TREE_CHECKING
2863 tree t2;
2864 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2865 gcc_assert (t2 != t1);
2867 #endif
2869 return op1;
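/* A minimal sketch using TREE_LIST nodes built with tree_cons:

     tree l1 = tree_cons (NULL_TREE, integer_zero_node, NULL_TREE);
     tree l2 = tree_cons (NULL_TREE, integer_one_node, NULL_TREE);
     tree l  = chainon (l1, l2);

   L is L1 with its last TREE_CHAIN now pointing at L2, so
   list_length (l) == 2.  */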
2872 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2874 tree
2875 tree_last (tree chain)
2877 tree next;
2878 if (chain)
2879 while ((next = TREE_CHAIN (chain)))
2880 chain = next;
2881 return chain;
2884 /* Reverse the order of elements in the chain T,
2885 and return the new head of the chain (old last element). */
2887 tree
2888 nreverse (tree t)
2890 tree prev = 0, decl, next;
2891 for (decl = t; decl; decl = next)
2893 /* We shouldn't be using this function to reverse BLOCK chains; we
2894 have blocks_nreverse for that. */
2895 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2896 next = TREE_CHAIN (decl);
2897 TREE_CHAIN (decl) = prev;
2898 prev = decl;
2900 return prev;
2903 /* Return a newly created TREE_LIST node whose
2904 purpose and value fields are PARM and VALUE. */
2906 tree
2907 build_tree_list (tree parm, tree value MEM_STAT_DECL)
2909 tree t = make_node (TREE_LIST PASS_MEM_STAT);
2910 TREE_PURPOSE (t) = parm;
2911 TREE_VALUE (t) = value;
2912 return t;
2915 /* Build a chain of TREE_LIST nodes from a vector. */
2917 tree
2918 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2920 tree ret = NULL_TREE;
2921 tree *pp = &ret;
2922 unsigned int i;
2923 tree t;
2924 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2926 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
2927 pp = &TREE_CHAIN (*pp);
2929 return ret;
2932 /* Return a newly created TREE_LIST node whose
2933 purpose and value fields are PURPOSE and VALUE
2934 and whose TREE_CHAIN is CHAIN. */
2936 tree
2937 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
2939 tree node;
2941 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2942 memset (node, 0, sizeof (struct tree_common));
2944 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2946 TREE_SET_CODE (node, TREE_LIST);
2947 TREE_CHAIN (node) = chain;
2948 TREE_PURPOSE (node) = purpose;
2949 TREE_VALUE (node) = value;
2950 return node;
2953 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2954 trees. */
2956 vec<tree, va_gc> *
2957 ctor_to_vec (tree ctor)
2959 vec<tree, va_gc> *vec;
2960 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2961 unsigned int ix;
2962 tree val;
2964 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2965 vec->quick_push (val);
2967 return vec;
2970 /* Return the size nominally occupied by an object of type TYPE
2971 when it resides in memory. The value is measured in units of bytes,
2972 and its data type is that normally used for type sizes
2973 (which is the first type created by make_signed_type or
2974 make_unsigned_type). */
2976 tree
2977 size_in_bytes_loc (location_t loc, const_tree type)
2979 tree t;
2981 if (type == error_mark_node)
2982 return integer_zero_node;
2984 type = TYPE_MAIN_VARIANT (type);
2985 t = TYPE_SIZE_UNIT (type);
2987 if (t == 0)
2989 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2990 return size_zero_node;
2993 return t;
2996 /* Return the size of TYPE (in bytes) as a wide integer
2997 or return -1 if the size can vary or is larger than an integer. */
2999 HOST_WIDE_INT
3000 int_size_in_bytes (const_tree type)
3002 tree t;
3004 if (type == error_mark_node)
3005 return 0;
3007 type = TYPE_MAIN_VARIANT (type);
3008 t = TYPE_SIZE_UNIT (type);
3010 if (t && tree_fits_uhwi_p (t))
3011 return TREE_INT_CST_LOW (t);
3012 else
3013 return -1;
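/* For example, int_size_in_bytes (integer_type_node) is typically 4,
   while for an incomplete type (TYPE_SIZE_UNIT is null) or a
   variable-length array type (TYPE_SIZE_UNIT is not a constant) the
   function returns -1.  */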
3016 /* Return the maximum size of TYPE (in bytes) as a wide integer
3017 or return -1 if the size can vary or is larger than an integer. */
3019 HOST_WIDE_INT
3020 max_int_size_in_bytes (const_tree type)
3022 HOST_WIDE_INT size = -1;
3023 tree size_tree;
3025 /* If this is an array type, check for a possible MAX_SIZE attached. */
3027 if (TREE_CODE (type) == ARRAY_TYPE)
3029 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3031 if (size_tree && tree_fits_uhwi_p (size_tree))
3032 size = tree_to_uhwi (size_tree);
3035 /* If we still haven't been able to get a size, see if the language
3036 can compute a maximum size. */
3038 if (size == -1)
3040 size_tree = lang_hooks.types.max_size (type);
3042 if (size_tree && tree_fits_uhwi_p (size_tree))
3043 size = tree_to_uhwi (size_tree);
3046 return size;
3049 /* Return the bit position of FIELD, in bits from the start of the record.
3050 This is a tree of type bitsizetype. */
3052 tree
3053 bit_position (const_tree field)
3055 return bit_from_pos (DECL_FIELD_OFFSET (field),
3056 DECL_FIELD_BIT_OFFSET (field));
3059 /* Return the byte position of FIELD, in bytes from the start of the record.
3060 This is a tree of type sizetype. */
3062 tree
3063 byte_position (const_tree field)
3065 return byte_from_pos (DECL_FIELD_OFFSET (field),
3066 DECL_FIELD_BIT_OFFSET (field));
3069 /* Likewise, but return as an integer. It must be representable in
3070 that way (since it could be a signed value, we don't have the
3071 option of returning -1 like int_size_in_bytes can). */
3073 HOST_WIDE_INT
3074 int_byte_position (const_tree field)
3076 return tree_to_shwi (byte_position (field));
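/* These accessors are related by the field layout identities: the bit
   position is DECL_FIELD_OFFSET * BITS_PER_UNIT + DECL_FIELD_BIT_OFFSET,
   and for a field starting on a byte boundary the byte position is
   simply the bit position divided by BITS_PER_UNIT.  */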
3079 /* Return the strictest alignment, in bits, that T is known to have. */
3081 unsigned int
3082 expr_align (const_tree t)
3084 unsigned int align0, align1;
3086 switch (TREE_CODE (t))
3088 CASE_CONVERT: case NON_LVALUE_EXPR:
3089 /* If we have conversions, we know that the alignment of the
3090 object must meet each of the alignments of the types. */
3091 align0 = expr_align (TREE_OPERAND (t, 0));
3092 align1 = TYPE_ALIGN (TREE_TYPE (t));
3093 return MAX (align0, align1);
3095 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3096 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3097 case CLEANUP_POINT_EXPR:
3098 /* These don't change the alignment of an object. */
3099 return expr_align (TREE_OPERAND (t, 0));
3101 case COND_EXPR:
3102 /* The best we can do is say that the alignment is the least aligned
3103 of the two arms. */
3104 align0 = expr_align (TREE_OPERAND (t, 1));
3105 align1 = expr_align (TREE_OPERAND (t, 2));
3106 return MIN (align0, align1);
3108 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3109 meaningfully, it's always 1. */
3110 case LABEL_DECL: case CONST_DECL:
3111 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3112 case FUNCTION_DECL:
3113 gcc_assert (DECL_ALIGN (t) != 0);
3114 return DECL_ALIGN (t);
3116 default:
3117 break;
3120 /* Otherwise take the alignment from that of the type. */
3121 return TYPE_ALIGN (TREE_TYPE (t));
3124 /* Return, as a tree node, the number of elements for TYPE (which is an
3125 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3127 tree
3128 array_type_nelts (const_tree type)
3130 tree index_type, min, max;
3132 /* If they did it with unspecified bounds, then we should have already
3133 given an error about it before we got here. */
3134 if (! TYPE_DOMAIN (type))
3135 return error_mark_node;
3137 index_type = TYPE_DOMAIN (type);
3138 min = TYPE_MIN_VALUE (index_type);
3139 max = TYPE_MAX_VALUE (index_type);
3141 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3142 if (!max)
3143 return error_mark_node;
3145 return (integer_zerop (min)
3146 ? max
3147 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
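/* For example, for the C declaration "int a[10]" the domain is [0, 9]
   and array_type_nelts returns the INTEGER_CST 9; a domain of [1, 10]
   instead yields the folded difference 10 - 1.  */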
3150 /* If arg is static -- a reference to an object in static storage -- then
3151 return the object. This is not the same as the C meaning of `static'.
3152 If arg isn't static, return NULL. */
3154 tree
3155 staticp (tree arg)
3157 switch (TREE_CODE (arg))
3159 case FUNCTION_DECL:
3160 /* Nested functions are static, even though taking their address will
3161 involve a trampoline as we unnest the nested function and create
3162 the trampoline on the tree level. */
3163 return arg;
3165 case VAR_DECL:
3166 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3167 && ! DECL_THREAD_LOCAL_P (arg)
3168 && ! DECL_DLLIMPORT_P (arg)
3169 ? arg : NULL);
3171 case CONST_DECL:
3172 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3173 ? arg : NULL);
3175 case CONSTRUCTOR:
3176 return TREE_STATIC (arg) ? arg : NULL;
3178 case LABEL_DECL:
3179 case STRING_CST:
3180 return arg;
3182 case COMPONENT_REF:
3183 /* If the thing being referenced is not a field, then it is
3184 something language specific. */
3185 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3187 /* If we are referencing a bitfield, we can't evaluate an
3188 ADDR_EXPR at compile time and so it isn't a constant. */
3189 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3190 return NULL;
3192 return staticp (TREE_OPERAND (arg, 0));
3194 case BIT_FIELD_REF:
3195 return NULL;
3197 case INDIRECT_REF:
3198 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3200 case ARRAY_REF:
3201 case ARRAY_RANGE_REF:
3202 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3203 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3204 return staticp (TREE_OPERAND (arg, 0));
3205 else
3206 return NULL;
3208 case COMPOUND_LITERAL_EXPR:
3209 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3211 default:
3212 return NULL;
3219 /* Return whether OP is a DECL whose address is function-invariant. */
3221 bool
3222 decl_address_invariant_p (const_tree op)
3224 /* The conditions below are slightly less strict than the one in
3225 staticp. */
3227 switch (TREE_CODE (op))
3229 case PARM_DECL:
3230 case RESULT_DECL:
3231 case LABEL_DECL:
3232 case FUNCTION_DECL:
3233 return true;
3235 case VAR_DECL:
3236 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3237 || DECL_THREAD_LOCAL_P (op)
3238 || DECL_CONTEXT (op) == current_function_decl
3239 || decl_function_context (op) == current_function_decl)
3240 return true;
3241 break;
3243 case CONST_DECL:
3244 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3245 || decl_function_context (op) == current_function_decl)
3246 return true;
3247 break;
3249 default:
3250 break;
3253 return false;
3256 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3258 bool
3259 decl_address_ip_invariant_p (const_tree op)
3261 /* The conditions below are slightly less strict than the one in
3262 staticp. */
3264 switch (TREE_CODE (op))
3266 case LABEL_DECL:
3267 case FUNCTION_DECL:
3268 case STRING_CST:
3269 return true;
3271 case VAR_DECL:
3272 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3273 && !DECL_DLLIMPORT_P (op))
3274 || DECL_THREAD_LOCAL_P (op))
3275 return true;
3276 break;
3278 case CONST_DECL:
3279 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3280 return true;
3281 break;
3283 default:
3284 break;
3287 return false;
3291 /* Return true if T is function-invariant (internal function, does
3292 not handle arithmetic; that's handled in skip_simple_arithmetic and
3293 tree_invariant_p). */
3295 static bool
3296 tree_invariant_p_1 (tree t)
3298 tree op;
3300 if (TREE_CONSTANT (t)
3301 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3302 return true;
3304 switch (TREE_CODE (t))
3306 case SAVE_EXPR:
3307 return true;
3309 case ADDR_EXPR:
3310 op = TREE_OPERAND (t, 0);
3311 while (handled_component_p (op))
3313 switch (TREE_CODE (op))
3315 case ARRAY_REF:
3316 case ARRAY_RANGE_REF:
3317 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3318 || TREE_OPERAND (op, 2) != NULL_TREE
3319 || TREE_OPERAND (op, 3) != NULL_TREE)
3320 return false;
3321 break;
3323 case COMPONENT_REF:
3324 if (TREE_OPERAND (op, 2) != NULL_TREE)
3325 return false;
3326 break;
3328 default:;
3330 op = TREE_OPERAND (op, 0);
3333 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3335 default:
3336 break;
3339 return false;
3342 /* Return true if T is function-invariant. */
3344 bool
3345 tree_invariant_p (tree t)
3347 tree inner = skip_simple_arithmetic (t);
3348 return tree_invariant_p_1 (inner);
3351 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3352 Do this to any expression which may be used in more than one place,
3353 but must be evaluated only once.
3355 Normally, expand_expr would reevaluate the expression each time.
3356 Calling save_expr produces something that is evaluated and recorded
3357 the first time expand_expr is called on it. Subsequent calls to
3358 expand_expr just reuse the recorded value.
3360 The call to expand_expr that generates code that actually computes
3361 the value is the first call *at compile time*. Subsequent calls
3362 *at compile time* generate code to use the saved value.
3363 This produces correct result provided that *at run time* control
3364 always flows through the insns made by the first expand_expr
3365 before reaching the other places where the save_expr was evaluated.
3366 You, the caller of save_expr, must make sure this is so.
3368 Constants, and certain read-only nodes, are returned with no
3369 SAVE_EXPR because that is safe. Expressions containing placeholders
3370 are not touched; see tree.def for an explanation of what these
3371 are used for. */
3373 tree
3374 save_expr (tree expr)
3376 tree inner;
3378 /* If the tree evaluates to a constant, then we don't want to hide that
3379 fact (i.e. this allows further folding, and direct checks for constants).
3380 However, a read-only object that has side effects cannot be bypassed.
3381 Since it is no problem to reevaluate literals, we just return the
3382 literal node. */
3383 inner = skip_simple_arithmetic (expr);
3384 if (TREE_CODE (inner) == ERROR_MARK)
3385 return inner;
3387 if (tree_invariant_p_1 (inner))
3388 return expr;
3390 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3391 it means that the size or offset of some field of an object depends on
3392 the value within another field.
3394 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3395 and some variable since it would then need to be both evaluated once and
3396 evaluated more than once. Front-ends must assure this case cannot
3397 happen by surrounding any such subexpressions in their own SAVE_EXPR
3398 and forcing evaluation at the proper time. */
3399 if (contains_placeholder_p (inner))
3400 return expr;
3402 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3404 /* This expression might be placed ahead of a jump to ensure that the
3405 value was computed on both sides of the jump. So make sure it isn't
3406 eliminated as dead. */
3407 TREE_SIDE_EFFECTS (expr) = 1;
3408 return expr;
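/* A minimal sketch of the intended use, assuming CALL_TO_F is a
   CALL_EXPR of integer type built by a front end that needs its value
   twice, as in expanding x = MAX (f (), 0) without calling f twice:

     tree val  = save_expr (call_to_f);
     tree cond = fold_build2 (GE_EXPR, boolean_type_node, val,
                              integer_zero_node);
     tree res  = fold_build3 (COND_EXPR, integer_type_node, cond,
                              val, integer_zero_node);

   Both uses of VAL refer to the same SAVE_EXPR, so F is evaluated only
   once at expansion time.  */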
3411 /* Look inside EXPR into any simple arithmetic operations. Return the
3412 outermost non-arithmetic or non-invariant node. */
3414 tree
3415 skip_simple_arithmetic (tree expr)
3417 /* We don't care about whether this can be used as an lvalue in this
3418 context. */
3419 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3420 expr = TREE_OPERAND (expr, 0);
3422 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3423 a constant, it will be more efficient to not make another SAVE_EXPR since
3424 it will allow better simplification and GCSE will be able to merge the
3425 computations if they actually occur. */
3426 while (true)
3428 if (UNARY_CLASS_P (expr))
3429 expr = TREE_OPERAND (expr, 0);
3430 else if (BINARY_CLASS_P (expr))
3432 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3433 expr = TREE_OPERAND (expr, 0);
3434 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3435 expr = TREE_OPERAND (expr, 1);
3436 else
3437 break;
3439 else
3440 break;
3443 return expr;
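/* For instance, given the expression (SAVE_EXPR <x> + 4) * 2 this
   returns the SAVE_EXPR node itself: the constants 2 and 4 are
   invariant, so the walk descends through both binary operations and
   stops at SAVE_EXPR, which is neither a unary nor a binary node.  */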
3446 /* Look inside EXPR into simple arithmetic operations involving constants.
3447 Return the outermost non-arithmetic or non-constant node. */
3449 tree
3450 skip_simple_constant_arithmetic (tree expr)
3452 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3453 expr = TREE_OPERAND (expr, 0);
3455 while (true)
3457 if (UNARY_CLASS_P (expr))
3458 expr = TREE_OPERAND (expr, 0);
3459 else if (BINARY_CLASS_P (expr))
3461 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3462 expr = TREE_OPERAND (expr, 0);
3463 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3464 expr = TREE_OPERAND (expr, 1);
3465 else
3466 break;
3468 else
3469 break;
3472 return expr;
3475 /* Return which tree structure is used by T. */
3477 enum tree_node_structure_enum
3478 tree_node_structure (const_tree t)
3480 const enum tree_code code = TREE_CODE (t);
3481 return tree_node_structure_for_code (code);
3484 /* Set various status flags when building a CALL_EXPR object T. */
3486 static void
3487 process_call_operands (tree t)
3489 bool side_effects = TREE_SIDE_EFFECTS (t);
3490 bool read_only = false;
3491 int i = call_expr_flags (t);
3493 /* Calls have side-effects, except those to const or pure functions. */
3494 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3495 side_effects = true;
3496 /* Propagate TREE_READONLY of arguments for const functions. */
3497 if (i & ECF_CONST)
3498 read_only = true;
3500 if (!side_effects || read_only)
3501 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3503 tree op = TREE_OPERAND (t, i);
3504 if (op && TREE_SIDE_EFFECTS (op))
3505 side_effects = true;
3506 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3507 read_only = false;
3510 TREE_SIDE_EFFECTS (t) = side_effects;
3511 TREE_READONLY (t) = read_only;
3514 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3515 size or offset that depends on a field within a record. */
3517 bool
3518 contains_placeholder_p (const_tree exp)
3520 enum tree_code code;
3522 if (!exp)
3523 return 0;
3525 code = TREE_CODE (exp);
3526 if (code == PLACEHOLDER_EXPR)
3527 return 1;
3529 switch (TREE_CODE_CLASS (code))
3531 case tcc_reference:
3532 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3533 position computations since they will be converted into a
3534 WITH_RECORD_EXPR involving the reference, which we assume
3535 here will be valid. */
3536 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3538 case tcc_exceptional:
3539 if (code == TREE_LIST)
3540 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3541 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3542 break;
3544 case tcc_unary:
3545 case tcc_binary:
3546 case tcc_comparison:
3547 case tcc_expression:
3548 switch (code)
3550 case COMPOUND_EXPR:
3551 /* Ignoring the first operand isn't quite right, but works best. */
3552 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3554 case COND_EXPR:
3555 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3556 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3557 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3559 case SAVE_EXPR:
3560 /* The save_expr function never wraps anything containing
3561 a PLACEHOLDER_EXPR. */
3562 return 0;
3564 default:
3565 break;
3568 switch (TREE_CODE_LENGTH (code))
3570 case 1:
3571 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3572 case 2:
3573 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3574 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3575 default:
3576 return 0;
3579 case tcc_vl_exp:
3580 switch (code)
3582 case CALL_EXPR:
3584 const_tree arg;
3585 const_call_expr_arg_iterator iter;
3586 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3587 if (CONTAINS_PLACEHOLDER_P (arg))
3588 return 1;
3589 return 0;
3591 default:
3592 return 0;
3595 default:
3596 return 0;
3598 return 0;
3601 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3602 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3603 field positions. */
3605 static bool
3606 type_contains_placeholder_1 (const_tree type)
3608 /* If the size contains a placeholder or the parent type (component type in
3609 the case of arrays) type involves a placeholder, this type does. */
3610 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3611 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3612 || (!POINTER_TYPE_P (type)
3613 && TREE_TYPE (type)
3614 && type_contains_placeholder_p (TREE_TYPE (type))))
3615 return true;
3617 /* Now do type-specific checks. Note that the last part of the check above
3618 greatly limits what we have to do below. */
3619 switch (TREE_CODE (type))
3621 case VOID_TYPE:
3622 case POINTER_BOUNDS_TYPE:
3623 case COMPLEX_TYPE:
3624 case ENUMERAL_TYPE:
3625 case BOOLEAN_TYPE:
3626 case POINTER_TYPE:
3627 case OFFSET_TYPE:
3628 case REFERENCE_TYPE:
3629 case METHOD_TYPE:
3630 case FUNCTION_TYPE:
3631 case VECTOR_TYPE:
3632 case NULLPTR_TYPE:
3633 return false;
3635 case INTEGER_TYPE:
3636 case REAL_TYPE:
3637 case FIXED_POINT_TYPE:
3638 /* Here we just check the bounds. */
3639 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3640 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3642 case ARRAY_TYPE:
3643 /* We have already checked the component type above, so just check
3644 the domain type. Flexible array members have a null domain. */
3645 return TYPE_DOMAIN (type) ?
3646 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3648 case RECORD_TYPE:
3649 case UNION_TYPE:
3650 case QUAL_UNION_TYPE:
3652 tree field;
3654 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3655 if (TREE_CODE (field) == FIELD_DECL
3656 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3657 || (TREE_CODE (type) == QUAL_UNION_TYPE
3658 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3659 || type_contains_placeholder_p (TREE_TYPE (field))))
3660 return true;
3662 return false;
3665 default:
3666 gcc_unreachable ();
3670 /* Wrapper around above function used to cache its result. */
3672 bool
3673 type_contains_placeholder_p (tree type)
3675 bool result;
3677 /* If the contains_placeholder_bits field has been initialized,
3678 then we know the answer. */
3679 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3680 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3682 /* Indicate that we've seen this type node, and the answer is false.
3683 This is what we want to return if we run into recursion via fields. */
3684 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3686 /* Compute the real value. */
3687 result = type_contains_placeholder_1 (type);
3689 /* Store the real value. */
3690 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3692 return result;
3695 /* Push tree EXP onto vector QUEUE if it is not already present. */
3697 static void
3698 push_without_duplicates (tree exp, vec<tree> *queue)
3700 unsigned int i;
3701 tree iter;
3703 FOR_EACH_VEC_ELT (*queue, i, iter)
3704 if (simple_cst_equal (iter, exp) == 1)
3705 break;
3707 if (!iter)
3708 queue->safe_push (exp);
3711 /* Given a tree EXP, find all occurrences of references to fields
3712 in a PLACEHOLDER_EXPR and place them in vector REFS without
3713 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3714 we assume here that EXP contains only arithmetic expressions
3715 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3716 argument list. */
3718 void
3719 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3721 enum tree_code code = TREE_CODE (exp);
3722 tree inner;
3723 int i;
3725 /* We handle TREE_LIST and COMPONENT_REF separately. */
3726 if (code == TREE_LIST)
3728 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3729 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3731 else if (code == COMPONENT_REF)
3733 for (inner = TREE_OPERAND (exp, 0);
3734 REFERENCE_CLASS_P (inner);
3735 inner = TREE_OPERAND (inner, 0))
3738 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3739 push_without_duplicates (exp, refs);
3740 else
3741 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3743 else
3744 switch (TREE_CODE_CLASS (code))
3746 case tcc_constant:
3747 break;
3749 case tcc_declaration:
3750 /* Variables allocated to static storage can stay. */
3751 if (!TREE_STATIC (exp))
3752 push_without_duplicates (exp, refs);
3753 break;
3755 case tcc_expression:
3756 /* This is the pattern built in ada/make_aligning_type. */
3757 if (code == ADDR_EXPR
3758 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3760 push_without_duplicates (exp, refs);
3761 break;
3764 /* Fall through. */
3766 case tcc_exceptional:
3767 case tcc_unary:
3768 case tcc_binary:
3769 case tcc_comparison:
3770 case tcc_reference:
3771 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3772 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3773 break;
3775 case tcc_vl_exp:
3776 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3777 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3778 break;
3780 default:
3781 gcc_unreachable ();
3785 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3786 return a tree with all occurrences of references to F in a
3787 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3788 CONST_DECLs. Note that we assume here that EXP contains only
3789 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3790 occurring only in their argument list. */
3792 tree
3793 substitute_in_expr (tree exp, tree f, tree r)
3795 enum tree_code code = TREE_CODE (exp);
3796 tree op0, op1, op2, op3;
3797 tree new_tree;
3799 /* We handle TREE_LIST and COMPONENT_REF separately. */
3800 if (code == TREE_LIST)
3802 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3803 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3804 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3805 return exp;
3807 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3809 else if (code == COMPONENT_REF)
3811 tree inner;
3813 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3814 and it is the right field, replace it with R. */
3815 for (inner = TREE_OPERAND (exp, 0);
3816 REFERENCE_CLASS_P (inner);
3817 inner = TREE_OPERAND (inner, 0))
3820 /* The field. */
3821 op1 = TREE_OPERAND (exp, 1);
3823 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3824 return r;
3826 /* If this expression hasn't been completed yet, leave it alone. */
3827 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3828 return exp;
3830 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3831 if (op0 == TREE_OPERAND (exp, 0))
3832 return exp;
3834 new_tree
3835 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3837 else
3838 switch (TREE_CODE_CLASS (code))
3840 case tcc_constant:
3841 return exp;
3843 case tcc_declaration:
3844 if (exp == f)
3845 return r;
3846 else
3847 return exp;
3849 case tcc_expression:
3850 if (exp == f)
3851 return r;
3853 /* Fall through. */
3855 case tcc_exceptional:
3856 case tcc_unary:
3857 case tcc_binary:
3858 case tcc_comparison:
3859 case tcc_reference:
3860 switch (TREE_CODE_LENGTH (code))
3862 case 0:
3863 return exp;
3865 case 1:
3866 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3867 if (op0 == TREE_OPERAND (exp, 0))
3868 return exp;
3870 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3871 break;
3873 case 2:
3874 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3875 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3877 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3878 return exp;
3880 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3881 break;
3883 case 3:
3884 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3885 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3886 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3888 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3889 && op2 == TREE_OPERAND (exp, 2))
3890 return exp;
3892 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3893 break;
3895 case 4:
3896 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3897 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3898 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3899 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3901 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3902 && op2 == TREE_OPERAND (exp, 2)
3903 && op3 == TREE_OPERAND (exp, 3))
3904 return exp;
3906 new_tree
3907 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3908 break;
3910 default:
3911 gcc_unreachable ();
3913 break;
3915 case tcc_vl_exp:
3917 int i;
3919 new_tree = NULL_TREE;
3921 /* If we are trying to replace F with a constant or with another
3922 instance of one of the arguments of the call, inline back
3923 functions that do nothing but compute a value from
3924 the arguments they are passed. This makes it possible to
3925 fold the replacement expression partially or entirely. */
3926 if (code == CALL_EXPR)
3928 bool maybe_inline = false;
3929 if (CONSTANT_CLASS_P (r))
3930 maybe_inline = true;
3931 else
3932 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
3933 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
3935 maybe_inline = true;
3936 break;
3938 if (maybe_inline)
3940 tree t = maybe_inline_call_in_expr (exp);
3941 if (t)
3942 return SUBSTITUTE_IN_EXPR (t, f, r);
3946 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3948 tree op = TREE_OPERAND (exp, i);
3949 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3950 if (new_op != op)
3952 if (!new_tree)
3953 new_tree = copy_node (exp);
3954 TREE_OPERAND (new_tree, i) = new_op;
3958 if (new_tree)
3960 new_tree = fold (new_tree);
3961 if (TREE_CODE (new_tree) == CALL_EXPR)
3962 process_call_operands (new_tree);
3964 else
3965 return exp;
3967 break;
3969 default:
3970 gcc_unreachable ();
3973 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3975 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3976 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3978 return new_tree;
3981 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3982 for it within OBJ, a tree that is an object or a chain of references. */
3984 tree
3985 substitute_placeholder_in_expr (tree exp, tree obj)
3987 enum tree_code code = TREE_CODE (exp);
3988 tree op0, op1, op2, op3;
3989 tree new_tree;
3991 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3992 in the chain of OBJ. */
3993 if (code == PLACEHOLDER_EXPR)
3995 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3996 tree elt;
3998 for (elt = obj; elt != 0;
3999 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4000 || TREE_CODE (elt) == COND_EXPR)
4001 ? TREE_OPERAND (elt, 1)
4002 : (REFERENCE_CLASS_P (elt)
4003 || UNARY_CLASS_P (elt)
4004 || BINARY_CLASS_P (elt)
4005 || VL_EXP_CLASS_P (elt)
4006 || EXPRESSION_CLASS_P (elt))
4007 ? TREE_OPERAND (elt, 0) : 0))
4008 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4009 return elt;
4011 for (elt = obj; elt != 0;
4012 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4013 || TREE_CODE (elt) == COND_EXPR)
4014 ? TREE_OPERAND (elt, 1)
4015 : (REFERENCE_CLASS_P (elt)
4016 || UNARY_CLASS_P (elt)
4017 || BINARY_CLASS_P (elt)
4018 || VL_EXP_CLASS_P (elt)
4019 || EXPRESSION_CLASS_P (elt))
4020 ? TREE_OPERAND (elt, 0) : 0))
4021 if (POINTER_TYPE_P (TREE_TYPE (elt))
4022 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4023 == need_type))
4024 return fold_build1 (INDIRECT_REF, need_type, elt);
4026 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4027 survives until RTL generation, there will be an error. */
4028 return exp;
4031 /* TREE_LIST is special because we need to look at TREE_VALUE
4032 and TREE_CHAIN, not TREE_OPERANDS. */
4033 else if (code == TREE_LIST)
4035 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4036 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4037 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4038 return exp;
4040 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4042 else
4043 switch (TREE_CODE_CLASS (code))
4045 case tcc_constant:
4046 case tcc_declaration:
4047 return exp;
4049 case tcc_exceptional:
4050 case tcc_unary:
4051 case tcc_binary:
4052 case tcc_comparison:
4053 case tcc_expression:
4054 case tcc_reference:
4055 case tcc_statement:
4056 switch (TREE_CODE_LENGTH (code))
4058 case 0:
4059 return exp;
4061 case 1:
4062 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4063 if (op0 == TREE_OPERAND (exp, 0))
4064 return exp;
4066 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4067 break;
4069 case 2:
4070 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4071 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4073 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4074 return exp;
4076 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4077 break;
4079 case 3:
4080 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4081 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4082 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4084 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4085 && op2 == TREE_OPERAND (exp, 2))
4086 return exp;
4088 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4089 break;
4091 case 4:
4092 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4093 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4094 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4095 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4097 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4098 && op2 == TREE_OPERAND (exp, 2)
4099 && op3 == TREE_OPERAND (exp, 3))
4100 return exp;
4102 new_tree
4103 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4104 break;
4106 default:
4107 gcc_unreachable ();
4109 break;
4111 case tcc_vl_exp:
4113 int i;
4115 new_tree = NULL_TREE;
4117 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4119 tree op = TREE_OPERAND (exp, i);
4120 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4121 if (new_op != op)
4123 if (!new_tree)
4124 new_tree = copy_node (exp);
4125 TREE_OPERAND (new_tree, i) = new_op;
4129 if (new_tree)
4131 new_tree = fold (new_tree);
4132 if (TREE_CODE (new_tree) == CALL_EXPR)
4133 process_call_operands (new_tree);
4135 else
4136 return exp;
4138 break;
4140 default:
4141 gcc_unreachable ();
4144 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4146 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4147 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4149 return new_tree;
4153 /* Subroutine of stabilize_reference; this is called for subtrees of
4154 references. Any expression with side-effects must be put in a SAVE_EXPR
4155 to ensure that it is only evaluated once.
4157 We don't put SAVE_EXPR nodes around everything, because assigning very
4158 simple expressions to temporaries causes us to miss good opportunities
4159 for optimizations. Among other things, the opportunity to fold in the
4160 addition of a constant into an addressing mode often gets lost, e.g.
4161 "y[i+1] += x;". In general, we take the approach that we should not make
4162 an assignment unless we are forced into it - i.e., that any non-side effect
4163 operator should be allowed, and that cse should take care of coalescing
4164 multiple utterances of the same expression should that prove fruitful. */
4166 static tree
4167 stabilize_reference_1 (tree e)
4169 tree result;
4170 enum tree_code code = TREE_CODE (e);
4172 /* We cannot ignore const expressions because such an expression might
4173 be a reference to a const array whose index contains side-effects. But
4174 we can ignore things that are actually constant or that already have been
4175 handled by this function. */
4177 if (tree_invariant_p (e))
4178 return e;
4180 switch (TREE_CODE_CLASS (code))
4182 case tcc_exceptional:
4183 case tcc_type:
4184 case tcc_declaration:
4185 case tcc_comparison:
4186 case tcc_statement:
4187 case tcc_expression:
4188 case tcc_reference:
4189 case tcc_vl_exp:
4190 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4191 so that it will only be evaluated once. */
4192 /* The reference (r) and comparison (<) classes could be handled as
4193 below, but it is generally faster to only evaluate them once. */
4194 if (TREE_SIDE_EFFECTS (e))
4195 return save_expr (e);
4196 return e;
4198 case tcc_constant:
4199 /* Constants need no processing. In fact, we should never reach
4200 here. */
4201 return e;
4203 case tcc_binary:
4204 /* Division is slow and tends to be compiled with jumps,
4205 especially the division by powers of 2 that is often
4206 found inside of an array reference. So do it just once. */
4207 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4208 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4209 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4210 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4211 return save_expr (e);
4212 /* Recursively stabilize each operand. */
4213 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4214 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4215 break;
4217 case tcc_unary:
4218 /* Recursively stabilize each operand. */
4219 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4220 break;
4222 default:
4223 gcc_unreachable ();
4226 TREE_TYPE (result) = TREE_TYPE (e);
4227 TREE_READONLY (result) = TREE_READONLY (e);
4228 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4229 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4231 return result;
4234 /* Stabilize a reference so that we can use it any number of times
4235 without causing its operands to be evaluated more than once.
4236 Returns the stabilized reference. This works by means of save_expr,
4237 so see the caveats in the comments about save_expr.
4239 Also allows conversion expressions whose operands are references.
4240 Any other kind of expression is returned unchanged. */
4242 tree
4243 stabilize_reference (tree ref)
4245 tree result;
4246 enum tree_code code = TREE_CODE (ref);
4248 switch (code)
4250 case VAR_DECL:
4251 case PARM_DECL:
4252 case RESULT_DECL:
4253 /* No action is needed in this case. */
4254 return ref;
4256 CASE_CONVERT:
4257 case FLOAT_EXPR:
4258 case FIX_TRUNC_EXPR:
4259 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4260 break;
4262 case INDIRECT_REF:
4263 result = build_nt (INDIRECT_REF,
4264 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4265 break;
4267 case COMPONENT_REF:
4268 result = build_nt (COMPONENT_REF,
4269 stabilize_reference (TREE_OPERAND (ref, 0)),
4270 TREE_OPERAND (ref, 1), NULL_TREE);
4271 break;
4273 case BIT_FIELD_REF:
4274 result = build_nt (BIT_FIELD_REF,
4275 stabilize_reference (TREE_OPERAND (ref, 0)),
4276 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4277 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4278 break;
4280 case ARRAY_REF:
4281 result = build_nt (ARRAY_REF,
4282 stabilize_reference (TREE_OPERAND (ref, 0)),
4283 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4284 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4285 break;
4287 case ARRAY_RANGE_REF:
4288 result = build_nt (ARRAY_RANGE_REF,
4289 stabilize_reference (TREE_OPERAND (ref, 0)),
4290 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4291 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4292 break;
4294 case COMPOUND_EXPR:
4295 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4296 it wouldn't be ignored. This matters when dealing with
4297 volatiles. */
4298 return stabilize_reference_1 (ref);
4300 /* If arg isn't a kind of lvalue we recognize, make no change.
4301 Caller should recognize the error for an invalid lvalue. */
4302 default:
4303 return ref;
4305 case ERROR_MARK:
4306 return error_mark_node;
4309 TREE_TYPE (result) = TREE_TYPE (ref);
4310 TREE_READONLY (result) = TREE_READONLY (ref);
4311 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4312 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4314 return result;
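/* Purely illustrative usage sketch (not taken from any front end): a
   front end lowering a compound assignment such as "a[i ()] += 1" might
   stabilize the reference once so that the call producing the index is
   evaluated a single time for both the read and the write:

     tree lhs = stabilize_reference (ref);
     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs,
                        build_int_cst (TREE_TYPE (lhs), 1));
     tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

   Both occurrences of LHS share the SAVE_EXPRs created by
   stabilize_reference_1, so the index expression is evaluated once.  */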
4317 /* Low-level constructors for expressions. */
4319 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4320 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4322 void
4323 recompute_tree_invariant_for_addr_expr (tree t)
4325 tree node;
4326 bool tc = true, se = false;
4328 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4330 /* We started out assuming this address is both invariant and constant and

4331 does not have side effects. Now go down any handled components and see if
4332 any of them involve offsets that are either non-constant or non-invariant.
4333 Also check for side-effects.
4335 ??? Note that this code makes no attempt to deal with the case where
4336 taking the address of something causes a copy due to misalignment. */
4338 #define UPDATE_FLAGS(NODE) \
4339 do { tree _node = (NODE); \
4340 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4341 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4343 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4344 node = TREE_OPERAND (node, 0))
4346 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4347 array reference (probably made temporarily by the G++ front end),
4348 so ignore all the operands. */
4349 if ((TREE_CODE (node) == ARRAY_REF
4350 || TREE_CODE (node) == ARRAY_RANGE_REF)
4351 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4353 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4354 if (TREE_OPERAND (node, 2))
4355 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4356 if (TREE_OPERAND (node, 3))
4357 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4359 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4360 FIELD_DECL, apparently. The G++ front end can put something else
4361 there, at least temporarily. */
4362 else if (TREE_CODE (node) == COMPONENT_REF
4363 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4365 if (TREE_OPERAND (node, 2))
4366 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4370 node = lang_hooks.expr_to_decl (node, &tc, &se);
4372 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4373 the address, since &(*a)->b is a form of addition. If it's a constant, the
4374 address is constant too. If it's a decl, its address is constant if the
4375 decl is static. Everything else is not constant and, furthermore,
4376 taking the address of a volatile variable is not volatile. */
4377 if (TREE_CODE (node) == INDIRECT_REF
4378 || TREE_CODE (node) == MEM_REF)
4379 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4380 else if (CONSTANT_CLASS_P (node))
4382 else if (DECL_P (node))
4383 tc &= (staticp (node) != NULL_TREE);
4384 else
4386 tc = false;
4387 se |= TREE_SIDE_EFFECTS (node);
4391 TREE_CONSTANT (t) = tc;
4392 TREE_SIDE_EFFECTS (t) = se;
4393 #undef UPDATE_FLAGS
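/* Informal examples of the result (for illustration only; the decls are
   made up):
     &some_static_var            -> TREE_CONSTANT (staticp is non-NULL)
     &some_automatic_var         -> not constant (automatic storage)
     &static_array[i]            -> not constant when I is not TREE_CONSTANT
     &ptr->field                 -> flags copied from the MEM_REF base PTR  */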
4396 /* Build an expression of code CODE, data type TYPE, and operands as
4397 specified. Expressions and reference nodes can be created this way.
4398 Constants, decls, types and misc nodes cannot be.
4400 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4401 enough for all extant tree codes. */
4403 tree
4404 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4406 tree t;
4408 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4410 t = make_node (code PASS_MEM_STAT);
4411 TREE_TYPE (t) = tt;
4413 return t;
4416 tree
4417 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4419 int length = sizeof (struct tree_exp);
4420 tree t;
4422 record_node_allocation_statistics (code, length);
4424 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4426 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4428 memset (t, 0, sizeof (struct tree_common));
4430 TREE_SET_CODE (t, code);
4432 TREE_TYPE (t) = type;
4433 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4434 TREE_OPERAND (t, 0) = node;
4435 if (node && !TYPE_P (node))
4437 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4438 TREE_READONLY (t) = TREE_READONLY (node);
4441 if (TREE_CODE_CLASS (code) == tcc_statement)
4442 TREE_SIDE_EFFECTS (t) = 1;
4443 else switch (code)
4445 case VA_ARG_EXPR:
4446 /* All of these have side-effects, no matter what their
4447 operands are. */
4448 TREE_SIDE_EFFECTS (t) = 1;
4449 TREE_READONLY (t) = 0;
4450 break;
4452 case INDIRECT_REF:
4453 /* Whether a dereference is readonly has nothing to do with whether
4454 its operand is readonly. */
4455 TREE_READONLY (t) = 0;
4456 break;
4458 case ADDR_EXPR:
4459 if (node)
4460 recompute_tree_invariant_for_addr_expr (t);
4461 break;
4463 default:
4464 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4465 && node && !TYPE_P (node)
4466 && TREE_CONSTANT (node))
4467 TREE_CONSTANT (t) = 1;
4468 if (TREE_CODE_CLASS (code) == tcc_reference
4469 && node && TREE_THIS_VOLATILE (node))
4470 TREE_THIS_VOLATILE (t) = 1;
4471 break;
4474 return t;
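/* A minimal usage sketch (illustrative only; X stands for some pre-existing
   expression of type integer_type_node):

     tree neg = build1 (NEGATE_EXPR, integer_type_node, x);

   TREE_SIDE_EFFECTS and TREE_READONLY of NEG are copied from X above, and
   TREE_CONSTANT is set when X is constant, since NEGATE_EXPR is a
   tcc_unary code.  */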
4477 #define PROCESS_ARG(N) \
4478 do { \
4479 TREE_OPERAND (t, N) = arg##N; \
4480 if (arg##N &&!TYPE_P (arg##N)) \
4482 if (TREE_SIDE_EFFECTS (arg##N)) \
4483 side_effects = 1; \
4484 if (!TREE_READONLY (arg##N) \
4485 && !CONSTANT_CLASS_P (arg##N)) \
4486 (void) (read_only = 0); \
4487 if (!TREE_CONSTANT (arg##N)) \
4488 (void) (constant = 0); \
4490 } while (0)
4492 tree
4493 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4495 bool constant, read_only, side_effects, div_by_zero;
4496 tree t;
4498 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4500 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4501 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4502 /* When sizetype precision doesn't match that of pointers
4503 we need to be able to build explicit extensions or truncations
4504 of the offset argument. */
4505 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4506 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4507 && TREE_CODE (arg1) == INTEGER_CST);
4509 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4510 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4511 && ptrofftype_p (TREE_TYPE (arg1)));
4513 t = make_node (code PASS_MEM_STAT);
4514 TREE_TYPE (t) = tt;
4516 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4517 result based on those same flags for the arguments. But if the
4518 arguments aren't really even `tree' expressions, we shouldn't be trying
4519 to do this. */
4521 /* Expressions without side effects may be constant if their
4522 arguments are as well. */
4523 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4524 || TREE_CODE_CLASS (code) == tcc_binary);
4525 read_only = 1;
4526 side_effects = TREE_SIDE_EFFECTS (t);
4528 switch (code)
4530 case TRUNC_DIV_EXPR:
4531 case CEIL_DIV_EXPR:
4532 case FLOOR_DIV_EXPR:
4533 case ROUND_DIV_EXPR:
4534 case EXACT_DIV_EXPR:
4535 case CEIL_MOD_EXPR:
4536 case FLOOR_MOD_EXPR:
4537 case ROUND_MOD_EXPR:
4538 case TRUNC_MOD_EXPR:
4539 div_by_zero = integer_zerop (arg1);
4540 break;
4541 default:
4542 div_by_zero = false;
4545 PROCESS_ARG (0);
4546 PROCESS_ARG (1);
4548 TREE_SIDE_EFFECTS (t) = side_effects;
4549 if (code == MEM_REF)
4551 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4553 tree o = TREE_OPERAND (arg0, 0);
4554 TREE_READONLY (t) = TREE_READONLY (o);
4555 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4558 else
4560 TREE_READONLY (t) = read_only;
4561 /* Don't mark X / 0 as constant. */
4562 TREE_CONSTANT (t) = constant && !div_by_zero;
4563 TREE_THIS_VOLATILE (t)
4564 = (TREE_CODE_CLASS (code) == tcc_reference
4565 && arg0 && TREE_THIS_VOLATILE (arg0));
4568 return t;
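/* Sketch of the POINTER_PLUS_EXPR invariant asserted above (illustrative;
   PTR and IDX are assumed to be pre-existing trees):

     tree off = fold_convert (sizetype, idx);
     tree p1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   The second operand must satisfy ptrofftype_p, while the result type and
   the first operand are pointer types.  */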
4572 tree
4573 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4574 tree arg2 MEM_STAT_DECL)
4576 bool constant, read_only, side_effects;
4577 tree t;
4579 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4580 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4582 t = make_node (code PASS_MEM_STAT);
4583 TREE_TYPE (t) = tt;
4585 read_only = 1;
4587 /* As a special exception, if COND_EXPR has NULL branches, we
4588 assume that it is a gimple statement and always consider
4589 it to have side effects. */
4590 if (code == COND_EXPR
4591 && tt == void_type_node
4592 && arg1 == NULL_TREE
4593 && arg2 == NULL_TREE)
4594 side_effects = true;
4595 else
4596 side_effects = TREE_SIDE_EFFECTS (t);
4598 PROCESS_ARG (0);
4599 PROCESS_ARG (1);
4600 PROCESS_ARG (2);
4602 if (code == COND_EXPR)
4603 TREE_READONLY (t) = read_only;
4605 TREE_SIDE_EFFECTS (t) = side_effects;
4606 TREE_THIS_VOLATILE (t)
4607 = (TREE_CODE_CLASS (code) == tcc_reference
4608 && arg0 && TREE_THIS_VOLATILE (arg0));
4610 return t;
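/* Illustrative note: a GIMPLE-style conditional with no branches,

     build3 (COND_EXPR, void_type_node, cond, NULL_TREE, NULL_TREE),

   always gets TREE_SIDE_EFFECTS set by the special case above, even when
   COND itself has none.  */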
4613 tree
4614 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4615 tree arg2, tree arg3 MEM_STAT_DECL)
4617 bool constant, read_only, side_effects;
4618 tree t;
4620 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4622 t = make_node (code PASS_MEM_STAT);
4623 TREE_TYPE (t) = tt;
4625 side_effects = TREE_SIDE_EFFECTS (t);
4627 PROCESS_ARG (0);
4628 PROCESS_ARG (1);
4629 PROCESS_ARG (2);
4630 PROCESS_ARG (3);
4632 TREE_SIDE_EFFECTS (t) = side_effects;
4633 TREE_THIS_VOLATILE (t)
4634 = (TREE_CODE_CLASS (code) == tcc_reference
4635 && arg0 && TREE_THIS_VOLATILE (arg0));
4637 return t;
4640 tree
4641 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4642 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4644 bool constant, read_only, side_effects;
4645 tree t;
4647 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4649 t = make_node (code PASS_MEM_STAT);
4650 TREE_TYPE (t) = tt;
4652 side_effects = TREE_SIDE_EFFECTS (t);
4654 PROCESS_ARG (0);
4655 PROCESS_ARG (1);
4656 PROCESS_ARG (2);
4657 PROCESS_ARG (3);
4658 PROCESS_ARG (4);
4660 TREE_SIDE_EFFECTS (t) = side_effects;
4661 if (code == TARGET_MEM_REF)
4663 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4665 tree o = TREE_OPERAND (arg0, 0);
4666 TREE_READONLY (t) = TREE_READONLY (o);
4667 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4670 else
4671 TREE_THIS_VOLATILE (t)
4672 = (TREE_CODE_CLASS (code) == tcc_reference
4673 && arg0 && TREE_THIS_VOLATILE (arg0));
4675 return t;
4678 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4679 on the pointer PTR. */
4681 tree
4682 build_simple_mem_ref_loc (location_t loc, tree ptr)
4684 HOST_WIDE_INT offset = 0;
4685 tree ptype = TREE_TYPE (ptr);
4686 tree tem;
4687 /* For convenience allow addresses that collapse to a simple base
4688 and offset. */
4689 if (TREE_CODE (ptr) == ADDR_EXPR
4690 && (handled_component_p (TREE_OPERAND (ptr, 0))
4691 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4693 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4694 gcc_assert (ptr);
4695 ptr = build_fold_addr_expr (ptr);
4696 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4698 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4699 ptr, build_int_cst (ptype, offset));
4700 SET_EXPR_LOCATION (tem, loc);
4701 return tem;
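/* Informal example: for a pointer P of type "int *",

     build_simple_mem_ref_loc (loc, p)

   yields MEM_REF <int> (P, (int *) 0), i.e. a dereference of P at offset
   zero, equivalent to a plain *P.  */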
4704 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4706 offset_int
4707 mem_ref_offset (const_tree t)
4709 return offset_int::from (wi::to_wide (TREE_OPERAND (t, 1)), SIGNED);
4712 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4713 offsetted by OFFSET units. */
4715 tree
4716 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4718 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4719 build_fold_addr_expr (base),
4720 build_int_cst (ptr_type_node, offset));
4721 tree addr = build1 (ADDR_EXPR, type, ref);
4722 recompute_tree_invariant_for_addr_expr (addr);
4723 return addr;
4726 /* Similar except don't specify the TREE_TYPE
4727 and leave the TREE_SIDE_EFFECTS as 0.
4728 It is permissible for arguments to be null,
4729 or even garbage if their values do not matter. */
4731 tree
4732 build_nt (enum tree_code code, ...)
4734 tree t;
4735 int length;
4736 int i;
4737 va_list p;
4739 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4741 va_start (p, code);
4743 t = make_node (code);
4744 length = TREE_CODE_LENGTH (code);
4746 for (i = 0; i < length; i++)
4747 TREE_OPERAND (t, i) = va_arg (p, tree);
4749 va_end (p);
4750 return t;
4753 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4754 tree vec. */
4756 tree
4757 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4759 tree ret, t;
4760 unsigned int ix;
4762 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4763 CALL_EXPR_FN (ret) = fn;
4764 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4765 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4766 CALL_EXPR_ARG (ret, ix) = t;
4767 return ret;
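/* Usage sketch (illustrative; FN, ARG0 and ARG1 are assumed to exist):

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree call = build_nt_call_vec (fn, args);

   As with build_nt, no flag propagation or folding is performed on the
   resulting CALL_EXPR.  */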
4770 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4771 We do NOT enter this node in any sort of symbol table.
4773 LOC is the location of the decl.
4775 layout_decl is used to set up the decl's storage layout.
4776 Other slots are initialized to 0 or null pointers. */
4778 tree
4779 build_decl (location_t loc, enum tree_code code, tree name,
4780 tree type MEM_STAT_DECL)
4782 tree t;
4784 t = make_node (code PASS_MEM_STAT);
4785 DECL_SOURCE_LOCATION (t) = loc;
4787 /* if (type == error_mark_node)
4788 type = integer_type_node; */
4789 /* That is not done, deliberately, so that having error_mark_node
4790 as the type can suppress useless errors in the use of this variable. */
4792 DECL_NAME (t) = name;
4793 TREE_TYPE (t) = type;
4795 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4796 layout_decl (t, 0);
4798 return t;
4801 /* Builds and returns function declaration with NAME and TYPE. */
4803 tree
4804 build_fn_decl (const char *name, tree type)
4806 tree id = get_identifier (name);
4807 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4809 DECL_EXTERNAL (decl) = 1;
4810 TREE_PUBLIC (decl) = 1;
4811 DECL_ARTIFICIAL (decl) = 1;
4812 TREE_NOTHROW (decl) = 1;
4814 return decl;
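/* Usage sketch (the name "__illustrative_fn" is made up for the example):

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("__illustrative_fn", fntype);

   The resulting FUNCTION_DECL is external, public, artificial and nothrow,
   as set above.  */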
4817 vec<tree, va_gc> *all_translation_units;
4819 /* Builds a new translation-unit decl with name NAME, queues it in the
4820 global list of translation-unit decls and returns it. */
4822 tree
4823 build_translation_unit_decl (tree name)
4825 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4826 name, NULL_TREE);
4827 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4828 vec_safe_push (all_translation_units, tu);
4829 return tu;
4833 /* BLOCK nodes are used to represent the structure of binding contours
4834 and declarations, once those contours have been exited and their contents
4835 compiled. This information is used for outputting debugging info. */
4837 tree
4838 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4840 tree block = make_node (BLOCK);
4842 BLOCK_VARS (block) = vars;
4843 BLOCK_SUBBLOCKS (block) = subblocks;
4844 BLOCK_SUPERCONTEXT (block) = supercontext;
4845 BLOCK_CHAIN (block) = chain;
4846 return block;
4850 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4852 LOC is the location to use in tree T. */
4854 void
4855 protected_set_expr_location (tree t, location_t loc)
4857 if (CAN_HAVE_LOCATION_P (t))
4858 SET_EXPR_LOCATION (t, loc);
4861 /* Reset the expression *EXPR_P, a size or position.
4863 ??? We could reset all non-constant sizes or positions. But it's cheap
4864 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4866 We need to reset self-referential sizes or positions because they cannot
4867 be gimplified and thus can contain a CALL_EXPR after the gimplification
4868 is finished, which will run afoul of LTO streaming. And they need to be
4869 reset to something essentially dummy but not constant, so as to preserve
4870 the properties of the object they are attached to. */
4872 static inline void
4873 free_lang_data_in_one_sizepos (tree *expr_p)
4875 tree expr = *expr_p;
4876 if (CONTAINS_PLACEHOLDER_P (expr))
4877 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4881 /* Reset all the fields in a binfo node BINFO. We only keep
4882 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4884 static void
4885 free_lang_data_in_binfo (tree binfo)
4887 unsigned i;
4888 tree t;
4890 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4892 BINFO_VIRTUALS (binfo) = NULL_TREE;
4893 BINFO_BASE_ACCESSES (binfo) = NULL;
4894 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4895 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4897 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4898 free_lang_data_in_binfo (t);
4902 /* Reset all language specific information still present in TYPE. */
4904 static void
4905 free_lang_data_in_type (tree type)
4907 gcc_assert (TYPE_P (type));
4909 /* Give the FE a chance to remove its own data first. */
4910 lang_hooks.free_lang_data (type);
4912 TREE_LANG_FLAG_0 (type) = 0;
4913 TREE_LANG_FLAG_1 (type) = 0;
4914 TREE_LANG_FLAG_2 (type) = 0;
4915 TREE_LANG_FLAG_3 (type) = 0;
4916 TREE_LANG_FLAG_4 (type) = 0;
4917 TREE_LANG_FLAG_5 (type) = 0;
4918 TREE_LANG_FLAG_6 (type) = 0;
4920 if (TREE_CODE (type) == FUNCTION_TYPE)
4922 /* Remove the const and volatile qualifiers from arguments. The
4923 C++ front end removes them, but the C front end does not,
4924 leading to false ODR violation errors when merging two
4925 instances of the same function signature compiled by
4926 different front ends. */
4927 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4929 tree arg_type = TREE_VALUE (p);
4931 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4933 int quals = TYPE_QUALS (arg_type)
4934 & ~TYPE_QUAL_CONST
4935 & ~TYPE_QUAL_VOLATILE;
4936 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4937 free_lang_data_in_type (TREE_VALUE (p));
4939 /* C++ FE uses TREE_PURPOSE to store initial values. */
4940 TREE_PURPOSE (p) = NULL;
4943 else if (TREE_CODE (type) == METHOD_TYPE)
4944 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4945 /* C++ FE uses TREE_PURPOSE to store initial values. */
4946 TREE_PURPOSE (p) = NULL;
4947 else if (RECORD_OR_UNION_TYPE_P (type))
4949 /* Remove members that are not FIELD_DECLs (and maybe
4950 TYPE_DECLs) from the field list of an aggregate. These occur
4951 in C++. */
4952 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
4953 if (TREE_CODE (member) == FIELD_DECL
4954 || (TREE_CODE (member) == TYPE_DECL
4955 && !DECL_IGNORED_P (member)
4956 && debug_info_level > DINFO_LEVEL_TERSE
4957 && !is_redundant_typedef (member)))
4958 prev = &DECL_CHAIN (member);
4959 else
4960 *prev = DECL_CHAIN (member);
4962 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
4963 and dangle the pointer from time to time. */
4964 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
4965 TYPE_VFIELD (type) = NULL_TREE;
4967 if (TYPE_BINFO (type))
4969 free_lang_data_in_binfo (TYPE_BINFO (type));
4970 /* We need to preserve the link to bases and virtual tables for all
4971 polymorphic types to keep the devirtualization machinery working.
4972 Debug output cares only about bases, but we also output the
4973 virtual table pointers so that merging -fdevirtualize and
4974 -fno-devirtualize units is easier. */
4975 if ((!BINFO_VTABLE (TYPE_BINFO (type))
4976 || !flag_devirtualize)
4977 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
4978 && !BINFO_VTABLE (TYPE_BINFO (type)))
4979 || debug_info_level != DINFO_LEVEL_NONE))
4980 TYPE_BINFO (type) = NULL;
4983 else if (INTEGRAL_TYPE_P (type)
4984 || SCALAR_FLOAT_TYPE_P (type)
4985 || FIXED_POINT_TYPE_P (type))
4987 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4988 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4991 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4993 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4994 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4996 if (TYPE_CONTEXT (type)
4997 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4999 tree ctx = TYPE_CONTEXT (type);
5002 ctx = BLOCK_SUPERCONTEXT (ctx);
5004 while (ctx && TREE_CODE (ctx) == BLOCK);
5005 TYPE_CONTEXT (type) = ctx;
5010 /* Return true if DECL may need an assembler name to be set. */
5012 static inline bool
5013 need_assembler_name_p (tree decl)
5015 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5016 Rule merging. This makes type_odr_p return true on those types during
5017 LTO and, by comparing the mangled names, we can say which types are intended
5018 to be equivalent across compilation units.
5020 We do not store names of type_in_anonymous_namespace_p.
5022 Record, union and enumeration types have linkage that allows us
5023 to check type_in_anonymous_namespace_p. We do not mangle compound types
5024 that can always be compared structurally.
5026 Similarly for builtin types, we compare properties of their main variant.
5027 A special case are integer types, where mangling does make a difference
5028 between char/signed char/unsigned char etc. Storing the name for these lets
5029 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5030 See cp/mangle.c:write_builtin_type for details. */
5032 if (flag_lto_odr_type_mering
5033 && TREE_CODE (decl) == TYPE_DECL
5034 && DECL_NAME (decl)
5035 && decl == TYPE_NAME (TREE_TYPE (decl))
5036 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5037 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5038 && (type_with_linkage_p (TREE_TYPE (decl))
5039 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5040 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5041 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5042 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5043 if (!VAR_OR_FUNCTION_DECL_P (decl))
5044 return false;
5046 /* If DECL already has its assembler name set, it does not need a
5047 new one. */
5048 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5049 || DECL_ASSEMBLER_NAME_SET_P (decl))
5050 return false;
5052 /* Abstract decls do not need an assembler name. */
5053 if (DECL_ABSTRACT_P (decl))
5054 return false;
5056 /* For VAR_DECLs, only static, public and external symbols need an
5057 assembler name. */
5058 if (VAR_P (decl)
5059 && !TREE_STATIC (decl)
5060 && !TREE_PUBLIC (decl)
5061 && !DECL_EXTERNAL (decl))
5062 return false;
5064 if (TREE_CODE (decl) == FUNCTION_DECL)
5066 /* Do not set assembler name on builtins. Allow RTL expansion to
5067 decide whether to expand inline or via a regular call. */
5068 if (DECL_BUILT_IN (decl)
5069 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5070 return false;
5072 /* Functions represented in the callgraph need an assembler name. */
5073 if (cgraph_node::get (decl) != NULL)
5074 return true;
5076 /* Unused and not public functions don't need an assembler name. */
5077 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5078 return false;
5081 return true;
5085 /* Reset all language specific information still present in symbol
5086 DECL. */
5088 static void
5089 free_lang_data_in_decl (tree decl)
5091 gcc_assert (DECL_P (decl));
5093 /* Give the FE a chance to remove its own data first. */
5094 lang_hooks.free_lang_data (decl);
5096 TREE_LANG_FLAG_0 (decl) = 0;
5097 TREE_LANG_FLAG_1 (decl) = 0;
5098 TREE_LANG_FLAG_2 (decl) = 0;
5099 TREE_LANG_FLAG_3 (decl) = 0;
5100 TREE_LANG_FLAG_4 (decl) = 0;
5101 TREE_LANG_FLAG_5 (decl) = 0;
5102 TREE_LANG_FLAG_6 (decl) = 0;
5104 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5105 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5106 if (TREE_CODE (decl) == FIELD_DECL)
5108 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5109 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5110 DECL_QUALIFIER (decl) = NULL_TREE;
5113 if (TREE_CODE (decl) == FUNCTION_DECL)
5115 struct cgraph_node *node;
5116 if (!(node = cgraph_node::get (decl))
5117 || (!node->definition && !node->clones))
5119 if (node)
5120 node->release_body ();
5121 else
5123 release_function_body (decl);
5124 DECL_ARGUMENTS (decl) = NULL;
5125 DECL_RESULT (decl) = NULL;
5126 DECL_INITIAL (decl) = error_mark_node;
5129 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5131 tree t;
5133 /* If DECL has a gimple body, then the context for its
5134 arguments must be DECL. Otherwise, it doesn't really
5135 matter, as we will not be emitting any code for DECL. In
5136 general, there may be other instances of DECL created by
5137 the front end and since PARM_DECLs are generally shared,
5138 their DECL_CONTEXT changes as the replicas of DECL are
5139 created. The only time where DECL_CONTEXT is important
5140 is for the FUNCTION_DECLs that have a gimple body (since
5141 the PARM_DECL will be used in the function's body). */
5142 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5143 DECL_CONTEXT (t) = decl;
5144 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5145 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5146 = target_option_default_node;
5147 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5148 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5149 = optimization_default_node;
5152 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5153 At this point, it is not needed anymore. */
5154 DECL_SAVED_TREE (decl) = NULL_TREE;
5156 /* Clear the abstract origin if it refers to a method.
5157 Otherwise dwarf2out.c will ICE as we splice functions out of
5158 TYPE_FIELDS and thus the origin will not be output
5159 correctly. */
5160 if (DECL_ABSTRACT_ORIGIN (decl)
5161 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5162 && RECORD_OR_UNION_TYPE_P
5163 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5164 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5166 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5167 DECL_VINDEX referring to itself into a vtable slot number as it
5168 should. Happens with functions that are copied and then forgotten
5169 about. Just clear it, it won't matter anymore. */
5170 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5171 DECL_VINDEX (decl) = NULL_TREE;
5173 else if (VAR_P (decl))
5175 if ((DECL_EXTERNAL (decl)
5176 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5177 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5178 DECL_INITIAL (decl) = NULL_TREE;
5180 else if (TREE_CODE (decl) == TYPE_DECL)
5182 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5183 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5184 DECL_INITIAL (decl) = NULL_TREE;
5186 else if (TREE_CODE (decl) == FIELD_DECL)
5187 DECL_INITIAL (decl) = NULL_TREE;
5188 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5189 && DECL_INITIAL (decl)
5190 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5192 /* Strip builtins from the translation-unit BLOCK. We still have targets
5193 without builtin_decl_explicit support and also builtins are shared
5194 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5195 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5196 while (*nextp)
5198 tree var = *nextp;
5199 if (TREE_CODE (var) == FUNCTION_DECL
5200 && DECL_BUILT_IN (var))
5201 *nextp = TREE_CHAIN (var);
5202 else
5203 nextp = &TREE_CHAIN (var);
5209 /* Data used when collecting DECLs and TYPEs for language data removal. */
5211 struct free_lang_data_d
5213 free_lang_data_d () : decls (100), types (100) {}
5215 /* Worklist to avoid excessive recursion. */
5216 auto_vec<tree> worklist;
5218 /* Set of traversed objects. Used to avoid duplicate visits. */
5219 hash_set<tree> pset;
5221 /* Array of symbols to process with free_lang_data_in_decl. */
5222 auto_vec<tree> decls;
5224 /* Array of types to process with free_lang_data_in_type. */
5225 auto_vec<tree> types;
5229 /* Save all language fields needed to generate proper debug information
5230 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5232 static void
5233 save_debug_info_for_decl (tree t)
5235 /*struct saved_debug_info_d *sdi;*/
5237 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5239 /* FIXME. Partial implementation for saving debug info removed. */
5243 /* Save all language fields needed to generate proper debug information
5244 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5246 static void
5247 save_debug_info_for_type (tree t)
5249 /*struct saved_debug_info_d *sdi;*/
5251 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5253 /* FIXME. Partial implementation for saving debug info removed. */
5257 /* Add type or decl T to one of the list of tree nodes that need their
5258 language data removed. The lists are held inside FLD. */
5260 static void
5261 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5263 if (DECL_P (t))
5265 fld->decls.safe_push (t);
5266 if (debug_info_level > DINFO_LEVEL_TERSE)
5267 save_debug_info_for_decl (t);
5269 else if (TYPE_P (t))
5271 fld->types.safe_push (t);
5272 if (debug_info_level > DINFO_LEVEL_TERSE)
5273 save_debug_info_for_type (t);
5275 else
5276 gcc_unreachable ();
5279 /* Push tree node T into FLD->WORKLIST. */
5281 static inline void
5282 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5284 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5285 fld->worklist.safe_push ((t));
5289 /* Operand callback helper for free_lang_data_in_node. *TP is the
5290 subtree operand being considered. */
5292 static tree
5293 find_decls_types_r (tree *tp, int *ws, void *data)
5295 tree t = *tp;
5296 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5298 if (TREE_CODE (t) == TREE_LIST)
5299 return NULL_TREE;
5301 /* Language specific nodes will be removed, so there is no need
5302 to gather anything under them. */
5303 if (is_lang_specific (t))
5305 *ws = 0;
5306 return NULL_TREE;
5309 if (DECL_P (t))
5311 /* Note that walk_tree does not traverse every possible field in
5312 decls, so we have to do our own traversals here. */
5313 add_tree_to_fld_list (t, fld);
5315 fld_worklist_push (DECL_NAME (t), fld);
5316 fld_worklist_push (DECL_CONTEXT (t), fld);
5317 fld_worklist_push (DECL_SIZE (t), fld);
5318 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5320 /* We are going to remove everything under DECL_INITIAL for
5321 TYPE_DECLs. No point walking them. */
5322 if (TREE_CODE (t) != TYPE_DECL)
5323 fld_worklist_push (DECL_INITIAL (t), fld);
5325 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5326 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5328 if (TREE_CODE (t) == FUNCTION_DECL)
5330 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5331 fld_worklist_push (DECL_RESULT (t), fld);
5333 else if (TREE_CODE (t) == TYPE_DECL)
5335 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5337 else if (TREE_CODE (t) == FIELD_DECL)
5339 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5340 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5341 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5342 fld_worklist_push (DECL_FCONTEXT (t), fld);
5345 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5346 && DECL_HAS_VALUE_EXPR_P (t))
5347 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5349 if (TREE_CODE (t) != FIELD_DECL
5350 && TREE_CODE (t) != TYPE_DECL)
5351 fld_worklist_push (TREE_CHAIN (t), fld);
5352 *ws = 0;
5354 else if (TYPE_P (t))
5356 /* Note that walk_tree does not traverse every possible field in
5357 types, so we have to do our own traversals here. */
5358 add_tree_to_fld_list (t, fld);
5360 if (!RECORD_OR_UNION_TYPE_P (t))
5361 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5362 fld_worklist_push (TYPE_SIZE (t), fld);
5363 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5364 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5365 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5366 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5367 fld_worklist_push (TYPE_NAME (t), fld);
5368 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5369 them and thus do not want to reach unused pointer types
5370 this way. */
5371 if (!POINTER_TYPE_P (t))
5372 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5373 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5374 if (!RECORD_OR_UNION_TYPE_P (t))
5375 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5376 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5377 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5378 do not want to reach unused variants this way. */
5379 if (TYPE_CONTEXT (t))
5381 tree ctx = TYPE_CONTEXT (t);
5382 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5383 So push that instead. */
5384 while (ctx && TREE_CODE (ctx) == BLOCK)
5385 ctx = BLOCK_SUPERCONTEXT (ctx);
5386 fld_worklist_push (ctx, fld);
5388 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5389 want to reach unused types this way. */
5391 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5393 unsigned i;
5394 tree tem;
5395 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5396 fld_worklist_push (TREE_TYPE (tem), fld);
5397 fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t)), fld);
5399 if (RECORD_OR_UNION_TYPE_P (t))
5401 tree tem;
5402 /* Push all TYPE_FIELDS - there can be interleaved interesting
5403 and non-interesting things. */
5404 tem = TYPE_FIELDS (t);
5405 while (tem)
5407 if (TREE_CODE (tem) == FIELD_DECL
5408 || (TREE_CODE (tem) == TYPE_DECL
5409 && !DECL_IGNORED_P (tem)
5410 && debug_info_level > DINFO_LEVEL_TERSE
5411 && !is_redundant_typedef (tem)))
5412 fld_worklist_push (tem, fld);
5413 tem = TREE_CHAIN (tem);
5417 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5418 *ws = 0;
5420 else if (TREE_CODE (t) == BLOCK)
5422 tree tem;
5423 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5424 fld_worklist_push (tem, fld);
5425 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5426 fld_worklist_push (tem, fld);
5427 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5430 if (TREE_CODE (t) != IDENTIFIER_NODE
5431 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5432 fld_worklist_push (TREE_TYPE (t), fld);
5434 return NULL_TREE;
5438 /* Find decls and types in T. */
5440 static void
5441 find_decls_types (tree t, struct free_lang_data_d *fld)
5443 while (1)
5445 if (!fld->pset.contains (t))
5446 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5447 if (fld->worklist.is_empty ())
5448 break;
5449 t = fld->worklist.pop ();
5453 /* Translate all the types in LIST to the corresponding runtime
5454 types. */
5456 static tree
5457 get_eh_types_for_runtime (tree list)
5459 tree head, prev;
5461 if (list == NULL_TREE)
5462 return NULL_TREE;
5464 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5465 prev = head;
5466 list = TREE_CHAIN (list);
5467 while (list)
5469 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5470 TREE_CHAIN (prev) = n;
5471 prev = TREE_CHAIN (prev);
5472 list = TREE_CHAIN (list);
5475 return head;
5479 /* Find decls and types referenced in EH region R and store them in
5480 FLD->DECLS and FLD->TYPES. */
5482 static void
5483 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5485 switch (r->type)
5487 case ERT_CLEANUP:
5488 break;
5490 case ERT_TRY:
5492 eh_catch c;
5494 /* The types referenced in each catch must first be changed to the
5495 EH types used at runtime. This removes references to FE types
5496 in the region. */
5497 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5499 c->type_list = get_eh_types_for_runtime (c->type_list);
5500 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5503 break;
5505 case ERT_ALLOWED_EXCEPTIONS:
5506 r->u.allowed.type_list
5507 = get_eh_types_for_runtime (r->u.allowed.type_list);
5508 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5509 break;
5511 case ERT_MUST_NOT_THROW:
5512 walk_tree (&r->u.must_not_throw.failure_decl,
5513 find_decls_types_r, fld, &fld->pset);
5514 break;
5519 /* Find decls and types referenced in cgraph node N and store them in
5520 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5521 look for *every* kind of DECL and TYPE node reachable from N,
5522 including those embedded inside types and decls (i.e., TYPE_DECLs,
5523 NAMESPACE_DECLs, etc). */
5525 static void
5526 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5528 basic_block bb;
5529 struct function *fn;
5530 unsigned ix;
5531 tree t;
5533 find_decls_types (n->decl, fld);
5535 if (!gimple_has_body_p (n->decl))
5536 return;
5538 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5540 fn = DECL_STRUCT_FUNCTION (n->decl);
5542 /* Traverse locals. */
5543 FOR_EACH_LOCAL_DECL (fn, ix, t)
5544 find_decls_types (t, fld);
5546 /* Traverse EH regions in FN. */
5548 eh_region r;
5549 FOR_ALL_EH_REGION_FN (r, fn)
5550 find_decls_types_in_eh_region (r, fld);
5553 /* Traverse every statement in FN. */
5554 FOR_EACH_BB_FN (bb, fn)
5556 gphi_iterator psi;
5557 gimple_stmt_iterator si;
5558 unsigned i;
5560 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5562 gphi *phi = psi.phi ();
5564 for (i = 0; i < gimple_phi_num_args (phi); i++)
5566 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5567 find_decls_types (*arg_p, fld);
5571 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5573 gimple *stmt = gsi_stmt (si);
5575 if (is_gimple_call (stmt))
5576 find_decls_types (gimple_call_fntype (stmt), fld);
5578 for (i = 0; i < gimple_num_ops (stmt); i++)
5580 tree arg = gimple_op (stmt, i);
5581 find_decls_types (arg, fld);
5588 /* Find decls and types referenced in varpool node N and store them in
5589 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5590 look for *every* kind of DECL and TYPE node reachable from N,
5591 including those embedded inside types and decls (i.e., TYPE_DECLs,
5592 NAMESPACE_DECLs, etc). */
5594 static void
5595 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5597 find_decls_types (v->decl, fld);
5600 /* If T needs an assembler name, have one created for it. */
5602 void
5603 assign_assembler_name_if_needed (tree t)
5605 if (need_assembler_name_p (t))
5607 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5608 diagnostics that use input_location to show locus
5609 information. The problem here is that, at this point,
5610 input_location is generally anchored to the end of the file
5611 (since the parser is long gone), so we don't have a good
5612 position to pin it to.
5614 To alleviate this problem, this uses the location of T's
5615 declaration. Examples of this are
5616 testsuite/g++.dg/template/cond2.C and
5617 testsuite/g++.dg/template/pr35240.C. */
5618 location_t saved_location = input_location;
5619 input_location = DECL_SOURCE_LOCATION (t);
5621 decl_assembler_name (t);
5623 input_location = saved_location;
5628 /* Free language specific information for every operand and expression
5629 in every node of the call graph. This process operates in three stages:
5631 1- Every callgraph node and varpool node is traversed looking for
5632 decls and types embedded in them. This is a more exhaustive
5633 search than that done by find_referenced_vars, because it will
5634 also collect individual fields, decls embedded in types, etc.
5636 2- All the decls found are sent to free_lang_data_in_decl.
5638 3- All the types found are sent to free_lang_data_in_type.
5640 The ordering between decls and types is important because
5641 free_lang_data_in_decl sets assembler names, which includes
5642 mangling. So types cannot be freed up until assembler names have
5643 been set up. */
5645 static void
5646 free_lang_data_in_cgraph (void)
5648 struct cgraph_node *n;
5649 varpool_node *v;
5650 struct free_lang_data_d fld;
5651 tree t;
5652 unsigned i;
5653 alias_pair *p;
5655 /* Find decls and types in the body of every function in the callgraph. */
5656 FOR_EACH_FUNCTION (n)
5657 find_decls_types_in_node (n, &fld);
5659 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5660 find_decls_types (p->decl, &fld);
5662 /* Find decls and types in every varpool symbol. */
5663 FOR_EACH_VARIABLE (v)
5664 find_decls_types_in_var (v, &fld);
5666 /* Set the assembler name on every decl found. We need to do this
5667 now because free_lang_data_in_decl will invalidate data needed
5668 for mangling. This breaks mangling on interdependent decls. */
5669 FOR_EACH_VEC_ELT (fld.decls, i, t)
5670 assign_assembler_name_if_needed (t);
5672 /* Traverse every decl found freeing its language data. */
5673 FOR_EACH_VEC_ELT (fld.decls, i, t)
5674 free_lang_data_in_decl (t);
5676 /* Traverse every type found freeing its language data. */
5677 FOR_EACH_VEC_ELT (fld.types, i, t)
5678 free_lang_data_in_type (t);
5679 if (flag_checking)
5681 FOR_EACH_VEC_ELT (fld.types, i, t)
5682 verify_type (t);
5687 /* Free resources that are used by FE but are not needed once they are done. */
5689 static unsigned
5690 free_lang_data (void)
5692 unsigned i;
5694 /* If we are the LTO frontend we have freed lang-specific data already. */
5695 if (in_lto_p
5696 || (!flag_generate_lto && !flag_generate_offload))
5697 return 0;
5699 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
5700 if (vec_safe_is_empty (all_translation_units))
5701 build_translation_unit_decl (NULL_TREE);
5703 /* Allocate and assign alias sets to the standard integer types
5704 while the slots are still set up the way the front ends generated them. */
5705 for (i = 0; i < itk_none; ++i)
5706 if (integer_types[i])
5707 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5709 /* Traverse the IL resetting language specific information for
5710 operands, expressions, etc. */
5711 free_lang_data_in_cgraph ();
5713 /* Create gimple variants for common types. */
5714 for (unsigned i = 0;
5715 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
5716 ++i)
5717 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
5719 /* Reset some langhooks. Do not reset types_compatible_p, it may
5720 still be used indirectly via the get_alias_set langhook. */
5721 lang_hooks.dwarf_name = lhd_dwarf_name;
5722 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5723 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5725 /* We do not want the default decl_assembler_name implementation,
5726 rather if we have fixed everything we want a wrapper around it
5727 asserting that all non-local symbols already got their assembler
5728 name and only produce assembler names for local symbols. Or rather
5729 make sure we never call decl_assembler_name on local symbols and
5730 devise a separate, middle-end private scheme for it. */
5732 /* Reset diagnostic machinery. */
5733 tree_diagnostics_defaults (global_dc);
5735 return 0;
5739 namespace {
5741 const pass_data pass_data_ipa_free_lang_data =
5743 SIMPLE_IPA_PASS, /* type */
5744 "*free_lang_data", /* name */
5745 OPTGROUP_NONE, /* optinfo_flags */
5746 TV_IPA_FREE_LANG_DATA, /* tv_id */
5747 0, /* properties_required */
5748 0, /* properties_provided */
5749 0, /* properties_destroyed */
5750 0, /* todo_flags_start */
5751 0, /* todo_flags_finish */
5754 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5756 public:
5757 pass_ipa_free_lang_data (gcc::context *ctxt)
5758 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5761 /* opt_pass methods: */
5762 virtual unsigned int execute (function *) { return free_lang_data (); }
5764 }; // class pass_ipa_free_lang_data
5766 } // anon namespace
5768 simple_ipa_opt_pass *
5769 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5771 return new pass_ipa_free_lang_data (ctxt);
5774 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5775 of the various TYPE_QUAL values. */
5777 static void
5778 set_type_quals (tree type, int type_quals)
5780 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5781 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5782 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5783 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5784 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5787 /* Returns true iff CAND and BASE have equivalent language-specific
5788 qualifiers. */
5790 bool
5791 check_lang_type (const_tree cand, const_tree base)
5793 if (lang_hooks.types.type_hash_eq == NULL)
5794 return true;
5795 /* type_hash_eq currently only applies to these types. */
5796 if (TREE_CODE (cand) != FUNCTION_TYPE
5797 && TREE_CODE (cand) != METHOD_TYPE)
5798 return true;
5799 return lang_hooks.types.type_hash_eq (cand, base);
5802 /* Returns true iff unqualified CAND and BASE are equivalent. */
5804 bool
5805 check_base_type (const_tree cand, const_tree base)
5807 return (TYPE_NAME (cand) == TYPE_NAME (base)
5808 /* Apparently this is needed for Objective-C. */
5809 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5810 /* Check alignment. */
5811 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5812 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5813 TYPE_ATTRIBUTES (base)));
5816 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5818 bool
5819 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5821 return (TYPE_QUALS (cand) == type_quals
5822 && check_base_type (cand, base)
5823 && check_lang_type (cand, base));
5826 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5828 static bool
5829 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5831 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5832 && TYPE_NAME (cand) == TYPE_NAME (base)
5833 /* Apparently this is needed for Objective-C. */
5834 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5835 /* Check alignment. */
5836 && TYPE_ALIGN (cand) == align
5837 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5838 TYPE_ATTRIBUTES (base))
5839 && check_lang_type (cand, base));
5842 /* This function checks to see if TYPE matches the size of one of the built-in
5843 atomic types, and returns that core atomic type. */
5845 static tree
5846 find_atomic_core_type (tree type)
5848 tree base_atomic_type;
5850 /* Only handle complete types. */
5851 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5852 return NULL_TREE;
5854 switch (tree_to_uhwi (TYPE_SIZE (type)))
5856 case 8:
5857 base_atomic_type = atomicQI_type_node;
5858 break;
5860 case 16:
5861 base_atomic_type = atomicHI_type_node;
5862 break;
5864 case 32:
5865 base_atomic_type = atomicSI_type_node;
5866 break;
5868 case 64:
5869 base_atomic_type = atomicDI_type_node;
5870 break;
5872 case 128:
5873 base_atomic_type = atomicTI_type_node;
5874 break;
5876 default:
5877 base_atomic_type = NULL_TREE;
5880 return base_atomic_type;
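/* E.g. a complete _Atomic-qualified type that is exactly 32 bits wide maps
   to atomicSI_type_node; build_qualified_type below then raises the
   alignment of the variant to that of the core atomic type if needed.  */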
5883 /* Return a version of the TYPE, qualified as indicated by the
5884 TYPE_QUALS, if one exists. If no qualified version exists yet,
5885 return NULL_TREE. */
5887 tree
5888 get_qualified_type (tree type, int type_quals)
5890 tree t;
5892 if (TYPE_QUALS (type) == type_quals)
5893 return type;
5895 /* Search the chain of variants to see if there is already one there just
5896 like the one we need to have. If so, use that existing one. We must
5897 preserve the TYPE_NAME, since there is code that depends on this. */
5898 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5899 if (check_qualified_type (t, type, type_quals))
5900 return t;
5902 return NULL_TREE;
5905 /* Like get_qualified_type, but creates the type if it does not
5906 exist. This function never returns NULL_TREE. */
5908 tree
5909 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5911 tree t;
5913 /* See if we already have the appropriate qualified variant. */
5914 t = get_qualified_type (type, type_quals);
5916 /* If not, build it. */
5917 if (!t)
5919 t = build_variant_type_copy (type PASS_MEM_STAT);
5920 set_type_quals (t, type_quals);
5922 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5924 /* See if this object can map to a basic atomic type. */
5925 tree atomic_type = find_atomic_core_type (type);
5926 if (atomic_type)
5928 /* Ensure the alignment of this type is compatible with
5929 the required alignment of the atomic type. */
5930 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5931 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5935 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5936 /* Propagate structural equality. */
5937 SET_TYPE_STRUCTURAL_EQUALITY (t);
5938 else if (TYPE_CANONICAL (type) != type)
5939 /* Build the underlying canonical type, since it is different
5940 from TYPE. */
5942 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5943 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5945 else
5946 /* T is its own canonical type. */
5947 TYPE_CANONICAL (t) = t;
5951 return t;
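/* Example sketch: requesting a "const volatile int" variant:

     tree cvint = build_qualified_type (integer_type_node,
                                        TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   If such a variant already exists on the variant chain of
   integer_type_node it is reused, otherwise a new one is built.  */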
5954 /* Create a variant of type T with alignment ALIGN. */
5956 tree
5957 build_aligned_type (tree type, unsigned int align)
5959 tree t;
5961 if (TYPE_PACKED (type)
5962 || TYPE_ALIGN (type) == align)
5963 return type;
5965 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5966 if (check_aligned_type (t, type, align))
5967 return t;
5969 t = build_variant_type_copy (type);
5970 SET_TYPE_ALIGN (t, align);
5971 TYPE_USER_ALIGN (t) = 1;
5973 return t;
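/* Example sketch: ALIGN is given in bits, so a variant of TYPE aligned to a
   16-byte boundary would be requested as

     tree t16 = build_aligned_type (type, 128);

   Packed types and types that already have the requested alignment are
   returned unchanged above.  */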
5976 /* Create a new distinct copy of TYPE. The new type is made its own
5977 MAIN_VARIANT. If TYPE requires structural equality checks, the
5978 resulting type requires structural equality checks; otherwise, its
5979 TYPE_CANONICAL points to itself. */
5981 tree
5982 build_distinct_type_copy (tree type MEM_STAT_DECL)
5984 tree t = copy_node (type PASS_MEM_STAT);
5986 TYPE_POINTER_TO (t) = 0;
5987 TYPE_REFERENCE_TO (t) = 0;
5989 /* Set the canonical type either to a new equivalence class, or
5990 propagate the need for structural equality checks. */
5991 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5992 SET_TYPE_STRUCTURAL_EQUALITY (t);
5993 else
5994 TYPE_CANONICAL (t) = t;
5996 /* Make it its own variant. */
5997 TYPE_MAIN_VARIANT (t) = t;
5998 TYPE_NEXT_VARIANT (t) = 0;
6000 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6001 whose TREE_TYPE is not t. This can also happen in the Ada
6002 frontend when using subtypes. */
6004 return t;
6007 /* Create a new variant of TYPE, equivalent but distinct. This is so
6008 the caller can modify it. TYPE_CANONICAL for the return type will
6009 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6010 are considered equal by the language itself (or that both types
6011 require structural equality checks). */
6013 tree
6014 build_variant_type_copy (tree type MEM_STAT_DECL)
6016 tree t, m = TYPE_MAIN_VARIANT (type);
6018 t = build_distinct_type_copy (type PASS_MEM_STAT);
6020 /* Since we're building a variant, assume that it is a non-semantic
6021 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6022 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6023 /* Type variants have no alias set defined. */
6024 TYPE_ALIAS_SET (t) = -1;
6026 /* Add the new type to the chain of variants of TYPE. */
6027 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6028 TYPE_NEXT_VARIANT (m) = t;
6029 TYPE_MAIN_VARIANT (t) = m;
6031 return t;
6034 /* Return true if the "from" trees in both tree maps are equal. */
6037 tree_map_base_eq (const void *va, const void *vb)
6039 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6040 *const b = (const struct tree_map_base *) vb;
6041 return (a->from == b->from);
6044 /* Hash a from tree in a tree_map_base. */
6046 unsigned int
6047 tree_map_base_hash (const void *item)
6049 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6052 /* Return true if this tree map structure is marked for garbage collection
6053 purposes. We simply return true if the from tree is marked, so that this
6054 structure goes away when the from tree goes away. */
6057 tree_map_base_marked_p (const void *p)
6059 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6062 /* Hash a from tree in a tree_map. */
6064 unsigned int
6065 tree_map_hash (const void *item)
6067 return (((const struct tree_map *) item)->hash);
6070 /* Hash a from tree in a tree_decl_map. */
6072 unsigned int
6073 tree_decl_map_hash (const void *item)
6075 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6078 /* Return the initialization priority for DECL. */
6080 priority_type
6081 decl_init_priority_lookup (tree decl)
6083 symtab_node *snode = symtab_node::get (decl);
6085 if (!snode)
6086 return DEFAULT_INIT_PRIORITY;
6087 return
6088 snode->get_init_priority ();
6091 /* Return the finalization priority for DECL. */
6093 priority_type
6094 decl_fini_priority_lookup (tree decl)
6096 cgraph_node *node = cgraph_node::get (decl);
6098 if (!node)
6099 return DEFAULT_INIT_PRIORITY;
6100 return
6101 node->get_fini_priority ();
6104 /* Set the initialization priority for DECL to PRIORITY. */
6106 void
6107 decl_init_priority_insert (tree decl, priority_type priority)
6109 struct symtab_node *snode;
6111 if (priority == DEFAULT_INIT_PRIORITY)
6113 snode = symtab_node::get (decl);
6114 if (!snode)
6115 return;
6117 else if (VAR_P (decl))
6118 snode = varpool_node::get_create (decl);
6119 else
6120 snode = cgraph_node::get_create (decl);
6121 snode->set_init_priority (priority);
6124 /* Set the finalization priority for DECL to PRIORITY. */
6126 void
6127 decl_fini_priority_insert (tree decl, priority_type priority)
6129 struct cgraph_node *node;
6131 if (priority == DEFAULT_INIT_PRIORITY)
6133 node = cgraph_node::get (decl);
6134 if (!node)
6135 return;
6137 else
6138 node = cgraph_node::get_create (decl);
6139 node->set_fini_priority (priority);
6142 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6144 static void
6145 print_debug_expr_statistics (void)
6147 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6148 (long) debug_expr_for_decl->size (),
6149 (long) debug_expr_for_decl->elements (),
6150 debug_expr_for_decl->collisions ());
6153 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6155 static void
6156 print_value_expr_statistics (void)
6158 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6159 (long) value_expr_for_decl->size (),
6160 (long) value_expr_for_decl->elements (),
6161 value_expr_for_decl->collisions ());
6164 /* Lookup a debug expression for FROM, and return it if we find one. */
6166 tree
6167 decl_debug_expr_lookup (tree from)
6169 struct tree_decl_map *h, in;
6170 in.base.from = from;
6172 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6173 if (h)
6174 return h->to;
6175 return NULL_TREE;
6178 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6180 void
6181 decl_debug_expr_insert (tree from, tree to)
6183 struct tree_decl_map *h;
6185 h = ggc_alloc<tree_decl_map> ();
6186 h->base.from = from;
6187 h->to = to;
6188 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6191 /* Lookup a value expression for FROM, and return it if we find one. */
6193 tree
6194 decl_value_expr_lookup (tree from)
6196 struct tree_decl_map *h, in;
6197 in.base.from = from;
6199 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6200 if (h)
6201 return h->to;
6202 return NULL_TREE;
6205 /* Insert a mapping FROM->TO in the value expression hashtable. */
6207 void
6208 decl_value_expr_insert (tree from, tree to)
6210 struct tree_decl_map *h;
6212 h = ggc_alloc<tree_decl_map> ();
6213 h->base.from = from;
6214 h->to = to;
6215 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6218 /* Lookup a vector of debug arguments for FROM, and return it if we
6219 find one. */
6221 vec<tree, va_gc> **
6222 decl_debug_args_lookup (tree from)
6224 struct tree_vec_map *h, in;
6226 if (!DECL_HAS_DEBUG_ARGS_P (from))
6227 return NULL;
6228 gcc_checking_assert (debug_args_for_decl != NULL);
6229 in.base.from = from;
6230 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6231 if (h)
6232 return &h->to;
6233 return NULL;
6236 /* Insert a mapping FROM->empty vector of debug arguments in the value
6237 expression hashtable. */
6239 vec<tree, va_gc> **
6240 decl_debug_args_insert (tree from)
6242 struct tree_vec_map *h;
6243 tree_vec_map **loc;
6245 if (DECL_HAS_DEBUG_ARGS_P (from))
6246 return decl_debug_args_lookup (from);
6247 if (debug_args_for_decl == NULL)
6248 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6249 h = ggc_alloc<tree_vec_map> ();
6250 h->base.from = from;
6251 h->to = NULL;
6252 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6253 *loc = h;
6254 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6255 return &h->to;
6258 /* Hashing of types so that we don't make duplicates.
6259 The entry point is `type_hash_canon'. */
6261 /* Generate the default hash code for TYPE. This is designed for
6262 speed, rather than maximum entropy. */
6264 hashval_t
6265 type_hash_canon_hash (tree type)
6267 inchash::hash hstate;
6269 hstate.add_int (TREE_CODE (type));
6271 if (TREE_TYPE (type))
6272 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6274 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6275 /* Just the identifier is adequate to distinguish. */
6276 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6278 switch (TREE_CODE (type))
6280 case METHOD_TYPE:
6281 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6282 /* FALLTHROUGH. */
6283 case FUNCTION_TYPE:
6284 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6285 if (TREE_VALUE (t) != error_mark_node)
6286 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6287 break;
6289 case OFFSET_TYPE:
6290 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6291 break;
6293 case ARRAY_TYPE:
6295 if (TYPE_DOMAIN (type))
6296 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6297 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6299 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6300 hstate.add_object (typeless);
6303 break;
6305 case INTEGER_TYPE:
6307 tree t = TYPE_MAX_VALUE (type);
6308 if (!t)
6309 t = TYPE_MIN_VALUE (type);
6310 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6311 hstate.add_object (TREE_INT_CST_ELT (t, i));
6312 break;
6315 case REAL_TYPE:
6316 case FIXED_POINT_TYPE:
6318 unsigned prec = TYPE_PRECISION (type);
6319 hstate.add_object (prec);
6320 break;
6323 case VECTOR_TYPE:
6325 unsigned nunits = TYPE_VECTOR_SUBPARTS (type);
6326 hstate.add_object (nunits);
6327 break;
6330 default:
6331 break;
6334 return hstate.end ();
6337 /* These are the Hashtable callback functions. */
6339 /* Returns true iff the types are equivalent. */
6341 bool
6342 type_cache_hasher::equal (type_hash *a, type_hash *b)
6344 /* First test the things that are the same for all types. */
6345 if (a->hash != b->hash
6346 || TREE_CODE (a->type) != TREE_CODE (b->type)
6347 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6348 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6349 TYPE_ATTRIBUTES (b->type))
6350 || (TREE_CODE (a->type) != COMPLEX_TYPE
6351 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6352 return 0;
6354 /* Be careful about comparing arrays before and after the element type
6355 has been completed; don't compare TYPE_ALIGN unless both types are
6356 complete. */
6357 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6358 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6359 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6360 return 0;
6362 switch (TREE_CODE (a->type))
6364 case VOID_TYPE:
6365 case COMPLEX_TYPE:
6366 case POINTER_TYPE:
6367 case REFERENCE_TYPE:
6368 case NULLPTR_TYPE:
6369 return 1;
6371 case VECTOR_TYPE:
6372 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6374 case ENUMERAL_TYPE:
6375 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6376 && !(TYPE_VALUES (a->type)
6377 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6378 && TYPE_VALUES (b->type)
6379 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6380 && type_list_equal (TYPE_VALUES (a->type),
6381 TYPE_VALUES (b->type))))
6382 return 0;
6384 /* fall through */
6386 case INTEGER_TYPE:
6387 case REAL_TYPE:
6388 case BOOLEAN_TYPE:
6389 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6390 return false;
6391 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6392 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6393 TYPE_MAX_VALUE (b->type)))
6394 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6395 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6396 TYPE_MIN_VALUE (b->type))));
6398 case FIXED_POINT_TYPE:
6399 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6401 case OFFSET_TYPE:
6402 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6404 case METHOD_TYPE:
6405 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6406 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6407 || (TYPE_ARG_TYPES (a->type)
6408 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6409 && TYPE_ARG_TYPES (b->type)
6410 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6411 && type_list_equal (TYPE_ARG_TYPES (a->type),
6412 TYPE_ARG_TYPES (b->type)))))
6413 break;
6414 return 0;
6415 case ARRAY_TYPE:
6416 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6417 where the flag should be inherited from the element type
6418 and can change after ARRAY_TYPEs are created; on non-aggregates
6419 compare and hash it: scalars never have that flag set, and we
6420 need to distinguish arrays created by different front ends
6421 from middle-end created arrays. */
6422 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6423 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6424 || (TYPE_TYPELESS_STORAGE (a->type)
6425 == TYPE_TYPELESS_STORAGE (b->type))));
6427 case RECORD_TYPE:
6428 case UNION_TYPE:
6429 case QUAL_UNION_TYPE:
6430 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6431 || (TYPE_FIELDS (a->type)
6432 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6433 && TYPE_FIELDS (b->type)
6434 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6435 && type_list_equal (TYPE_FIELDS (a->type),
6436 TYPE_FIELDS (b->type))));
6438 case FUNCTION_TYPE:
6439 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6440 || (TYPE_ARG_TYPES (a->type)
6441 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6442 && TYPE_ARG_TYPES (b->type)
6443 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6444 && type_list_equal (TYPE_ARG_TYPES (a->type),
6445 TYPE_ARG_TYPES (b->type))))
6446 break;
6447 return 0;
6449 default:
6450 return 0;
6453 if (lang_hooks.types.type_hash_eq != NULL)
6454 return lang_hooks.types.type_hash_eq (a->type, b->type);
6456 return 1;
6459 /* Given TYPE, and HASHCODE its hash code, return the canonical
6460 object for an identical type if one already exists.
6461 Otherwise, return TYPE, and record it as the canonical object.
6463 To use this function, first create a type of the sort you want.
6464 Then compute its hash code from the fields of the type that
6465 make it different from other similar types.
6466 Then call this function and use the value. */
6468 tree
6469 type_hash_canon (unsigned int hashcode, tree type)
6471 type_hash in;
6472 type_hash **loc;
6474 /* The hash table only contains main variants, so ensure that's what we're
6475 being passed. */
6476 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6478 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6479 must call that routine before comparing TYPE_ALIGNs. */
6480 layout_type (type);
6482 in.hash = hashcode;
6483 in.type = type;
6485 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6486 if (*loc)
6488 tree t1 = ((type_hash *) *loc)->type;
6489 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6490 if (TYPE_UID (type) + 1 == next_type_uid)
6491 --next_type_uid;
6492 /* Also free the min/max values and the value cache for integer
6493 types. This can't be done in free_node, as LTO frees
6494 those on its own. */
6495 if (TREE_CODE (type) == INTEGER_TYPE)
6497 if (TYPE_MIN_VALUE (type)
6498 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6499 ggc_free (TYPE_MIN_VALUE (type));
6500 if (TYPE_MAX_VALUE (type)
6501 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6502 ggc_free (TYPE_MAX_VALUE (type));
6503 if (TYPE_CACHED_VALUES_P (type))
6504 ggc_free (TYPE_CACHED_VALUES (type));
6506 free_node (type);
6507 return t1;
6509 else
6511 struct type_hash *h;
6513 h = ggc_alloc<type_hash> ();
6514 h->hash = hashcode;
6515 h->type = type;
6516 *loc = h;
6518 return type;
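/* Illustrative sketch, not part of tree.c: the build/hash/canonicalize
   pattern described above, shown for a hypothetical OFFSET_TYPE.  After the
   call, only the returned node may be used, since the freshly built node
   may have been freed in favor of an existing equivalent type.  */

static tree
sketch_canonical_offset_type (tree basetype, tree type)
{
  /* 1. Create a type of the sort we want.  */
  tree t = make_node (OFFSET_TYPE);
  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* 2. Compute its hash from the fields that distinguish it.  */
  hashval_t hash = type_hash_canon_hash (t);

  /* 3. Canonicalize, and use the result from here on.  */
  return type_hash_canon (hash, t);
}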
6522 static void
6523 print_type_hash_statistics (void)
6525 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6526 (long) type_hash_table->size (),
6527 (long) type_hash_table->elements (),
6528 type_hash_table->collisions ());
6531 /* Given two lists of types
6532 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6533 return 1 if the lists contain the same types in the same order.
6534 Also, the TREE_PURPOSEs must match. */
6536 int
6537 type_list_equal (const_tree l1, const_tree l2)
6539 const_tree t1, t2;
6541 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6542 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6543 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6544 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6545 && (TREE_TYPE (TREE_PURPOSE (t1))
6546 == TREE_TYPE (TREE_PURPOSE (t2))))))
6547 return 0;
6549 return t1 == t2;
6552 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6553 given by TYPE. If the argument list accepts variable arguments,
6554 then this function counts only the ordinary arguments. */
6556 int
6557 type_num_arguments (const_tree type)
6559 int i = 0;
6560 tree t;
6562 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6563 /* If the function does not take a variable number of arguments,
6564 the last element in the list will have type `void'. */
6565 if (VOID_TYPE_P (TREE_VALUE (t)))
6566 break;
6567 else
6568 ++i;
6570 return i;
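/* Illustrative sketch, not part of tree.c: type_num_arguments counts only
   the named arguments, so "void (int, double)" and "void (int, double, ...)"
   both report 2.  */

static void
sketch_count_named_args (void)
{
  tree fixed = build_function_type_list (void_type_node, integer_type_node,
                                         double_type_node, NULL_TREE);
  tree variadic = build_varargs_function_type_list (void_type_node,
                                                    integer_type_node,
                                                    double_type_node,
                                                    NULL_TREE);
  gcc_assert (type_num_arguments (fixed) == 2);
  gcc_assert (type_num_arguments (variadic) == 2);
}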
6573 /* Nonzero if integer constants T1 and T2
6574 represent the same constant value. */
6576 int
6577 tree_int_cst_equal (const_tree t1, const_tree t2)
6579 if (t1 == t2)
6580 return 1;
6582 if (t1 == 0 || t2 == 0)
6583 return 0;
6585 if (TREE_CODE (t1) == INTEGER_CST
6586 && TREE_CODE (t2) == INTEGER_CST
6587 && wi::to_widest (t1) == wi::to_widest (t2))
6588 return 1;
6590 return 0;
6593 /* Return true if T is an INTEGER_CST whose numerical value (extended
6594 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6596 bool
6597 tree_fits_shwi_p (const_tree t)
6599 return (t != NULL_TREE
6600 && TREE_CODE (t) == INTEGER_CST
6601 && wi::fits_shwi_p (wi::to_widest (t)));
6604 /* Return true if T is an INTEGER_CST whose numerical value (extended
6605 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6607 bool
6608 tree_fits_uhwi_p (const_tree t)
6610 return (t != NULL_TREE
6611 && TREE_CODE (t) == INTEGER_CST
6612 && wi::fits_uhwi_p (wi::to_widest (t)));
6615 /* T is an INTEGER_CST whose numerical value (extended according to
6616 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6617 HOST_WIDE_INT. */
6619 HOST_WIDE_INT
6620 tree_to_shwi (const_tree t)
6622 gcc_assert (tree_fits_shwi_p (t));
6623 return TREE_INT_CST_LOW (t);
6626 /* T is an INTEGER_CST whose numerical value (extended according to
6627 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6628 HOST_WIDE_INT. */
6630 unsigned HOST_WIDE_INT
6631 tree_to_uhwi (const_tree t)
6633 gcc_assert (tree_fits_uhwi_p (t));
6634 return TREE_INT_CST_LOW (t);
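/* Illustrative sketch, not part of tree.c: tree_to_shwi and tree_to_uhwi
   assert that the value fits, so callers are expected to test the
   corresponding predicate first.  T is a hypothetical INTEGER_CST.  */

static bool
sketch_extract_shwi (const_tree t, HOST_WIDE_INT *out)
{
  if (!tree_fits_shwi_p (t))
    return false;
  *out = tree_to_shwi (t);
  return true;
}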
6637 /* Return the most significant (sign) bit of T. */
6639 int
6640 tree_int_cst_sign_bit (const_tree t)
6642 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6644 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6647 /* Return an indication of the sign of the integer constant T.
6648 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6649 Note that -1 will never be returned if T's type is unsigned. */
6651 int
6652 tree_int_cst_sgn (const_tree t)
6654 if (wi::to_wide (t) == 0)
6655 return 0;
6656 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6657 return 1;
6658 else if (wi::neg_p (wi::to_wide (t)))
6659 return -1;
6660 else
6661 return 1;
6664 /* Return the minimum number of bits needed to represent VALUE in a
6665 signed or unsigned type; SGN says which. */
6667 unsigned int
6668 tree_int_cst_min_precision (tree value, signop sgn)
6670 /* If the value is negative, compute its negative minus 1. The latter
6671 adjustment is because the absolute value of the largest negative value
6672 is one larger than the largest positive value. This is equivalent to
6673 a bit-wise negation, so use that operation instead. */
6675 if (tree_int_cst_sgn (value) < 0)
6676 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6678 /* Return the number of bits needed, taking into account the fact
6679 that we need one more bit for a signed than unsigned type.
6680 If value is 0 or -1, the minimum precision is 1 no matter
6681 whether SGN is SIGNED or UNSIGNED. */
6683 if (integer_zerop (value))
6684 return 1;
6685 else
6686 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
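/* Illustrative sketch, not part of tree.c: a few concrete values for
   tree_int_cst_min_precision.  255 needs 8 bits as an unsigned quantity but
   9 bits with a sign bit, and -1 needs only a single signed bit.  */

static void
sketch_min_precision_examples (void)
{
  tree c255 = build_int_cst (integer_type_node, 255);
  tree m1 = build_int_cst (integer_type_node, -1);
  gcc_assert (tree_int_cst_min_precision (c255, UNSIGNED) == 8);
  gcc_assert (tree_int_cst_min_precision (c255, SIGNED) == 9);
  gcc_assert (tree_int_cst_min_precision (m1, SIGNED) == 1);
}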
6689 /* Return truthvalue of whether T1 is the same tree structure as T2.
6690 Return 1 if they are the same.
6691 Return 0 if they are understandably different.
6692 Return -1 if either contains tree structure not understood by
6693 this function. */
6695 int
6696 simple_cst_equal (const_tree t1, const_tree t2)
6698 enum tree_code code1, code2;
6699 int cmp;
6700 int i;
6702 if (t1 == t2)
6703 return 1;
6704 if (t1 == 0 || t2 == 0)
6705 return 0;
6707 code1 = TREE_CODE (t1);
6708 code2 = TREE_CODE (t2);
6710 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6712 if (CONVERT_EXPR_CODE_P (code2)
6713 || code2 == NON_LVALUE_EXPR)
6714 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6715 else
6716 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6719 else if (CONVERT_EXPR_CODE_P (code2)
6720 || code2 == NON_LVALUE_EXPR)
6721 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6723 if (code1 != code2)
6724 return 0;
6726 switch (code1)
6728 case INTEGER_CST:
6729 return wi::to_widest (t1) == wi::to_widest (t2);
6731 case REAL_CST:
6732 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6734 case FIXED_CST:
6735 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6737 case STRING_CST:
6738 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6739 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6740 TREE_STRING_LENGTH (t1)));
6742 case CONSTRUCTOR:
6744 unsigned HOST_WIDE_INT idx;
6745 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6746 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6748 if (vec_safe_length (v1) != vec_safe_length (v2))
6749 return false;
6751 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6752 /* ??? Should we also handle fields here? */
6753 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6754 return false;
6755 return true;
6758 case SAVE_EXPR:
6759 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6761 case CALL_EXPR:
6762 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6763 if (cmp <= 0)
6764 return cmp;
6765 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6766 return 0;
6768 const_tree arg1, arg2;
6769 const_call_expr_arg_iterator iter1, iter2;
6770 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6771 arg2 = first_const_call_expr_arg (t2, &iter2);
6772 arg1 && arg2;
6773 arg1 = next_const_call_expr_arg (&iter1),
6774 arg2 = next_const_call_expr_arg (&iter2))
6776 cmp = simple_cst_equal (arg1, arg2);
6777 if (cmp <= 0)
6778 return cmp;
6780 return arg1 == arg2;
6783 case TARGET_EXPR:
6784 /* Special case: if either target is an unallocated VAR_DECL,
6785 it means that it's going to be unified with whatever the
6786 TARGET_EXPR is really supposed to initialize, so treat it
6787 as being equivalent to anything. */
6788 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6789 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6790 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6791 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6792 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6793 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6794 cmp = 1;
6795 else
6796 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6798 if (cmp <= 0)
6799 return cmp;
6801 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6803 case WITH_CLEANUP_EXPR:
6804 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6805 if (cmp <= 0)
6806 return cmp;
6808 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6810 case COMPONENT_REF:
6811 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6812 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6814 return 0;
6816 case VAR_DECL:
6817 case PARM_DECL:
6818 case CONST_DECL:
6819 case FUNCTION_DECL:
6820 return 0;
6822 default:
6823 break;
6826 /* This general rule works for most tree codes. All exceptions should be
6827 handled above. If this is a language-specific tree code, we can't
6828 trust what might be in the operand, so say we don't know
6829 the situation. */
6830 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6831 return -1;
6833 switch (TREE_CODE_CLASS (code1))
6835 case tcc_unary:
6836 case tcc_binary:
6837 case tcc_comparison:
6838 case tcc_expression:
6839 case tcc_reference:
6840 case tcc_statement:
6841 cmp = 1;
6842 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6844 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6845 if (cmp <= 0)
6846 return cmp;
6849 return cmp;
6851 default:
6852 return -1;
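/* Illustrative sketch, not part of tree.c: simple_cst_equal is tri-state,
   so a caller that only wants a yes/no answer must treat the "don't know"
   result (-1) the same as inequality.  */

static bool
sketch_known_equal_p (const_tree t1, const_tree t2)
{
  return simple_cst_equal (t1, t2) == 1;
}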
6856 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6857 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6858 than U, respectively. */
6860 int
6861 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6863 if (tree_int_cst_sgn (t) < 0)
6864 return -1;
6865 else if (!tree_fits_uhwi_p (t))
6866 return 1;
6867 else if (TREE_INT_CST_LOW (t) == u)
6868 return 0;
6869 else if (TREE_INT_CST_LOW (t) < u)
6870 return -1;
6871 else
6872 return 1;
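/* Illustrative sketch, not part of tree.c: compare_tree_int orders an
   INTEGER_CST against a host integer without building a second tree node,
   e.g. to check that a hypothetical length constant fits in a buffer.  */

static bool
sketch_len_fits_p (const_tree len, unsigned HOST_WIDE_INT bufsize)
{
  return (tree_int_cst_sgn (len) >= 0
          && compare_tree_int (len, bufsize) <= 0);
}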
6875 /* Return true if SIZE represents a constant size that is in bounds of
6876 what the middle-end and the backend accepts (covering not more than
6877 half of the address-space). */
6879 bool
6880 valid_constant_size_p (const_tree size)
6882 if (! tree_fits_uhwi_p (size)
6883 || TREE_OVERFLOW (size)
6884 || tree_int_cst_sign_bit (size) != 0)
6885 return false;
6886 return true;
6889 /* Return the precision of the type, or for a complex or vector type the
6890 precision of the type of its elements. */
6892 unsigned int
6893 element_precision (const_tree type)
6895 if (!TYPE_P (type))
6896 type = TREE_TYPE (type);
6897 enum tree_code code = TREE_CODE (type);
6898 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6899 type = TREE_TYPE (type);
6901 return TYPE_PRECISION (type);
6904 /* Return true if CODE represents an associative tree code. Otherwise
6905 return false. */
6906 bool
6907 associative_tree_code (enum tree_code code)
6909 switch (code)
6911 case BIT_IOR_EXPR:
6912 case BIT_AND_EXPR:
6913 case BIT_XOR_EXPR:
6914 case PLUS_EXPR:
6915 case MULT_EXPR:
6916 case MIN_EXPR:
6917 case MAX_EXPR:
6918 return true;
6920 default:
6921 break;
6923 return false;
6926 /* Return true if CODE represents a commutative tree code. Otherwise
6927 return false. */
6928 bool
6929 commutative_tree_code (enum tree_code code)
6931 switch (code)
6933 case PLUS_EXPR:
6934 case MULT_EXPR:
6935 case MULT_HIGHPART_EXPR:
6936 case MIN_EXPR:
6937 case MAX_EXPR:
6938 case BIT_IOR_EXPR:
6939 case BIT_XOR_EXPR:
6940 case BIT_AND_EXPR:
6941 case NE_EXPR:
6942 case EQ_EXPR:
6943 case UNORDERED_EXPR:
6944 case ORDERED_EXPR:
6945 case UNEQ_EXPR:
6946 case LTGT_EXPR:
6947 case TRUTH_AND_EXPR:
6948 case TRUTH_XOR_EXPR:
6949 case TRUTH_OR_EXPR:
6950 case WIDEN_MULT_EXPR:
6951 case VEC_WIDEN_MULT_HI_EXPR:
6952 case VEC_WIDEN_MULT_LO_EXPR:
6953 case VEC_WIDEN_MULT_EVEN_EXPR:
6954 case VEC_WIDEN_MULT_ODD_EXPR:
6955 return true;
6957 default:
6958 break;
6960 return false;
6963 /* Return true if CODE represents a ternary tree code for which the
6964 first two operands are commutative. Otherwise return false. */
6965 bool
6966 commutative_ternary_tree_code (enum tree_code code)
6968 switch (code)
6970 case WIDEN_MULT_PLUS_EXPR:
6971 case WIDEN_MULT_MINUS_EXPR:
6972 case DOT_PROD_EXPR:
6973 case FMA_EXPR:
6974 return true;
6976 default:
6977 break;
6979 return false;
6982 /* Returns true if CODE can overflow. */
6984 bool
6985 operation_can_overflow (enum tree_code code)
6987 switch (code)
6989 case PLUS_EXPR:
6990 case MINUS_EXPR:
6991 case MULT_EXPR:
6992 case LSHIFT_EXPR:
6993 /* Can overflow in various ways. */
6994 return true;
6995 case TRUNC_DIV_EXPR:
6996 case EXACT_DIV_EXPR:
6997 case FLOOR_DIV_EXPR:
6998 case CEIL_DIV_EXPR:
6999 /* For INT_MIN / -1. */
7000 return true;
7001 case NEGATE_EXPR:
7002 case ABS_EXPR:
7003 /* For -INT_MIN. */
7004 return true;
7005 default:
7006 /* These operators cannot overflow. */
7007 return false;
7011 /* Returns true if CODE, operating on operands of type TYPE, cannot overflow,
7012 or if -ftrapv does not generate trapping insns for CODE. */
7014 bool
7015 operation_no_trapping_overflow (tree type, enum tree_code code)
7017 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7019 /* We don't generate instructions that trap on overflow for complex or vector
7020 types. */
7021 if (!INTEGRAL_TYPE_P (type))
7022 return true;
7024 if (!TYPE_OVERFLOW_TRAPS (type))
7025 return true;
7027 switch (code)
7029 case PLUS_EXPR:
7030 case MINUS_EXPR:
7031 case MULT_EXPR:
7032 case NEGATE_EXPR:
7033 case ABS_EXPR:
7034 /* These operators can overflow, and -ftrapv generates trapping code for
7035 these. */
7036 return false;
7037 case TRUNC_DIV_EXPR:
7038 case EXACT_DIV_EXPR:
7039 case FLOOR_DIV_EXPR:
7040 case CEIL_DIV_EXPR:
7041 case LSHIFT_EXPR:
7042 /* These operators can overflow, but -ftrapv does not generate trapping
7043 code for these. */
7044 return true;
7045 default:
7046 /* These operators cannot overflow. */
7047 return true;
7051 namespace inchash
7054 /* Generate a hash value for an expression. This can be used iteratively
7055 by passing a previous result as the HSTATE argument.
7057 This function is intended to produce the same hash for expressions which
7058 would compare equal using operand_equal_p. */
7059 void
7060 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7062 int i;
7063 enum tree_code code;
7064 enum tree_code_class tclass;
7066 if (t == NULL_TREE || t == error_mark_node)
7068 hstate.merge_hash (0);
7069 return;
7072 if (!(flags & OEP_ADDRESS_OF))
7073 STRIP_NOPS (t);
7075 code = TREE_CODE (t);
7077 switch (code)
7079 /* Alas, constants aren't shared, so we can't rely on pointer
7080 identity. */
7081 case VOID_CST:
7082 hstate.merge_hash (0);
7083 return;
7084 case INTEGER_CST:
7085 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7086 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7087 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7088 return;
7089 case REAL_CST:
7091 unsigned int val2;
7092 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7093 val2 = rvc_zero;
7094 else
7095 val2 = real_hash (TREE_REAL_CST_PTR (t));
7096 hstate.merge_hash (val2);
7097 return;
7099 case FIXED_CST:
7101 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7102 hstate.merge_hash (val2);
7103 return;
7105 case STRING_CST:
7106 hstate.add ((const void *) TREE_STRING_POINTER (t),
7107 TREE_STRING_LENGTH (t));
7108 return;
7109 case COMPLEX_CST:
7110 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7111 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7112 return;
7113 case VECTOR_CST:
7115 unsigned i;
7116 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7117 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
7118 return;
7120 case SSA_NAME:
7121 /* We can just compare by pointer. */
7122 hstate.add_hwi (SSA_NAME_VERSION (t));
7123 return;
7124 case PLACEHOLDER_EXPR:
7125 /* The node itself doesn't matter. */
7126 return;
7127 case BLOCK:
7128 case OMP_CLAUSE:
7129 /* Ignore. */
7130 return;
7131 case TREE_LIST:
7132 /* A list of expressions, for a CALL_EXPR or as the elements of a
7133 VECTOR_CST. */
7134 for (; t; t = TREE_CHAIN (t))
7135 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7136 return;
7137 case CONSTRUCTOR:
7139 unsigned HOST_WIDE_INT idx;
7140 tree field, value;
7141 flags &= ~OEP_ADDRESS_OF;
7142 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7144 inchash::add_expr (field, hstate, flags);
7145 inchash::add_expr (value, hstate, flags);
7147 return;
7149 case STATEMENT_LIST:
7151 tree_stmt_iterator i;
7152 for (i = tsi_start (CONST_CAST_TREE (t));
7153 !tsi_end_p (i); tsi_next (&i))
7154 inchash::add_expr (tsi_stmt (i), hstate, flags);
7155 return;
7157 case TREE_VEC:
7158 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7159 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7160 return;
7161 case FUNCTION_DECL:
7162 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7163 Otherwise nodes that compare equal according to operand_equal_p might
7164 get different hash codes. However, don't do this for machine specific
7165 or front end builtins, since the function code is overloaded in those
7166 cases. */
7167 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7168 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7170 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7171 code = TREE_CODE (t);
7173 /* FALL THROUGH */
7174 default:
7175 tclass = TREE_CODE_CLASS (code);
7177 if (tclass == tcc_declaration)
7179 /* DECL's have a unique ID */
7180 hstate.add_hwi (DECL_UID (t));
7182 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7184 /* For comparisons that can be swapped, use the lower
7185 tree code. */
7186 enum tree_code ccode = swap_tree_comparison (code);
7187 if (code < ccode)
7188 ccode = code;
7189 hstate.add_object (ccode);
7190 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7191 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7193 else if (CONVERT_EXPR_CODE_P (code))
7195 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7196 operand_equal_p. */
7197 enum tree_code ccode = NOP_EXPR;
7198 hstate.add_object (ccode);
7200 /* Don't hash the type, that can lead to having nodes which
7201 compare equal according to operand_equal_p, but which
7202 have different hash codes. Make sure to include signedness
7203 in the hash computation. */
7204 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7205 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7207 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7208 else if (code == MEM_REF
7209 && (flags & OEP_ADDRESS_OF) != 0
7210 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7211 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7212 && integer_zerop (TREE_OPERAND (t, 1)))
7213 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7214 hstate, flags);
7215 /* Don't ICE on FE specific trees, or their arguments etc.
7216 during operand_equal_p hash verification. */
7217 else if (!IS_EXPR_CODE_CLASS (tclass))
7218 gcc_assert (flags & OEP_HASH_CHECK);
7219 else
7221 unsigned int sflags = flags;
7223 hstate.add_object (code);
7225 switch (code)
7227 case ADDR_EXPR:
7228 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7229 flags |= OEP_ADDRESS_OF;
7230 sflags = flags;
7231 break;
7233 case INDIRECT_REF:
7234 case MEM_REF:
7235 case TARGET_MEM_REF:
7236 flags &= ~OEP_ADDRESS_OF;
7237 sflags = flags;
7238 break;
7240 case ARRAY_REF:
7241 case ARRAY_RANGE_REF:
7242 case COMPONENT_REF:
7243 case BIT_FIELD_REF:
7244 sflags &= ~OEP_ADDRESS_OF;
7245 break;
7247 case COND_EXPR:
7248 flags &= ~OEP_ADDRESS_OF;
7249 break;
7251 case FMA_EXPR:
7252 case WIDEN_MULT_PLUS_EXPR:
7253 case WIDEN_MULT_MINUS_EXPR:
7255 /* The multiplication operands are commutative. */
7256 inchash::hash one, two;
7257 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7258 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7259 hstate.add_commutative (one, two);
7260 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7261 return;
7264 case CALL_EXPR:
7265 if (CALL_EXPR_FN (t) == NULL_TREE)
7266 hstate.add_int (CALL_EXPR_IFN (t));
7267 break;
7269 case TARGET_EXPR:
7270 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7271 Usually different TARGET_EXPRs should just use
7272 different temporaries in their slots. */
7273 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7274 return;
7276 default:
7277 break;
7280 /* Don't hash the type, that can lead to having nodes which
7281 compare equal according to operand_equal_p, but which
7282 have different hash codes. */
7283 if (code == NON_LVALUE_EXPR)
7285 /* Make sure to include signedness in the hash computation. */
7286 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7287 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7290 else if (commutative_tree_code (code))
7292 /* It's a commutative expression. We want to hash it the same
7293 however it appears. We do this by first hashing both operands
7294 and then rehashing based on the order of their independent
7295 hashes. */
7296 inchash::hash one, two;
7297 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7298 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7299 hstate.add_commutative (one, two);
7301 else
7302 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7303 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7304 i == 0 ? flags : sflags);
7306 return;
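/* Illustrative sketch, not part of tree.c: add_expr is meant to agree with
   operand_equal_p, so the operands of a commutative expression hash the
   same in either order.  A and B are hypothetical operands of the same
   integral type.  */

static bool
sketch_commutative_hash_match (tree a, tree b)
{
  inchash::hash h1, h2;
  inchash::add_expr (fold_build2 (PLUS_EXPR, TREE_TYPE (a), a, b), h1, 0);
  inchash::add_expr (fold_build2 (PLUS_EXPR, TREE_TYPE (a), b, a), h2, 0);
  return h1.end () == h2.end ();
}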
7312 /* Constructors for pointer, array and function types.
7313 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7314 constructed by language-dependent code, not here.) */
7316 /* Construct, lay out and return the type of pointers to TO_TYPE with
7317 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7318 reference all of memory. If such a type has already been
7319 constructed, reuse it. */
7321 tree
7322 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7323 bool can_alias_all)
7325 tree t;
7326 bool could_alias = can_alias_all;
7328 if (to_type == error_mark_node)
7329 return error_mark_node;
7331 /* If the pointed-to type has the may_alias attribute set, force
7332 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7333 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7334 can_alias_all = true;
7336 /* In some cases, languages will have things that aren't a POINTER_TYPE
7337 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7338 In that case, return that type without regard to the rest of our
7339 operands.
7341 ??? This is a kludge, but consistent with the way this function has
7342 always operated and there doesn't seem to be a good way to avoid this
7343 at the moment. */
7344 if (TYPE_POINTER_TO (to_type) != 0
7345 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7346 return TYPE_POINTER_TO (to_type);
7348 /* First, if we already have a type for pointers to TO_TYPE and it's
7349 the proper mode, use it. */
7350 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7351 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7352 return t;
7354 t = make_node (POINTER_TYPE);
7356 TREE_TYPE (t) = to_type;
7357 SET_TYPE_MODE (t, mode);
7358 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7359 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7360 TYPE_POINTER_TO (to_type) = t;
7362 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7363 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7364 SET_TYPE_STRUCTURAL_EQUALITY (t);
7365 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7366 TYPE_CANONICAL (t)
7367 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7368 mode, false);
7370 /* Lay out the type. This function has many callers that are concerned
7371 with expression-construction, and this simplifies them all. */
7372 layout_type (t);
7374 return t;
7377 /* By default build pointers in ptr_mode. */
7379 tree
7380 build_pointer_type (tree to_type)
7382 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7383 : TYPE_ADDR_SPACE (to_type);
7384 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7385 return build_pointer_type_for_mode (to_type, pointer_mode, false);
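/* Illustrative sketch, not part of tree.c: pointer types are chained off
   their pointed-to type and reused, so building the same pointer type twice
   yields the identical node.  */

static void
sketch_pointer_type_sharing (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  gcc_assert (p1 == p2);
  gcc_assert (TREE_TYPE (p1) == integer_type_node);
}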
7388 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7390 tree
7391 build_reference_type_for_mode (tree to_type, machine_mode mode,
7392 bool can_alias_all)
7394 tree t;
7395 bool could_alias = can_alias_all;
7397 if (to_type == error_mark_node)
7398 return error_mark_node;
7400 /* If the pointed-to type has the may_alias attribute set, force
7401 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7402 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7403 can_alias_all = true;
7405 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7406 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7407 In that case, return that type without regard to the rest of our
7408 operands.
7410 ??? This is a kludge, but consistent with the way this function has
7411 always operated and there doesn't seem to be a good way to avoid this
7412 at the moment. */
7413 if (TYPE_REFERENCE_TO (to_type) != 0
7414 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7415 return TYPE_REFERENCE_TO (to_type);
7417 /* First, if we already have a type for pointers to TO_TYPE and it's
7418 the proper mode, use it. */
7419 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7420 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7421 return t;
7423 t = make_node (REFERENCE_TYPE);
7425 TREE_TYPE (t) = to_type;
7426 SET_TYPE_MODE (t, mode);
7427 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7428 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7429 TYPE_REFERENCE_TO (to_type) = t;
7431 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7432 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7433 SET_TYPE_STRUCTURAL_EQUALITY (t);
7434 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7435 TYPE_CANONICAL (t)
7436 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7437 mode, false);
7439 layout_type (t);
7441 return t;
7445 /* Build the node for the type of references-to-TO_TYPE by default
7446 in ptr_mode. */
7448 tree
7449 build_reference_type (tree to_type)
7451 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7452 : TYPE_ADDR_SPACE (to_type);
7453 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7454 return build_reference_type_for_mode (to_type, pointer_mode, false);
7457 #define MAX_INT_CACHED_PREC \
7458 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7459 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7461 /* Builds a signed or unsigned integer type of precision PRECISION.
7462 Used for C bitfields whose precision does not match that of
7463 built-in target types. */
7464 tree
7465 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7466 int unsignedp)
7468 tree itype, ret;
7470 if (unsignedp)
7471 unsignedp = MAX_INT_CACHED_PREC + 1;
7473 if (precision <= MAX_INT_CACHED_PREC)
7475 itype = nonstandard_integer_type_cache[precision + unsignedp];
7476 if (itype)
7477 return itype;
7480 itype = make_node (INTEGER_TYPE);
7481 TYPE_PRECISION (itype) = precision;
7483 if (unsignedp)
7484 fixup_unsigned_type (itype);
7485 else
7486 fixup_signed_type (itype);
7488 ret = itype;
7489 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7490 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7491 if (precision <= MAX_INT_CACHED_PREC)
7492 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7494 return ret;
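/* Illustrative sketch, not part of tree.c: a 24-bit unsigned type such as a
   C bit-field of that width would need.  Repeated requests for the same
   precision hit the cache above and return the identical node.  */

static void
sketch_unsigned_24bit_type (void)
{
  tree u24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_assert (TYPE_PRECISION (u24) == 24 && TYPE_UNSIGNED (u24));
  gcc_assert (build_nonstandard_integer_type (24, 1) == u24);
}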
7497 #define MAX_BOOL_CACHED_PREC \
7498 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7499 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7501 /* Builds a boolean type of precision PRECISION.
7502 Used for boolean vectors to choose proper vector element size. */
7503 tree
7504 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7506 tree type;
7508 if (precision <= MAX_BOOL_CACHED_PREC)
7510 type = nonstandard_boolean_type_cache[precision];
7511 if (type)
7512 return type;
7515 type = make_node (BOOLEAN_TYPE);
7516 TYPE_PRECISION (type) = precision;
7517 fixup_signed_type (type);
7519 if (precision <= MAX_INT_CACHED_PREC)
7520 nonstandard_boolean_type_cache[precision] = type;
7522 return type;
7525 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7526 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7527 is true, reuse such a type that has already been constructed. */
7529 static tree
7530 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7532 tree itype = make_node (INTEGER_TYPE);
7534 TREE_TYPE (itype) = type;
7536 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7537 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7539 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7540 SET_TYPE_MODE (itype, TYPE_MODE (type));
7541 TYPE_SIZE (itype) = TYPE_SIZE (type);
7542 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7543 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7544 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7545 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7547 if (!shared)
7548 return itype;
7550 if ((TYPE_MIN_VALUE (itype)
7551 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7552 || (TYPE_MAX_VALUE (itype)
7553 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7555 /* Since we cannot reliably merge this type, we need to compare it using
7556 structural equality checks. */
7557 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7558 return itype;
7561 hashval_t hash = type_hash_canon_hash (itype);
7562 itype = type_hash_canon (hash, itype);
7564 return itype;
7567 /* Wrapper around build_range_type_1 with SHARED set to true. */
7569 tree
7570 build_range_type (tree type, tree lowval, tree highval)
7572 return build_range_type_1 (type, lowval, highval, true);
7575 /* Wrapper around build_range_type_1 with SHARED set to false. */
7577 tree
7578 build_nonshared_range_type (tree type, tree lowval, tree highval)
7580 return build_range_type_1 (type, lowval, highval, false);
7583 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7584 MAXVAL should be the maximum value in the domain
7585 (one less than the length of the array).
7587 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7588 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7589 The limit exists because the result is a signed type and we don't handle
7590 sizes that use more than one HOST_WIDE_INT. */
7592 tree
7593 build_index_type (tree maxval)
7595 return build_range_type (sizetype, size_zero_node, maxval);
7598 /* Return true if the debug information for TYPE, a subtype, should be emitted
7599 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7600 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7601 debug info and doesn't reflect the source code. */
7603 bool
7604 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7606 tree base_type = TREE_TYPE (type), low, high;
7608 /* Subrange types have a base type which is an integral type. */
7609 if (!INTEGRAL_TYPE_P (base_type))
7610 return false;
7612 /* Get the real bounds of the subtype. */
7613 if (lang_hooks.types.get_subrange_bounds)
7614 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7615 else
7617 low = TYPE_MIN_VALUE (type);
7618 high = TYPE_MAX_VALUE (type);
7621 /* If the type and its base type have the same representation and the same
7622 name, then the type is not a subrange but a copy of the base type. */
7623 if ((TREE_CODE (base_type) == INTEGER_TYPE
7624 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7625 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7626 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7627 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7628 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7629 return false;
7631 if (lowval)
7632 *lowval = low;
7633 if (highval)
7634 *highval = high;
7635 return true;
7638 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7639 and number of elements specified by the range of values of INDEX_TYPE.
7640 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7641 If SHARED is true, reuse such a type that has already been constructed. */
7643 static tree
7644 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7645 bool shared)
7647 tree t;
7649 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7651 error ("arrays of functions are not meaningful");
7652 elt_type = integer_type_node;
7655 t = make_node (ARRAY_TYPE);
7656 TREE_TYPE (t) = elt_type;
7657 TYPE_DOMAIN (t) = index_type;
7658 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7659 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7660 layout_type (t);
7662 /* If the element type is incomplete at this point we get marked for
7663 structural equality. Do not record these types in the canonical
7664 type hashtable. */
7665 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7666 return t;
7668 if (shared)
7670 hashval_t hash = type_hash_canon_hash (t);
7671 t = type_hash_canon (hash, t);
7674 if (TYPE_CANONICAL (t) == t)
7676 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7677 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7678 || in_lto_p)
7679 SET_TYPE_STRUCTURAL_EQUALITY (t);
7680 else if (TYPE_CANONICAL (elt_type) != elt_type
7681 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7682 TYPE_CANONICAL (t)
7683 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7684 index_type
7685 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7686 typeless_storage, shared);
7689 return t;
7692 /* Wrapper around build_array_type_1 with SHARED set to true. */
7694 tree
7695 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7697 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
7700 /* Wrapper around build_array_type_1 with SHARED set to false. */
7702 tree
7703 build_nonshared_array_type (tree elt_type, tree index_type)
7705 return build_array_type_1 (elt_type, index_type, false, false);
7708 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7709 sizetype. */
7711 tree
7712 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7714 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
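/* Illustrative sketch, not part of tree.c: the type "int[10]" built from an
   element count, which is equivalent to supplying the index domain [0, 9]
   explicitly.  */

static void
sketch_array_of_ten_ints (void)
{
  tree a = build_array_type_nelts (integer_type_node, 10);
  tree b = build_array_type (integer_type_node,
                             build_index_type (size_int (9)), false);
  gcc_assert (TREE_CODE (a) == ARRAY_TYPE
              && TREE_TYPE (a) == integer_type_node);
  gcc_assert (tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (a)),
                                  TYPE_MAX_VALUE (TYPE_DOMAIN (b))));
}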
7717 /* Recursively strip ARRAY_TYPE from TYPE until a non-array
7718 element type is found, and return it. */
7720 tree
7721 strip_array_types (tree type)
7723 while (TREE_CODE (type) == ARRAY_TYPE)
7724 type = TREE_TYPE (type);
7726 return type;
7729 /* Computes the canonical argument types from the argument type list
7730 ARGTYPES.
7732 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7733 on entry to this function, or if any of the ARGTYPES are
7734 structural.
7736 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7737 true on entry to this function, or if any of the ARGTYPES are
7738 non-canonical.
7740 Returns a canonical argument list, which may be ARGTYPES when the
7741 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7742 true) or would not differ from ARGTYPES. */
7744 static tree
7745 maybe_canonicalize_argtypes (tree argtypes,
7746 bool *any_structural_p,
7747 bool *any_noncanonical_p)
7749 tree arg;
7750 bool any_noncanonical_argtypes_p = false;
7752 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7754 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7755 /* Fail gracefully by stating that the type is structural. */
7756 *any_structural_p = true;
7757 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7758 *any_structural_p = true;
7759 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7760 || TREE_PURPOSE (arg))
7761 /* If the argument has a default argument, we consider it
7762 non-canonical even though the type itself is canonical.
7763 That way, different variants of function and method types
7764 with default arguments will all point to the variant with
7765 no defaults as their canonical type. */
7766 any_noncanonical_argtypes_p = true;
7769 if (*any_structural_p)
7770 return argtypes;
7772 if (any_noncanonical_argtypes_p)
7774 /* Build the canonical list of argument types. */
7775 tree canon_argtypes = NULL_TREE;
7776 bool is_void = false;
7778 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7780 if (arg == void_list_node)
7781 is_void = true;
7782 else
7783 canon_argtypes = tree_cons (NULL_TREE,
7784 TYPE_CANONICAL (TREE_VALUE (arg)),
7785 canon_argtypes);
7788 canon_argtypes = nreverse (canon_argtypes);
7789 if (is_void)
7790 canon_argtypes = chainon (canon_argtypes, void_list_node);
7792 /* There is a non-canonical type. */
7793 *any_noncanonical_p = true;
7794 return canon_argtypes;
7797 /* The canonical argument types are the same as ARGTYPES. */
7798 return argtypes;
7801 /* Construct, lay out and return
7802 the type of functions returning type VALUE_TYPE
7803 given arguments of types ARG_TYPES.
7804 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7805 are data type nodes for the arguments of the function.
7806 If such a type has already been constructed, reuse it. */
7808 tree
7809 build_function_type (tree value_type, tree arg_types)
7811 tree t;
7812 inchash::hash hstate;
7813 bool any_structural_p, any_noncanonical_p;
7814 tree canon_argtypes;
7816 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7818 error ("function return type cannot be function");
7819 value_type = integer_type_node;
7822 /* Make a node of the sort we want. */
7823 t = make_node (FUNCTION_TYPE);
7824 TREE_TYPE (t) = value_type;
7825 TYPE_ARG_TYPES (t) = arg_types;
7827 /* If we already have such a type, use the old one. */
7828 hashval_t hash = type_hash_canon_hash (t);
7829 t = type_hash_canon (hash, t);
7831 /* Set up the canonical type. */
7832 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7833 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7834 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7835 &any_structural_p,
7836 &any_noncanonical_p);
7837 if (any_structural_p)
7838 SET_TYPE_STRUCTURAL_EQUALITY (t);
7839 else if (any_noncanonical_p)
7840 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7841 canon_argtypes);
7843 if (!COMPLETE_TYPE_P (t))
7844 layout_type (t);
7845 return t;
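/* Illustrative sketch, not part of tree.c: building "int f (double)" by
   constructing the TREE_LIST of argument types by hand.  A non-varargs
   signature is terminated with void_list_node.  */

static tree
sketch_int_of_double_fn_type (void)
{
  tree args = tree_cons (NULL_TREE, double_type_node, void_list_node);
  return build_function_type (integer_type_node, args);
}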
7848 /* Build a function type. The RETURN_TYPE is the type returned by the
7849 function. If VAARGS is set, no void_type_node is appended to the
7850 list. ARGP must always be terminated by a NULL_TREE. */
7852 static tree
7853 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7855 tree t, args, last;
7857 t = va_arg (argp, tree);
7858 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7859 args = tree_cons (NULL_TREE, t, args);
7861 if (vaargs)
7863 last = args;
7864 if (args != NULL_TREE)
7865 args = nreverse (args);
7866 gcc_assert (last != void_list_node);
7868 else if (args == NULL_TREE)
7869 args = void_list_node;
7870 else
7872 last = args;
7873 args = nreverse (args);
7874 TREE_CHAIN (last) = void_list_node;
7876 args = build_function_type (return_type, args);
7878 return args;
7881 /* Build a function type. The RETURN_TYPE is the type returned by the
7882 function. If additional arguments are provided, they are
7883 additional argument types. The list of argument types must always
7884 be terminated by NULL_TREE. */
7886 tree
7887 build_function_type_list (tree return_type, ...)
7889 tree args;
7890 va_list p;
7892 va_start (p, return_type);
7893 args = build_function_type_list_1 (false, return_type, p);
7894 va_end (p);
7895 return args;
7898 /* Build a variable argument function type. The RETURN_TYPE is the
7899 type returned by the function. If additional arguments are provided,
7900 they are additional argument types. The list of argument types must
7901 always be terminated by NULL_TREE. */
7903 tree
7904 build_varargs_function_type_list (tree return_type, ...)
7906 tree args;
7907 va_list p;
7909 va_start (p, return_type);
7910 args = build_function_type_list_1 (true, return_type, p);
7911 va_end (p);
7913 return args;
7916 /* Build a function type. RETURN_TYPE is the type returned by the
7917 function; VAARGS indicates whether the function takes varargs. The
7918 function takes N named arguments, the types of which are provided in
7919 ARG_TYPES. */
7921 static tree
7922 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7923 tree *arg_types)
7925 int i;
7926 tree t = vaargs ? NULL_TREE : void_list_node;
7928 for (i = n - 1; i >= 0; i--)
7929 t = tree_cons (NULL_TREE, arg_types[i], t);
7931 return build_function_type (return_type, t);
7934 /* Build a function type. RETURN_TYPE is the type returned by the
7935 function. The function takes N named arguments, the types of which
7936 are provided in ARG_TYPES. */
7938 tree
7939 build_function_type_array (tree return_type, int n, tree *arg_types)
7941 return build_function_type_array_1 (false, return_type, n, arg_types);
7944 /* Build a variable argument function type. RETURN_TYPE is the type
7945 returned by the function. The function takes N named arguments, the
7946 types of which are provided in ARG_TYPES. */
7948 tree
7949 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7951 return build_function_type_array_1 (true, return_type, n, arg_types);
7954 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7955 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7956 for the method. An implicit additional parameter (of type
7957 pointer-to-BASETYPE) is added to the ARGTYPES. */
7959 tree
7960 build_method_type_directly (tree basetype,
7961 tree rettype,
7962 tree argtypes)
7964 tree t;
7965 tree ptype;
7966 bool any_structural_p, any_noncanonical_p;
7967 tree canon_argtypes;
7969 /* Make a node of the sort we want. */
7970 t = make_node (METHOD_TYPE);
7972 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7973 TREE_TYPE (t) = rettype;
7974 ptype = build_pointer_type (basetype);
7976 /* The actual arglist for this function includes a "hidden" argument
7977 which is "this". Put it into the list of argument types. */
7978 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7979 TYPE_ARG_TYPES (t) = argtypes;
7981 /* If we already have such a type, use the old one. */
7982 hashval_t hash = type_hash_canon_hash (t);
7983 t = type_hash_canon (hash, t);
7985 /* Set up the canonical type. */
7986 any_structural_p
7987 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7988 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7989 any_noncanonical_p
7990 = (TYPE_CANONICAL (basetype) != basetype
7991 || TYPE_CANONICAL (rettype) != rettype);
7992 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7993 &any_structural_p,
7994 &any_noncanonical_p);
7995 if (any_structural_p)
7996 SET_TYPE_STRUCTURAL_EQUALITY (t);
7997 else if (any_noncanonical_p)
7998 TYPE_CANONICAL (t)
7999 = build_method_type_directly (TYPE_CANONICAL (basetype),
8000 TYPE_CANONICAL (rettype),
8001 canon_argtypes);
8002 if (!COMPLETE_TYPE_P (t))
8003 layout_type (t);
8005 return t;
8008 /* Construct, lay out and return the type of methods belonging to class
8009 BASETYPE and whose arguments and values are described by TYPE.
8010 If that type exists already, reuse it.
8011 TYPE must be a FUNCTION_TYPE node. */
8013 tree
8014 build_method_type (tree basetype, tree type)
8016 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8018 return build_method_type_directly (basetype,
8019 TREE_TYPE (type),
8020 TYPE_ARG_TYPES (type));
8023 /* Construct, lay out and return the type of offsets to a value
8024 of type TYPE, within an object of type BASETYPE.
8025 If a suitable offset type exists already, reuse it. */
8027 tree
8028 build_offset_type (tree basetype, tree type)
8030 tree t;
8032 /* Make a node of the sort we want. */
8033 t = make_node (OFFSET_TYPE);
8035 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8036 TREE_TYPE (t) = type;
8038 /* If we already have such a type, use the old one. */
8039 hashval_t hash = type_hash_canon_hash (t);
8040 t = type_hash_canon (hash, t);
8042 if (!COMPLETE_TYPE_P (t))
8043 layout_type (t);
8045 if (TYPE_CANONICAL (t) == t)
8047 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8048 || TYPE_STRUCTURAL_EQUALITY_P (type))
8049 SET_TYPE_STRUCTURAL_EQUALITY (t);
8050 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8051 || TYPE_CANONICAL (type) != type)
8052 TYPE_CANONICAL (t)
8053 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8054 TYPE_CANONICAL (type));
8057 return t;
8060 /* Create a complex type whose components are COMPONENT_TYPE.
8062 If NAMED is true, the type is given a TYPE_NAME. We do not always
8063 do so because this creates a DECL node and thus makes the DECL_UIDs
8064 dependent on the type canonicalization hashtable, which is GC-ed,
8065 so the DECL_UIDs would not be stable with respect to garbage collection. */
8067 tree
8068 build_complex_type (tree component_type, bool named)
8070 tree t;
8072 gcc_assert (INTEGRAL_TYPE_P (component_type)
8073 || SCALAR_FLOAT_TYPE_P (component_type)
8074 || FIXED_POINT_TYPE_P (component_type));
8076 /* Make a node of the sort we want. */
8077 t = make_node (COMPLEX_TYPE);
8079 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8081 /* If we already have such a type, use the old one. */
8082 hashval_t hash = type_hash_canon_hash (t);
8083 t = type_hash_canon (hash, t);
8085 if (!COMPLETE_TYPE_P (t))
8086 layout_type (t);
8088 if (TYPE_CANONICAL (t) == t)
8090 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8091 SET_TYPE_STRUCTURAL_EQUALITY (t);
8092 else if (TYPE_CANONICAL (component_type) != component_type)
8093 TYPE_CANONICAL (t)
8094 = build_complex_type (TYPE_CANONICAL (component_type), named);
8097 /* We need to create a name, since complex is a fundamental type. */
8098 if (!TYPE_NAME (t) && named)
8100 const char *name;
8101 if (component_type == char_type_node)
8102 name = "complex char";
8103 else if (component_type == signed_char_type_node)
8104 name = "complex signed char";
8105 else if (component_type == unsigned_char_type_node)
8106 name = "complex unsigned char";
8107 else if (component_type == short_integer_type_node)
8108 name = "complex short int";
8109 else if (component_type == short_unsigned_type_node)
8110 name = "complex short unsigned int";
8111 else if (component_type == integer_type_node)
8112 name = "complex int";
8113 else if (component_type == unsigned_type_node)
8114 name = "complex unsigned int";
8115 else if (component_type == long_integer_type_node)
8116 name = "complex long int";
8117 else if (component_type == long_unsigned_type_node)
8118 name = "complex long unsigned int";
8119 else if (component_type == long_long_integer_type_node)
8120 name = "complex long long int";
8121 else if (component_type == long_long_unsigned_type_node)
8122 name = "complex long long unsigned int";
8123 else
8124 name = 0;
8126 if (name != 0)
8127 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8128 get_identifier (name), t);
8131 return build_qualified_type (t, TYPE_QUALS (component_type));
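/* Illustrative sketch, not part of tree.c: complex types are shared through
   the type hash table above, so two requests for the same unnamed component
   type return the identical node.  */

static void
sketch_complex_type_sharing (void)
{
  tree c1 = build_complex_type (float_type_node, false);
  tree c2 = build_complex_type (float_type_node, false);
  gcc_assert (c1 == c2);
  gcc_assert (TREE_CODE (c1) == COMPLEX_TYPE
              && TREE_TYPE (c1) == float_type_node);
}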
8134 /* If TYPE is a real or complex floating-point type and the target
8135 does not directly support arithmetic on TYPE then return the wider
8136 type to be used for arithmetic on TYPE. Otherwise, return
8137 NULL_TREE. */
8139 tree
8140 excess_precision_type (tree type)
8142 /* The target can give two different responses to the question of
8143 which excess precision mode it would like depending on whether we
8144 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8146 enum excess_precision_type requested_type
8147 = (flag_excess_precision == EXCESS_PRECISION_FAST
8148 ? EXCESS_PRECISION_TYPE_FAST
8149 : EXCESS_PRECISION_TYPE_STANDARD);
8151 enum flt_eval_method target_flt_eval_method
8152 = targetm.c.excess_precision (requested_type);
8154 /* The target should not ask for unpredictable float evaluation (though
8155 it might implicitly advertise that the evaluation is unpredictable;
8156 we don't care about that here, as it will have been reported
8157 elsewhere). If it does ask for unpredictable evaluation, we have
8158 nothing to do here. */
8159 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8161 /* Nothing to do. The target has asked for all types we know about
8162 to be computed with their native precision and range. */
8163 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8164 return NULL_TREE;
8166 /* The target will promote this type in a target-dependent way, so excess
8167 precision ought to leave it alone. */
8168 if (targetm.promoted_type (type) != NULL_TREE)
8169 return NULL_TREE;
8171 machine_mode float16_type_mode = (float16_type_node
8172 ? TYPE_MODE (float16_type_node)
8173 : VOIDmode);
8174 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8175 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8177 switch (TREE_CODE (type))
8179 case REAL_TYPE:
8181 machine_mode type_mode = TYPE_MODE (type);
8182 switch (target_flt_eval_method)
8184 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8185 if (type_mode == float16_type_mode)
8186 return float_type_node;
8187 break;
8188 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8189 if (type_mode == float16_type_mode
8190 || type_mode == float_type_mode)
8191 return double_type_node;
8192 break;
8193 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8194 if (type_mode == float16_type_mode
8195 || type_mode == float_type_mode
8196 || type_mode == double_type_mode)
8197 return long_double_type_node;
8198 break;
8199 default:
8200 gcc_unreachable ();
8202 break;
8204 case COMPLEX_TYPE:
8206 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8207 return NULL_TREE;
8208 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8209 switch (target_flt_eval_method)
8211 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8212 if (type_mode == float16_type_mode)
8213 return complex_float_type_node;
8214 break;
8215 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8216 if (type_mode == float16_type_mode
8217 || type_mode == float_type_mode)
8218 return complex_double_type_node;
8219 break;
8220 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8221 if (type_mode == float16_type_mode
8222 || type_mode == float_type_mode
8223 || type_mode == double_type_mode)
8224 return complex_long_double_type_node;
8225 break;
8226 default:
8227 gcc_unreachable ();
8229 break;
8231 default:
8232 break;
8235 return NULL_TREE;
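/* Usage sketch (illustrative only): on a target whose excess-precision
   mode is FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE, a front end can do

     tree wide = excess_precision_type (float_type_node);

   WIDE is then double_type_node, and the caller is expected to carry
   out the arithmetic in WIDE before converting the result back to
   float.  For types the target computes natively, NULL_TREE comes back
   and nothing needs to change.  */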
8238 /* Return OP, stripped of any conversions to wider types as much as is safe.
8239 Converting the value back to OP's type makes a value equivalent to OP.
8241 If FOR_TYPE is nonzero, we return a value which, if converted to
8242 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8244 OP must have integer, real or enumeral type. Pointers are not allowed!
8246 There are some cases where the obvious value we could return
8247 would regenerate to OP if converted to OP's type,
8248 but would not extend like OP to wider types.
8249 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8250 For example, if OP is (unsigned short)(signed char)-1,
8251 we avoid returning (signed char)-1 if FOR_TYPE is int,
8252 even though extending that to an unsigned short would regenerate OP,
8253 since the result of extending (signed char)-1 to (int)
8254 is different from (int) OP. */
8256 tree
8257 get_unwidened (tree op, tree for_type)
8259 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8260 tree type = TREE_TYPE (op);
8261 unsigned final_prec
8262 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8263 int uns
8264 = (for_type != 0 && for_type != type
8265 && final_prec > TYPE_PRECISION (type)
8266 && TYPE_UNSIGNED (type));
8267 tree win = op;
8269 while (CONVERT_EXPR_P (op))
8271 int bitschange;
8273 /* TYPE_PRECISION on vector types has different meaning
8274 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8275 so avoid them here. */
8276 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8277 break;
8279 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8280 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8282 /* Truncations are many-one so cannot be removed.
8283 Unless we are later going to truncate down even farther. */
8284 if (bitschange < 0
8285 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8286 break;
8288 /* See what's inside this conversion. If we decide to strip it,
8289 we will set WIN. */
8290 op = TREE_OPERAND (op, 0);
8292 /* If we have not stripped any zero-extensions (uns is 0),
8293 we can strip any kind of extension.
8294 If we have previously stripped a zero-extension,
8295 only zero-extensions can safely be stripped.
8296 Any extension can be stripped if the bits it would produce
8297 are all going to be discarded later by truncating to FOR_TYPE. */
8299 if (bitschange > 0)
8301 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8302 win = op;
8303 /* TYPE_UNSIGNED says whether this is a zero-extension.
8304 Let's avoid computing it if it does not affect WIN
8305 and if UNS will not be needed again. */
8306 if ((uns
8307 || CONVERT_EXPR_P (op))
8308 && TYPE_UNSIGNED (TREE_TYPE (op)))
8310 uns = 1;
8311 win = op;
8316 /* If we finally reach a constant, see if it fits in something smaller and
8317 in that case convert it. */
8318 if (TREE_CODE (win) == INTEGER_CST)
8320 tree wtype = TREE_TYPE (win);
8321 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8322 if (for_type)
8323 prec = MAX (prec, final_prec);
8324 if (prec < TYPE_PRECISION (wtype))
8326 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8327 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8328 win = fold_convert (t, win);
8332 return win;
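/* Usage sketch (illustrative only): if OP is (int) C for a variable C
   of type signed char, then

     tree narrow = get_unwidened (op, NULL_TREE);

   strips the widening conversion and returns C, because converting C
   back to int regenerates OP.  With a nonzero FOR_TYPE the stricter
   rules described above apply.  */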
8335 /* Return OP or a simpler expression for a narrower value
8336 which can be sign-extended or zero-extended to give back OP.
8337 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8338 or 0 if the value should be sign-extended. */
8340 tree
8341 get_narrower (tree op, int *unsignedp_ptr)
8343 int uns = 0;
8344 int first = 1;
8345 tree win = op;
8346 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8348 while (TREE_CODE (op) == NOP_EXPR)
8350 int bitschange
8351 = (TYPE_PRECISION (TREE_TYPE (op))
8352 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8354 /* Truncations are many-one so cannot be removed. */
8355 if (bitschange < 0)
8356 break;
8358 /* See what's inside this conversion. If we decide to strip it,
8359 we will set WIN. */
8361 if (bitschange > 0)
8363 op = TREE_OPERAND (op, 0);
8364 /* An extension: the outermost one can be stripped,
8365 but remember whether it is zero or sign extension. */
8366 if (first)
8367 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8368 /* Otherwise, if a sign extension has been stripped,
8369 only sign extensions can now be stripped;
8370 if a zero extension has been stripped, only zero-extensions. */
8371 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8372 break;
8373 first = 0;
8375 else /* bitschange == 0 */
8377 /* A change in nominal type can always be stripped, but we must
8378 preserve the unsignedness. */
8379 if (first)
8380 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8381 first = 0;
8382 op = TREE_OPERAND (op, 0);
8383 /* Keep trying to narrow, but don't assign op to win if it
8384 would turn an integral type into something else. */
8385 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8386 continue;
8389 win = op;
8392 if (TREE_CODE (op) == COMPONENT_REF
8393 /* Since type_for_size always gives an integer type. */
8394 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8395 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8396 /* Ensure field is laid out already. */
8397 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8398 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8400 unsigned HOST_WIDE_INT innerprec
8401 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8402 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8403 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8404 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8406 /* We can get this structure field in a narrower type that fits it,
8407 but the resulting extension to its nominal type (a fullword type)
8408 must satisfy the same conditions as for other extensions.
8410 Do this only for fields that are aligned (not bit-fields),
8411 because when bit-field insns are used there is no
8412 advantage in doing this. */
8414 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8415 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8416 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8417 && type != 0)
8419 if (first)
8420 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8421 win = fold_convert (type, op);
8425 *unsignedp_ptr = uns;
8426 return win;
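/* Usage sketch (illustrative only): if OP is (int) U for a variable U
   of type unsigned short, then

     int uns;
     tree narrow = get_narrower (op, &uns);

   returns U and sets UNS to 1, recording that zero-extending NARROW
   regenerates OP.  A caller that wants to perform an operation in the
   narrower type can use NARROW and UNS to reconstruct OP afterwards.  */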
8429 /* Return true if integer constant C has a value that is permissible
8430 for TYPE, an integral type. */
8432 bool
8433 int_fits_type_p (const_tree c, const_tree type)
8435 tree type_low_bound, type_high_bound;
8436 bool ok_for_low_bound, ok_for_high_bound;
8437 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8439 /* Non-standard boolean types can have arbitrary precision but various
8440 transformations assume that they can only take values 0 and +/-1. */
8441 if (TREE_CODE (type) == BOOLEAN_TYPE)
8442 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8444 retry:
8445 type_low_bound = TYPE_MIN_VALUE (type);
8446 type_high_bound = TYPE_MAX_VALUE (type);
8448 /* If at least one bound of the type is a constant integer, we can check
8449 ourselves and maybe make a decision. If no such decision is possible, but
8450 this type is a subtype, try checking against that. Otherwise, use
8451 fits_to_tree_p, which checks against the precision.
8453 Compute the status for each possibly constant bound, and return if we see
8454 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8455 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8456 for "constant known to fit". */
8458 /* Check if c >= type_low_bound. */
8459 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8461 if (tree_int_cst_lt (c, type_low_bound))
8462 return false;
8463 ok_for_low_bound = true;
8465 else
8466 ok_for_low_bound = false;
8468 /* Check if c <= type_high_bound. */
8469 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8471 if (tree_int_cst_lt (type_high_bound, c))
8472 return false;
8473 ok_for_high_bound = true;
8475 else
8476 ok_for_high_bound = false;
8478 /* If the constant fits both bounds, the result is known. */
8479 if (ok_for_low_bound && ok_for_high_bound)
8480 return true;
8482 /* Perform some generic filtering which may allow making a decision
8483 even if the bounds are not constant. First, negative integers
8484 never fit in unsigned types. */
8485 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8486 return false;
8488 /* Second, narrower types always fit in wider ones. */
8489 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8490 return true;
8492 /* Third, unsigned integers with top bit set never fit signed types. */
8493 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8495 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8496 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8498 /* When a tree_cst is converted to a wide-int, the precision
8499 is taken from the type. However, if the precision of the
8500 mode underneath the type is smaller than that, it is
8501 possible that the value will not fit. The test below
8502 fails if any bit is set between the sign bit of the
8503 underlying mode and the top bit of the type. */
8504 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8505 return false;
8507 else if (wi::neg_p (wi::to_wide (c)))
8508 return false;
8511 /* If we haven't been able to decide at this point, there is nothing more we
8512 can check ourselves here. Look at the base type if we have one and it
8513 has the same precision. */
8514 if (TREE_CODE (type) == INTEGER_TYPE
8515 && TREE_TYPE (type) != 0
8516 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8518 type = TREE_TYPE (type);
8519 goto retry;
8522 /* Or to fits_to_tree_p, if nothing else. */
8523 return wi::fits_to_tree_p (wi::to_wide (c), type);
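/* Usage sketch (illustrative only), assuming the usual 8-bit signed
   char and 16-bit short:

     tree c = build_int_cst (integer_type_node, 300);
     int_fits_type_p (c, signed_char_type_node);     => false
     int_fits_type_p (c, short_integer_type_node);   => true

   since 300 exceeds the upper bound of signed char but lies within the
   bounds of short int.  */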
8526 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8527 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8528 represented (assuming two's-complement arithmetic) within the bit
8529 precision of the type are returned instead. */
8531 void
8532 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8534 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8535 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8536 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8537 else
8539 if (TYPE_UNSIGNED (type))
8540 mpz_set_ui (min, 0);
8541 else
8543 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8544 wi::to_mpz (mn, min, SIGNED);
8548 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8549 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8550 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8551 else
8553 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8554 wi::to_mpz (mn, max, TYPE_SIGN (type));
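/* Usage sketch (illustrative only): a caller wanting GMP bounds for a
   type typically does

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (type, lo, hi);
     ... use lo/hi ...
     mpz_clear (lo);
     mpz_clear (hi);

   For a 32-bit unsigned type this yields lo = 0 and hi = 2^32 - 1.  */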
8558 /* Return true if VAR is an automatic variable defined in function FN. */
8560 bool
8561 auto_var_in_fn_p (const_tree var, const_tree fn)
8563 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8564 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8565 || TREE_CODE (var) == PARM_DECL)
8566 && ! TREE_STATIC (var))
8567 || TREE_CODE (var) == LABEL_DECL
8568 || TREE_CODE (var) == RESULT_DECL));
8571 /* Subprogram of following function. Called by walk_tree.
8573 Return *TP if it is an automatic variable or parameter of the
8574 function passed in as DATA. */
8576 static tree
8577 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8579 tree fn = (tree) data;
8581 if (TYPE_P (*tp))
8582 *walk_subtrees = 0;
8584 else if (DECL_P (*tp)
8585 && auto_var_in_fn_p (*tp, fn))
8586 return *tp;
8588 return NULL_TREE;
8591 /* Returns true if T is, contains, or refers to a type with variable
8592 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8593 arguments, but not the return type. If FN is nonzero, only return
8594 true if a modifier of the type or position of FN is a variable or
8595 parameter inside FN.
8597 This concept is more general than that of C99 'variably modified types':
8598 in C99, a struct type is never variably modified because a VLA may not
8599 appear as a structure member. However, in GNU C, code like:
8601 struct S { int i[f()]; };
8603 is valid, and other languages may define similar constructs. */
8605 bool
8606 variably_modified_type_p (tree type, tree fn)
8608 tree t;
8610 /* Test if T is either variable (if FN is zero) or an expression containing
8611 a variable in FN. If TYPE isn't gimplified, return true also if
8612 gimplify_one_sizepos would gimplify the expression into a local
8613 variable. */
8614 #define RETURN_TRUE_IF_VAR(T) \
8615 do { tree _t = (T); \
8616 if (_t != NULL_TREE \
8617 && _t != error_mark_node \
8618 && TREE_CODE (_t) != INTEGER_CST \
8619 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8620 && (!fn \
8621 || (!TYPE_SIZES_GIMPLIFIED (type) \
8622 && !is_gimple_sizepos (_t)) \
8623 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8624 return true; } while (0)
8626 if (type == error_mark_node)
8627 return false;
8629 /* If TYPE itself has variable size, it is variably modified. */
8630 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8631 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8633 switch (TREE_CODE (type))
8635 case POINTER_TYPE:
8636 case REFERENCE_TYPE:
8637 case VECTOR_TYPE:
8638 /* Ada can have pointer types referring to themselves indirectly. */
8639 if (TREE_VISITED (type))
8640 return false;
8641 TREE_VISITED (type) = true;
8642 if (variably_modified_type_p (TREE_TYPE (type), fn))
8644 TREE_VISITED (type) = false;
8645 return true;
8647 TREE_VISITED (type) = false;
8648 break;
8650 case FUNCTION_TYPE:
8651 case METHOD_TYPE:
8652 /* If TYPE is a function type, it is variably modified if the
8653 return type is variably modified. */
8654 if (variably_modified_type_p (TREE_TYPE (type), fn))
8655 return true;
8656 break;
8658 case INTEGER_TYPE:
8659 case REAL_TYPE:
8660 case FIXED_POINT_TYPE:
8661 case ENUMERAL_TYPE:
8662 case BOOLEAN_TYPE:
8663 /* Scalar types are variably modified if their end points
8664 aren't constant. */
8665 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8666 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8667 break;
8669 case RECORD_TYPE:
8670 case UNION_TYPE:
8671 case QUAL_UNION_TYPE:
8672 /* We can't see if any of the fields are variably-modified by the
8673 definition we normally use, since that would produce infinite
8674 recursion via pointers. */
8675 /* This is variably modified if some field's type is. */
8676 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8677 if (TREE_CODE (t) == FIELD_DECL)
8679 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8680 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8681 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8683 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8684 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8686 break;
8688 case ARRAY_TYPE:
8689 /* Do not call ourselves to avoid infinite recursion. This is
8690 variably modified if the element type is. */
8691 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8692 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8693 break;
8695 default:
8696 break;
8699 /* The current language may have other cases to check, but in general,
8700 all other types are not variably modified. */
8701 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8703 #undef RETURN_TRUE_IF_VAR
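/* Usage sketch (illustrative only): for the GNU C example given above,

     struct S { int i[f()]; };

   variably_modified_type_p (<type of S>, NULL_TREE) returns true,
   because the size of the member array, and therefore of S itself,
   depends on the call to f.  */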
8706 /* Given a DECL or TYPE, return the scope in which it was declared, or
8707 NULL_TREE if there is no containing scope. */
8709 tree
8710 get_containing_scope (const_tree t)
8712 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8715 /* Return the innermost context enclosing DECL that is
8716 a FUNCTION_DECL, or zero if none. */
8718 tree
8719 decl_function_context (const_tree decl)
8721 tree context;
8723 if (TREE_CODE (decl) == ERROR_MARK)
8724 return 0;
8726 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8727 where we look up the function at runtime. Such functions always take
8728 a first argument of type 'pointer to real context'.
8730 C++ should really be fixed to use DECL_CONTEXT for the real context,
8731 and use something else for the "virtual context". */
8732 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8733 context
8734 = TYPE_MAIN_VARIANT
8735 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8736 else
8737 context = DECL_CONTEXT (decl);
8739 while (context && TREE_CODE (context) != FUNCTION_DECL)
8741 if (TREE_CODE (context) == BLOCK)
8742 context = BLOCK_SUPERCONTEXT (context);
8743 else
8744 context = get_containing_scope (context);
8747 return context;
8750 /* Return the innermost context enclosing DECL that is
8751 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8752 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8754 tree
8755 decl_type_context (const_tree decl)
8757 tree context = DECL_CONTEXT (decl);
8759 while (context)
8760 switch (TREE_CODE (context))
8762 case NAMESPACE_DECL:
8763 case TRANSLATION_UNIT_DECL:
8764 return NULL_TREE;
8766 case RECORD_TYPE:
8767 case UNION_TYPE:
8768 case QUAL_UNION_TYPE:
8769 return context;
8771 case TYPE_DECL:
8772 case FUNCTION_DECL:
8773 context = DECL_CONTEXT (context);
8774 break;
8776 case BLOCK:
8777 context = BLOCK_SUPERCONTEXT (context);
8778 break;
8780 default:
8781 gcc_unreachable ();
8784 return NULL_TREE;
8787 /* CALL is a CALL_EXPR. Return the declaration for the function
8788 called, or NULL_TREE if the called function cannot be
8789 determined. */
8791 tree
8792 get_callee_fndecl (const_tree call)
8794 tree addr;
8796 if (call == error_mark_node)
8797 return error_mark_node;
8799 /* It's invalid to call this function with anything but a
8800 CALL_EXPR. */
8801 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8803 /* The first operand to the CALL is the address of the function
8804 called. */
8805 addr = CALL_EXPR_FN (call);
8807 /* If there is no function, return early. */
8808 if (addr == NULL_TREE)
8809 return NULL_TREE;
8811 STRIP_NOPS (addr);
8813 /* If this is a readonly function pointer, extract its initial value. */
8814 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8815 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8816 && DECL_INITIAL (addr))
8817 addr = DECL_INITIAL (addr);
8819 /* If the address is just `&f' for some function `f', then we know
8820 that `f' is being called. */
8821 if (TREE_CODE (addr) == ADDR_EXPR
8822 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8823 return TREE_OPERAND (addr, 0);
8825 /* We couldn't figure out what was being called. */
8826 return NULL_TREE;
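/* Usage sketch (illustrative only): for a direct call such as foo (x),
   CALL_EXPR_FN is an ADDR_EXPR of the FUNCTION_DECL for foo, so

     tree fndecl = get_callee_fndecl (call);

   returns that FUNCTION_DECL; for an indirect call through a pointer
   whose value is unknown it returns NULL_TREE.  */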
8829 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8830 return the associated function code, otherwise return CFN_LAST. */
8832 combined_fn
8833 get_call_combined_fn (const_tree call)
8835 /* It's invalid to call this function with anything but a CALL_EXPR. */
8836 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8838 if (!CALL_EXPR_FN (call))
8839 return as_combined_fn (CALL_EXPR_IFN (call));
8841 tree fndecl = get_callee_fndecl (call);
8842 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8843 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8845 return CFN_LAST;
8848 #define TREE_MEM_USAGE_SPACES 40
8850 /* Print debugging information about tree nodes generated during the compile,
8851 and any language-specific information. */
8853 void
8854 dump_tree_statistics (void)
8856 if (GATHER_STATISTICS)
8858 int i;
8859 int total_nodes, total_bytes;
8860 fprintf (stderr, "\nKind Nodes Bytes\n");
8861 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8862 total_nodes = total_bytes = 0;
8863 for (i = 0; i < (int) all_kinds; i++)
8865 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8866 tree_node_counts[i], tree_node_sizes[i]);
8867 total_nodes += tree_node_counts[i];
8868 total_bytes += tree_node_sizes[i];
8870 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8871 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8872 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8873 fprintf (stderr, "Code Nodes\n");
8874 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8875 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8876 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
8877 tree_code_counts[i]);
8878 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8879 fprintf (stderr, "\n");
8880 ssanames_print_statistics ();
8881 fprintf (stderr, "\n");
8882 phinodes_print_statistics ();
8883 fprintf (stderr, "\n");
8885 else
8886 fprintf (stderr, "(No per-node statistics)\n");
8888 print_type_hash_statistics ();
8889 print_debug_expr_statistics ();
8890 print_value_expr_statistics ();
8891 lang_hooks.print_statistics ();
8894 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8896 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8898 unsigned
8899 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8901 /* This relies on the raw feedback's top 4 bits being zero. */
8902 #define FEEDBACK(X) ((X) * 0x04c11db7)
8903 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8904 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8905 static const unsigned syndromes[16] =
8907 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8908 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8909 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8910 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8912 #undef FEEDBACK
8913 #undef SYNDROME
8915 value <<= (32 - bytes * 8);
8916 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8918 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8920 chksum = (chksum << 4) ^ feedback;
8923 return chksum;
8926 /* Generate a crc32 of a string. */
8928 unsigned
8929 crc32_string (unsigned chksum, const char *string)
8931 do
8932 chksum = crc32_byte (chksum, *string);
8933 while (*string++);
8934 return chksum;
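/* Usage sketch (illustrative only): checksums accumulate, so several
   inputs can be chained, e.g.

     unsigned chk = crc32_string (0, name);
     chk = crc32_unsigned_n (chk, value, 4);

   where NAME and VALUE stand for arbitrary caller data;
   get_file_function_name below uses crc32_string in this way when
   building a unique identifier.  */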
8937 /* P is a string that will be used in a symbol. Mask out any characters
8938 that are not valid in that context. */
8940 void
8941 clean_symbol_name (char *p)
8943 for (; *p; p++)
8944 if (! (ISALNUM (*p)
8945 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8946 || *p == '$'
8947 #endif
8948 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8949 || *p == '.'
8950 #endif
8952 *p = '_';
8955 /* For anonymous aggregate types, we need some sort of name to
8956 hold on to. In practice, this should not appear, but it should
8957 not be harmful if it does. */
8958 bool
8959 anon_aggrname_p (const_tree id_node)
8961 #ifndef NO_DOT_IN_LABEL
8962 return (IDENTIFIER_POINTER (id_node)[0] == '.'
8963 && IDENTIFIER_POINTER (id_node)[1] == '_');
8964 #else /* NO_DOT_IN_LABEL */
8965 #ifndef NO_DOLLAR_IN_LABEL
8966 return (IDENTIFIER_POINTER (id_node)[0] == '$'
8967 && IDENTIFIER_POINTER (id_node)[1] == '_');
8968 #else /* NO_DOLLAR_IN_LABEL */
8969 #define ANON_AGGRNAME_PREFIX "__anon_"
8970 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
8971 sizeof (ANON_AGGRNAME_PREFIX) - 1));
8972 #endif /* NO_DOLLAR_IN_LABEL */
8973 #endif /* NO_DOT_IN_LABEL */
8976 /* Return a format for an anonymous aggregate name. */
8977 const char *
8978 anon_aggrname_format ()
8980 #ifndef NO_DOT_IN_LABEL
8981 return "._%d";
8982 #else /* NO_DOT_IN_LABEL */
8983 #ifndef NO_DOLLAR_IN_LABEL
8984 return "$_%d";
8985 #else /* NO_DOLLAR_IN_LABEL */
8986 return "__anon_%d";
8987 #endif /* NO_DOLLAR_IN_LABEL */
8988 #endif /* NO_DOT_IN_LABEL */
8991 /* Generate a name for a special-purpose function.
8992 The generated name may need to be unique across the whole link.
8993 Changes to this function may also require corresponding changes to
8994 xstrdup_mask_random.
8995 TYPE is some string to identify the purpose of this function to the
8996 linker or collect2; it must start with an uppercase letter,
8997 one of:
8998 I - for constructors
8999 D - for destructors
9000 N - for C++ anonymous namespaces
9001 F - for DWARF unwind frame information. */
9003 tree
9004 get_file_function_name (const char *type)
9006 char *buf;
9007 const char *p;
9008 char *q;
9010 /* If we already have a name we know to be unique, just use that. */
9011 if (first_global_object_name)
9012 p = q = ASTRDUP (first_global_object_name);
9013 /* If the target is handling the constructors/destructors, they
9014 will be local to this file and the name is only necessary for
9015 debugging purposes.
9016 We also assign sub_I and sub_D suffixes to constructors called from
9017 the global static constructors. These are always local. */
9018 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9019 || (strncmp (type, "sub_", 4) == 0
9020 && (type[4] == 'I' || type[4] == 'D')))
9022 const char *file = main_input_filename;
9023 if (! file)
9024 file = LOCATION_FILE (input_location);
9025 /* Just use the file's basename, because the full pathname
9026 might be quite long. */
9027 p = q = ASTRDUP (lbasename (file));
9029 else
9031 /* Otherwise, the name must be unique across the entire link.
9032 We don't have anything that we know to be unique to this translation
9033 unit, so use what we do have and throw in some randomness. */
9034 unsigned len;
9035 const char *name = weak_global_object_name;
9036 const char *file = main_input_filename;
9038 if (! name)
9039 name = "";
9040 if (! file)
9041 file = LOCATION_FILE (input_location);
9043 len = strlen (file);
9044 q = (char *) alloca (9 + 19 + len + 1);
9045 memcpy (q, file, len + 1);
9047 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9048 crc32_string (0, name), get_random_seed (false));
9050 p = q;
9053 clean_symbol_name (q);
9054 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9055 + strlen (type));
9057 /* Set up the name of the file-level functions we may need.
9058 Use a global object (which is already required to be unique over
9059 the program) rather than the file name (which imposes extra
9060 constraints). */
9061 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9063 return get_identifier (buf);
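/* Usage sketch (illustrative only): a constructor symbol for a file is
   requested as

     tree id = get_file_function_name ("I");

   and, via FILE_FUNCTION_FORMAT, ID names something of the form
   _GLOBAL__I_<suffix>, where <suffix> is derived from the first global
   object name or the input file as described above.  */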
9066 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9068 /* Complain that the tree code of NODE does not match the expected 0
9069 terminated list of trailing codes. The trailing code list can be
9070 empty, for a more vague error message. FILE, LINE, and FUNCTION
9071 are of the caller. */
9073 void
9074 tree_check_failed (const_tree node, const char *file,
9075 int line, const char *function, ...)
9077 va_list args;
9078 const char *buffer;
9079 unsigned length = 0;
9080 enum tree_code code;
9082 va_start (args, function);
9083 while ((code = (enum tree_code) va_arg (args, int)))
9084 length += 4 + strlen (get_tree_code_name (code));
9085 va_end (args);
9086 if (length)
9088 char *tmp;
9089 va_start (args, function);
9090 length += strlen ("expected ");
9091 buffer = tmp = (char *) alloca (length);
9092 length = 0;
9093 while ((code = (enum tree_code) va_arg (args, int)))
9095 const char *prefix = length ? " or " : "expected ";
9097 strcpy (tmp + length, prefix);
9098 length += strlen (prefix);
9099 strcpy (tmp + length, get_tree_code_name (code));
9100 length += strlen (get_tree_code_name (code));
9102 va_end (args);
9104 else
9105 buffer = "unexpected node";
9107 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9108 buffer, get_tree_code_name (TREE_CODE (node)),
9109 function, trim_filename (file), line);
9112 /* Complain that the tree code of NODE does match the expected 0
9113 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9114 the caller. */
9116 void
9117 tree_not_check_failed (const_tree node, const char *file,
9118 int line, const char *function, ...)
9120 va_list args;
9121 char *buffer;
9122 unsigned length = 0;
9123 enum tree_code code;
9125 va_start (args, function);
9126 while ((code = (enum tree_code) va_arg (args, int)))
9127 length += 4 + strlen (get_tree_code_name (code));
9128 va_end (args);
9129 va_start (args, function);
9130 buffer = (char *) alloca (length);
9131 length = 0;
9132 while ((code = (enum tree_code) va_arg (args, int)))
9134 if (length)
9136 strcpy (buffer + length, " or ");
9137 length += 4;
9139 strcpy (buffer + length, get_tree_code_name (code));
9140 length += strlen (get_tree_code_name (code));
9142 va_end (args);
9144 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9145 buffer, get_tree_code_name (TREE_CODE (node)),
9146 function, trim_filename (file), line);
9149 /* Similar to tree_check_failed, except that we check for a class of tree
9150 code, given in CL. */
9152 void
9153 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9154 const char *file, int line, const char *function)
9156 internal_error
9157 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9158 TREE_CODE_CLASS_STRING (cl),
9159 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9160 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9163 /* Similar to tree_check_failed, except that instead of specifying a
9164 dozen codes, use the knowledge that they're all sequential. */
9166 void
9167 tree_range_check_failed (const_tree node, const char *file, int line,
9168 const char *function, enum tree_code c1,
9169 enum tree_code c2)
9171 char *buffer;
9172 unsigned length = 0;
9173 unsigned int c;
9175 for (c = c1; c <= c2; ++c)
9176 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9178 length += strlen ("expected ");
9179 buffer = (char *) alloca (length);
9180 length = 0;
9182 for (c = c1; c <= c2; ++c)
9184 const char *prefix = length ? " or " : "expected ";
9186 strcpy (buffer + length, prefix);
9187 length += strlen (prefix);
9188 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9189 length += strlen (get_tree_code_name ((enum tree_code) c));
9192 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9193 buffer, get_tree_code_name (TREE_CODE (node)),
9194 function, trim_filename (file), line);
9198 /* Similar to tree_check_failed, except that we check that a tree does
9199 not belong to the specified class of tree codes, given in CL. */
9201 void
9202 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9203 const char *file, int line, const char *function)
9205 internal_error
9206 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9207 TREE_CODE_CLASS_STRING (cl),
9208 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9209 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9213 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9215 void
9216 omp_clause_check_failed (const_tree node, const char *file, int line,
9217 const char *function, enum omp_clause_code code)
9219 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9220 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9221 function, trim_filename (file), line);
9225 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9227 void
9228 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9229 const char *function, enum omp_clause_code c1,
9230 enum omp_clause_code c2)
9232 char *buffer;
9233 unsigned length = 0;
9234 unsigned int c;
9236 for (c = c1; c <= c2; ++c)
9237 length += 4 + strlen (omp_clause_code_name[c]);
9239 length += strlen ("expected ");
9240 buffer = (char *) alloca (length);
9241 length = 0;
9243 for (c = c1; c <= c2; ++c)
9245 const char *prefix = length ? " or " : "expected ";
9247 strcpy (buffer + length, prefix);
9248 length += strlen (prefix);
9249 strcpy (buffer + length, omp_clause_code_name[c]);
9250 length += strlen (omp_clause_code_name[c]);
9253 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9254 buffer, omp_clause_code_name[TREE_CODE (node)],
9255 function, trim_filename (file), line);
9259 #undef DEFTREESTRUCT
9260 #define DEFTREESTRUCT(VAL, NAME) NAME,
9262 static const char *ts_enum_names[] = {
9263 #include "treestruct.def"
9265 #undef DEFTREESTRUCT
9267 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9269 /* Similar to tree_class_check_failed, except that we check for
9270 whether CODE contains the tree structure identified by EN. */
9272 void
9273 tree_contains_struct_check_failed (const_tree node,
9274 const enum tree_node_structure_enum en,
9275 const char *file, int line,
9276 const char *function)
9278 internal_error
9279 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9280 TS_ENUM_NAME (en),
9281 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9285 /* Similar to above, except that the check is for the bounds of a
9286 tree_int_cst's (dynamically sized) vector of elements. */
9288 void
9289 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9290 const char *function)
9292 internal_error
9293 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9294 idx + 1, len, function, trim_filename (file), line);
9297 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9298 (dynamically sized) vector. */
9300 void
9301 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9302 const char *function)
9304 internal_error
9305 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9306 idx + 1, len, function, trim_filename (file), line);
9309 /* Similar to above, except that the check is for the bounds of the operand
9310 vector of an expression node EXP. */
9312 void
9313 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9314 int line, const char *function)
9316 enum tree_code code = TREE_CODE (exp);
9317 internal_error
9318 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9319 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9320 function, trim_filename (file), line);
9323 /* Similar to above, except that the check is for the number of
9324 operands of an OMP_CLAUSE node. */
9326 void
9327 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9328 int line, const char *function)
9330 internal_error
9331 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9332 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9333 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9334 trim_filename (file), line);
9336 #endif /* ENABLE_TREE_CHECKING */
9338 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9339 and mapped to the machine mode MODE. Initialize its fields and build
9340 the information necessary for debugging output. */
9342 static tree
9343 make_vector_type (tree innertype, int nunits, machine_mode mode)
9345 tree t;
9346 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9348 t = make_node (VECTOR_TYPE);
9349 TREE_TYPE (t) = mv_innertype;
9350 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9351 SET_TYPE_MODE (t, mode);
9353 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9354 SET_TYPE_STRUCTURAL_EQUALITY (t);
9355 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9356 || mode != VOIDmode)
9357 && !VECTOR_BOOLEAN_TYPE_P (t))
9358 TYPE_CANONICAL (t)
9359 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9361 layout_type (t);
9363 hashval_t hash = type_hash_canon_hash (t);
9364 t = type_hash_canon (hash, t);
9366 /* We have built a main variant, based on the main variant of the
9367 inner type. Use it to build the variant we return. */
9368 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9369 && TREE_TYPE (t) != innertype)
9370 return build_type_attribute_qual_variant (t,
9371 TYPE_ATTRIBUTES (innertype),
9372 TYPE_QUALS (innertype));
9374 return t;
9377 static tree
9378 make_or_reuse_type (unsigned size, int unsignedp)
9380 int i;
9382 if (size == INT_TYPE_SIZE)
9383 return unsignedp ? unsigned_type_node : integer_type_node;
9384 if (size == CHAR_TYPE_SIZE)
9385 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9386 if (size == SHORT_TYPE_SIZE)
9387 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9388 if (size == LONG_TYPE_SIZE)
9389 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9390 if (size == LONG_LONG_TYPE_SIZE)
9391 return (unsignedp ? long_long_unsigned_type_node
9392 : long_long_integer_type_node);
9394 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9395 if (size == int_n_data[i].bitsize
9396 && int_n_enabled_p[i])
9397 return (unsignedp ? int_n_trees[i].unsigned_type
9398 : int_n_trees[i].signed_type);
9400 if (unsignedp)
9401 return make_unsigned_type (size);
9402 else
9403 return make_signed_type (size);
9406 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9408 static tree
9409 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9411 if (satp)
9413 if (size == SHORT_FRACT_TYPE_SIZE)
9414 return unsignedp ? sat_unsigned_short_fract_type_node
9415 : sat_short_fract_type_node;
9416 if (size == FRACT_TYPE_SIZE)
9417 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9418 if (size == LONG_FRACT_TYPE_SIZE)
9419 return unsignedp ? sat_unsigned_long_fract_type_node
9420 : sat_long_fract_type_node;
9421 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9422 return unsignedp ? sat_unsigned_long_long_fract_type_node
9423 : sat_long_long_fract_type_node;
9425 else
9427 if (size == SHORT_FRACT_TYPE_SIZE)
9428 return unsignedp ? unsigned_short_fract_type_node
9429 : short_fract_type_node;
9430 if (size == FRACT_TYPE_SIZE)
9431 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9432 if (size == LONG_FRACT_TYPE_SIZE)
9433 return unsignedp ? unsigned_long_fract_type_node
9434 : long_fract_type_node;
9435 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9436 return unsignedp ? unsigned_long_long_fract_type_node
9437 : long_long_fract_type_node;
9440 return make_fract_type (size, unsignedp, satp);
9443 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9445 static tree
9446 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9448 if (satp)
9450 if (size == SHORT_ACCUM_TYPE_SIZE)
9451 return unsignedp ? sat_unsigned_short_accum_type_node
9452 : sat_short_accum_type_node;
9453 if (size == ACCUM_TYPE_SIZE)
9454 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9455 if (size == LONG_ACCUM_TYPE_SIZE)
9456 return unsignedp ? sat_unsigned_long_accum_type_node
9457 : sat_long_accum_type_node;
9458 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9459 return unsignedp ? sat_unsigned_long_long_accum_type_node
9460 : sat_long_long_accum_type_node;
9462 else
9464 if (size == SHORT_ACCUM_TYPE_SIZE)
9465 return unsignedp ? unsigned_short_accum_type_node
9466 : short_accum_type_node;
9467 if (size == ACCUM_TYPE_SIZE)
9468 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9469 if (size == LONG_ACCUM_TYPE_SIZE)
9470 return unsignedp ? unsigned_long_accum_type_node
9471 : long_accum_type_node;
9472 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9473 return unsignedp ? unsigned_long_long_accum_type_node
9474 : long_long_accum_type_node;
9477 return make_accum_type (size, unsignedp, satp);
9481 /* Create an atomic variant node for TYPE. This routine is called
9482 during initialization of data types to create the 5 basic atomic
9483 types. The generic build_variant_type function requires these to
9484 already be set up in order to function properly, so cannot be
9485 called from there. If ALIGN is non-zero, then ensure alignment is
9486 overridden to this value. */
9488 static tree
9489 build_atomic_base (tree type, unsigned int align)
9491 tree t;
9493 /* Make sure it's not already registered. */
9494 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9495 return t;
9497 t = build_variant_type_copy (type);
9498 set_type_quals (t, TYPE_QUAL_ATOMIC);
9500 if (align)
9501 SET_TYPE_ALIGN (t, align);
9503 return t;
9506 /* Information about the _FloatN and _FloatNx types. This must be in
9507 the same order as the corresponding TI_* enum values. */
9508 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9510 { 16, false },
9511 { 32, false },
9512 { 64, false },
9513 { 128, false },
9514 { 32, true },
9515 { 64, true },
9516 { 128, true },
9520 /* Create nodes for all integer types (and error_mark_node) using the sizes
9521 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9523 void
9524 build_common_tree_nodes (bool signed_char)
9526 int i;
9528 error_mark_node = make_node (ERROR_MARK);
9529 TREE_TYPE (error_mark_node) = error_mark_node;
9531 initialize_sizetypes ();
9533 /* Define both `signed char' and `unsigned char'. */
9534 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9535 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9536 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9537 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9539 /* Define `char', which is like either `signed char' or `unsigned char'
9540 but not the same as either. */
9541 char_type_node
9542 = (signed_char
9543 ? make_signed_type (CHAR_TYPE_SIZE)
9544 : make_unsigned_type (CHAR_TYPE_SIZE));
9545 TYPE_STRING_FLAG (char_type_node) = 1;
9547 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9548 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9549 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9550 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9551 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9552 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9553 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9554 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9556 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9558 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9559 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9560 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9561 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9563 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9564 && int_n_enabled_p[i])
9566 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9567 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9571 /* Define a boolean type. This type only represents boolean values but
9572 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9573 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9574 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9575 TYPE_PRECISION (boolean_type_node) = 1;
9576 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9578 /* Define what type to use for size_t. */
9579 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9580 size_type_node = unsigned_type_node;
9581 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9582 size_type_node = long_unsigned_type_node;
9583 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9584 size_type_node = long_long_unsigned_type_node;
9585 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9586 size_type_node = short_unsigned_type_node;
9587 else
9589 int i;
9591 size_type_node = NULL_TREE;
9592 for (i = 0; i < NUM_INT_N_ENTS; i++)
9593 if (int_n_enabled_p[i])
9595 char name[50];
9596 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9598 if (strcmp (name, SIZE_TYPE) == 0)
9600 size_type_node = int_n_trees[i].unsigned_type;
9603 if (size_type_node == NULL_TREE)
9604 gcc_unreachable ();
9607 /* Define what type to use for ptrdiff_t. */
9608 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9609 ptrdiff_type_node = integer_type_node;
9610 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9611 ptrdiff_type_node = long_integer_type_node;
9612 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9613 ptrdiff_type_node = long_long_integer_type_node;
9614 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9615 ptrdiff_type_node = short_integer_type_node;
9616 else
9618 ptrdiff_type_node = NULL_TREE;
9619 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9620 if (int_n_enabled_p[i])
9622 char name[50];
9623 sprintf (name, "__int%d", int_n_data[i].bitsize);
9624 if (strcmp (name, PTRDIFF_TYPE) == 0)
9625 ptrdiff_type_node = int_n_trees[i].signed_type;
9627 if (ptrdiff_type_node == NULL_TREE)
9628 gcc_unreachable ();
9631 /* Fill in the rest of the sized types. Reuse existing type nodes
9632 when possible. */
9633 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9634 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9635 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9636 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9637 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9639 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9640 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9641 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9642 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9643 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9645 /* Don't call build_qualified_type for atomics. That routine does
9646 special processing for atomics, and until they are initialized
9647 it's better not to make that call.
9649 Check to see if there is a target override for atomic types. */
9651 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9652 targetm.atomic_align_for_mode (QImode));
9653 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9654 targetm.atomic_align_for_mode (HImode));
9655 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9656 targetm.atomic_align_for_mode (SImode));
9657 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9658 targetm.atomic_align_for_mode (DImode));
9659 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9660 targetm.atomic_align_for_mode (TImode));
9662 access_public_node = get_identifier ("public");
9663 access_protected_node = get_identifier ("protected");
9664 access_private_node = get_identifier ("private");
9666 /* Define these next since types below may use them. */
9667 integer_zero_node = build_int_cst (integer_type_node, 0);
9668 integer_one_node = build_int_cst (integer_type_node, 1);
9669 integer_three_node = build_int_cst (integer_type_node, 3);
9670 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9672 size_zero_node = size_int (0);
9673 size_one_node = size_int (1);
9674 bitsize_zero_node = bitsize_int (0);
9675 bitsize_one_node = bitsize_int (1);
9676 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9678 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9679 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9681 void_type_node = make_node (VOID_TYPE);
9682 layout_type (void_type_node);
9684 pointer_bounds_type_node = targetm.chkp_bound_type ();
9686 /* We are not going to have real types in C with less than byte alignment,
9687 so we might as well not have any types that claim to have it. */
9688 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9689 TYPE_USER_ALIGN (void_type_node) = 0;
9691 void_node = make_node (VOID_CST);
9692 TREE_TYPE (void_node) = void_type_node;
9694 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9695 layout_type (TREE_TYPE (null_pointer_node));
9697 ptr_type_node = build_pointer_type (void_type_node);
9698 const_ptr_type_node
9699 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9700 for (unsigned i = 0;
9701 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9702 ++i)
9703 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9705 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9707 float_type_node = make_node (REAL_TYPE);
9708 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9709 layout_type (float_type_node);
9711 double_type_node = make_node (REAL_TYPE);
9712 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9713 layout_type (double_type_node);
9715 long_double_type_node = make_node (REAL_TYPE);
9716 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9717 layout_type (long_double_type_node);
9719 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9721 int n = floatn_nx_types[i].n;
9722 bool extended = floatn_nx_types[i].extended;
9723 scalar_float_mode mode;
9724 if (!targetm.floatn_mode (n, extended).exists (&mode))
9725 continue;
9726 int precision = GET_MODE_PRECISION (mode);
9727 /* Work around the rs6000 KFmode having precision 113 not
9728 128. */
9729 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9730 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9731 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9732 if (!extended)
9733 gcc_assert (min_precision == n);
9734 if (precision < min_precision)
9735 precision = min_precision;
9736 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9737 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9738 layout_type (FLOATN_NX_TYPE_NODE (i));
9739 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9742 float_ptr_type_node = build_pointer_type (float_type_node);
9743 double_ptr_type_node = build_pointer_type (double_type_node);
9744 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9745 integer_ptr_type_node = build_pointer_type (integer_type_node);
9747 /* Fixed size integer types. */
9748 uint16_type_node = make_or_reuse_type (16, 1);
9749 uint32_type_node = make_or_reuse_type (32, 1);
9750 uint64_type_node = make_or_reuse_type (64, 1);
9752 /* Decimal float types. */
9753 dfloat32_type_node = make_node (REAL_TYPE);
9754 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9755 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9756 layout_type (dfloat32_type_node);
9757 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9759 dfloat64_type_node = make_node (REAL_TYPE);
9760 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9761 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9762 layout_type (dfloat64_type_node);
9763 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9765 dfloat128_type_node = make_node (REAL_TYPE);
9766 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9767 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9768 layout_type (dfloat128_type_node);
9769 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9771 complex_integer_type_node = build_complex_type (integer_type_node, true);
9772 complex_float_type_node = build_complex_type (float_type_node, true);
9773 complex_double_type_node = build_complex_type (double_type_node, true);
9774 complex_long_double_type_node = build_complex_type (long_double_type_node,
9775 true);
9777 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9779 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9780 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9781 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9784 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9785 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9786 sat_ ## KIND ## _type_node = \
9787 make_sat_signed_ ## KIND ## _type (SIZE); \
9788 sat_unsigned_ ## KIND ## _type_node = \
9789 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9790 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9791 unsigned_ ## KIND ## _type_node = \
9792 make_unsigned_ ## KIND ## _type (SIZE);
9794 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9795 sat_ ## WIDTH ## KIND ## _type_node = \
9796 make_sat_signed_ ## KIND ## _type (SIZE); \
9797 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9798 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9799 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9800 unsigned_ ## WIDTH ## KIND ## _type_node = \
9801 make_unsigned_ ## KIND ## _type (SIZE);
9803 /* Make fixed-point type nodes based on four different widths. */
9804 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9805 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9806 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9807 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9808 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9810 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9811 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9812 NAME ## _type_node = \
9813 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9814 u ## NAME ## _type_node = \
9815 make_or_reuse_unsigned_ ## KIND ## _type \
9816 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9817 sat_ ## NAME ## _type_node = \
9818 make_or_reuse_sat_signed_ ## KIND ## _type \
9819 (GET_MODE_BITSIZE (MODE ## mode)); \
9820 sat_u ## NAME ## _type_node = \
9821 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9822 (GET_MODE_BITSIZE (U ## MODE ## mode));
9824 /* Fixed-point type and mode nodes. */
9825 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9826 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9827 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9828 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9829 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9830 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9831 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9832 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9833 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9834 MAKE_FIXED_MODE_NODE (accum, da, DA)
9835 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9838 tree t = targetm.build_builtin_va_list ();
9840 /* Many back-ends define record types without setting TYPE_NAME.
9841 If we copied the record type here, we'd keep the original
9842 record type without a name. This breaks name mangling. So,
9843 don't copy record types and let c_common_nodes_and_builtins()
9844 declare the type to be __builtin_va_list. */
9845 if (TREE_CODE (t) != RECORD_TYPE)
9846 t = build_variant_type_copy (t);
9848 va_list_type_node = t;
9852 /* Modify DECL for given flags.
9853 TM_PURE attribute is set only on types, so the function will modify
9854 DECL's type when ECF_TM_PURE is used. */
9856 void
9857 set_call_expr_flags (tree decl, int flags)
9859 if (flags & ECF_NOTHROW)
9860 TREE_NOTHROW (decl) = 1;
9861 if (flags & ECF_CONST)
9862 TREE_READONLY (decl) = 1;
9863 if (flags & ECF_PURE)
9864 DECL_PURE_P (decl) = 1;
9865 if (flags & ECF_LOOPING_CONST_OR_PURE)
9866 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9867 if (flags & ECF_NOVOPS)
9868 DECL_IS_NOVOPS (decl) = 1;
9869 if (flags & ECF_NORETURN)
9870 TREE_THIS_VOLATILE (decl) = 1;
9871 if (flags & ECF_MALLOC)
9872 DECL_IS_MALLOC (decl) = 1;
9873 if (flags & ECF_RETURNS_TWICE)
9874 DECL_IS_RETURNS_TWICE (decl) = 1;
9875 if (flags & ECF_LEAF)
9876 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9877 NULL, DECL_ATTRIBUTES (decl));
9878 if (flags & ECF_COLD)
9879 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9880 NULL, DECL_ATTRIBUTES (decl));
9881 if (flags & ECF_RET1)
9882 DECL_ATTRIBUTES (decl)
9883 = tree_cons (get_identifier ("fn spec"),
9884 build_tree_list (NULL_TREE, build_string (1, "1")),
9885 DECL_ATTRIBUTES (decl));
9886 if ((flags & ECF_TM_PURE) && flag_tm)
9887 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9888 /* Looping const or pure is implied by noreturn.
9889 There is currently no way to declare looping const or looping pure alone. */
9890 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9891 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
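/* Usage sketch (editorial note, not part of the original source): a typical
   call combines several ECF_* bits, e.g.

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF | ECF_CONST);

   sets TREE_NOTHROW and TREE_READONLY on DECL and prepends a "leaf"
   attribute to DECL_ATTRIBUTES (decl), per the code above.  Note the final
   assertion: ECF_LOOPING_CONST_OR_PURE may only be passed together with
   ECF_NORETURN and one of ECF_CONST or ECF_PURE.  */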
9895 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9897 static void
9898 local_define_builtin (const char *name, tree type, enum built_in_function code,
9899 const char *library_name, int ecf_flags)
9901 tree decl;
9903 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9904 library_name, NULL_TREE);
9905 set_call_expr_flags (decl, ecf_flags);
9907 set_builtin_decl (code, decl, true);
9910 /* Call this function after instantiating all builtins that the language
9911 front end cares about. This will build the rest of the builtins
9912 and internal functions that are relied upon by the tree optimizers and
9913 the middle-end. */
9915 void
9916 build_common_builtin_nodes (void)
9918 tree tmp, ftype;
9919 int ecf_flags;
9921 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9922 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9924 ftype = build_function_type (void_type_node, void_list_node);
9925 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9926 local_define_builtin ("__builtin_unreachable", ftype,
9927 BUILT_IN_UNREACHABLE,
9928 "__builtin_unreachable",
9929 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9930 | ECF_CONST | ECF_COLD);
9931 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9932 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9933 "abort",
9934 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9937 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9938 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9940 ftype = build_function_type_list (ptr_type_node,
9941 ptr_type_node, const_ptr_type_node,
9942 size_type_node, NULL_TREE);
9944 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9945 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9946 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9947 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9948 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9949 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9952 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9954 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9955 const_ptr_type_node, size_type_node,
9956 NULL_TREE);
9957 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9958 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9961 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9963 ftype = build_function_type_list (ptr_type_node,
9964 ptr_type_node, integer_type_node,
9965 size_type_node, NULL_TREE);
9966 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9967 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9970 /* If we're checking the stack, `alloca' can throw. */
9971 const int alloca_flags
9972 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9974 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9976 ftype = build_function_type_list (ptr_type_node,
9977 size_type_node, NULL_TREE);
9978 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9979 "alloca", alloca_flags);
9982 ftype = build_function_type_list (ptr_type_node, size_type_node,
9983 size_type_node, NULL_TREE);
9984 local_define_builtin ("__builtin_alloca_with_align", ftype,
9985 BUILT_IN_ALLOCA_WITH_ALIGN,
9986 "__builtin_alloca_with_align",
9987 alloca_flags);
9989 ftype = build_function_type_list (ptr_type_node, size_type_node,
9990 size_type_node, size_type_node, NULL_TREE);
9991 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9992 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9993 "__builtin_alloca_with_align_and_max",
9994 alloca_flags);
9996 ftype = build_function_type_list (void_type_node,
9997 ptr_type_node, ptr_type_node,
9998 ptr_type_node, NULL_TREE);
9999 local_define_builtin ("__builtin_init_trampoline", ftype,
10000 BUILT_IN_INIT_TRAMPOLINE,
10001 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10002 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10003 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10004 "__builtin_init_heap_trampoline",
10005 ECF_NOTHROW | ECF_LEAF);
10006 local_define_builtin ("__builtin_init_descriptor", ftype,
10007 BUILT_IN_INIT_DESCRIPTOR,
10008 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10010 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10011 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10012 BUILT_IN_ADJUST_TRAMPOLINE,
10013 "__builtin_adjust_trampoline",
10014 ECF_CONST | ECF_NOTHROW);
10015 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10016 BUILT_IN_ADJUST_DESCRIPTOR,
10017 "__builtin_adjust_descriptor",
10018 ECF_CONST | ECF_NOTHROW);
10020 ftype = build_function_type_list (void_type_node,
10021 ptr_type_node, ptr_type_node, NULL_TREE);
10022 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10023 BUILT_IN_NONLOCAL_GOTO,
10024 "__builtin_nonlocal_goto",
10025 ECF_NORETURN | ECF_NOTHROW);
10027 ftype = build_function_type_list (void_type_node,
10028 ptr_type_node, ptr_type_node, NULL_TREE);
10029 local_define_builtin ("__builtin_setjmp_setup", ftype,
10030 BUILT_IN_SETJMP_SETUP,
10031 "__builtin_setjmp_setup", ECF_NOTHROW);
10033 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10034 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10035 BUILT_IN_SETJMP_RECEIVER,
10036 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10038 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10039 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10040 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10042 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10043 local_define_builtin ("__builtin_stack_restore", ftype,
10044 BUILT_IN_STACK_RESTORE,
10045 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10047 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10048 const_ptr_type_node, size_type_node,
10049 NULL_TREE);
10050 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10051 "__builtin_memcmp_eq",
10052 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10054 /* If there's a possibility that we might use the ARM EABI, build the
10055 alternate __cxa_end_cleanup node used to resume from C++. */
10056 if (targetm.arm_eabi_unwinder)
10058 ftype = build_function_type_list (void_type_node, NULL_TREE);
10059 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10060 BUILT_IN_CXA_END_CLEANUP,
10061 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10064 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10065 local_define_builtin ("__builtin_unwind_resume", ftype,
10066 BUILT_IN_UNWIND_RESUME,
10067 ((targetm_common.except_unwind_info (&global_options)
10068 == UI_SJLJ)
10069 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10070 ECF_NORETURN);
10072 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10074 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10075 NULL_TREE);
10076 local_define_builtin ("__builtin_return_address", ftype,
10077 BUILT_IN_RETURN_ADDRESS,
10078 "__builtin_return_address",
10079 ECF_NOTHROW);
10082 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10083 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10085 ftype = build_function_type_list (void_type_node, ptr_type_node,
10086 ptr_type_node, NULL_TREE);
10087 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10088 local_define_builtin ("__cyg_profile_func_enter", ftype,
10089 BUILT_IN_PROFILE_FUNC_ENTER,
10090 "__cyg_profile_func_enter", 0);
10091 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10092 local_define_builtin ("__cyg_profile_func_exit", ftype,
10093 BUILT_IN_PROFILE_FUNC_EXIT,
10094 "__cyg_profile_func_exit", 0);
10097 /* The exception object and filter values from the runtime. The argument
10098 must be zero before exception lowering, i.e. from the front end. After
10099 exception lowering, it will be the region number for the exception
10100 landing pad. These functions are PURE instead of CONST to prevent
10101 them from being hoisted past the exception edge that will initialize
10102 its value in the landing pad. */
10103 ftype = build_function_type_list (ptr_type_node,
10104 integer_type_node, NULL_TREE);
10105 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10106 /* Only use TM_PURE if we have TM language support. */
10107 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10108 ecf_flags |= ECF_TM_PURE;
10109 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10110 "__builtin_eh_pointer", ecf_flags);
10112 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10113 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10114 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10115 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10117 ftype = build_function_type_list (void_type_node,
10118 integer_type_node, integer_type_node,
10119 NULL_TREE);
10120 local_define_builtin ("__builtin_eh_copy_values", ftype,
10121 BUILT_IN_EH_COPY_VALUES,
10122 "__builtin_eh_copy_values", ECF_NOTHROW);
10124 /* Complex multiplication and division. These are handled as builtins
10125 rather than optabs because emit_library_call_value doesn't support
10126 complex. Further, we can do slightly better with folding these
10127 beasties if the real and imaginary parts of the arguments are separate. */
10129 int mode;
10131 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10133 char mode_name_buf[4], *q;
10134 const char *p;
10135 enum built_in_function mcode, dcode;
10136 tree type, inner_type;
10137 const char *prefix = "__";
10139 if (targetm.libfunc_gnu_prefix)
10140 prefix = "__gnu_";
10142 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10143 if (type == NULL)
10144 continue;
10145 inner_type = TREE_TYPE (type);
10147 ftype = build_function_type_list (type, inner_type, inner_type,
10148 inner_type, inner_type, NULL_TREE);
10150 mcode = ((enum built_in_function)
10151 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10152 dcode = ((enum built_in_function)
10153 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10155 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10156 *q = TOLOWER (*p);
10157 *q = '\0';
10159 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10160 NULL);
10161 local_define_builtin (built_in_names[mcode], ftype, mcode,
10162 built_in_names[mcode],
10163 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10165 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10166 NULL);
10167 local_define_builtin (built_in_names[dcode], ftype, dcode,
10168 built_in_names[dcode],
10169 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
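/* Worked example (editorial note, not part of the original source): for
   SCmode (complex single float) the mode name "SC" is lowercased to "sc",
   so the loop above registers "__mulsc3" and "__divsc3" (or "__gnu_mulsc3"
   and "__gnu_divsc3" when targetm.libfunc_gnu_prefix is set) as the names
   of the complex multiply and divide builtins for that mode.  */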
10173 init_internal_fns ();
10176 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10177 better way.
10179 If we requested a pointer to a vector, build up the pointers that
10180 we stripped off while looking for the inner type. Similarly for
10181 return values from functions.
10183 The argument TYPE is the top of the chain, and BOTTOM is the
10184 new type which we will point to. */
10186 tree
10187 reconstruct_complex_type (tree type, tree bottom)
10189 tree inner, outer;
10191 if (TREE_CODE (type) == POINTER_TYPE)
10193 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10194 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10195 TYPE_REF_CAN_ALIAS_ALL (type));
10197 else if (TREE_CODE (type) == REFERENCE_TYPE)
10199 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10200 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10201 TYPE_REF_CAN_ALIAS_ALL (type));
10203 else if (TREE_CODE (type) == ARRAY_TYPE)
10205 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10206 outer = build_array_type (inner, TYPE_DOMAIN (type));
10208 else if (TREE_CODE (type) == FUNCTION_TYPE)
10210 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10211 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10213 else if (TREE_CODE (type) == METHOD_TYPE)
10215 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10216 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10217 so we must compensate by getting rid of it. */
10218 outer
10219 = build_method_type_directly
10220 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10221 inner,
10222 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10224 else if (TREE_CODE (type) == OFFSET_TYPE)
10226 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10227 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10229 else
10230 return bottom;
10232 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10233 TYPE_QUALS (type));
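/* Illustrative example (editorial note, not part of the original source):
   if TYPE is "float *" and BOTTOM is a vector type such as V4SF, the inner
   call in the POINTER_TYPE case bottoms out and returns BOTTOM, so the
   result is a pointer to V4SF built with the original pointer's mode and
   can-alias-all flag, carrying the original type's attributes and
   qualifiers.  */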
10236 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10237 the inner type. */
10238 tree
10239 build_vector_type_for_mode (tree innertype, machine_mode mode)
10241 int nunits;
10242 unsigned int bitsize;
10244 switch (GET_MODE_CLASS (mode))
10246 case MODE_VECTOR_INT:
10247 case MODE_VECTOR_FLOAT:
10248 case MODE_VECTOR_FRACT:
10249 case MODE_VECTOR_UFRACT:
10250 case MODE_VECTOR_ACCUM:
10251 case MODE_VECTOR_UACCUM:
10252 nunits = GET_MODE_NUNITS (mode);
10253 break;
10255 case MODE_INT:
10256 /* Check that there are no leftover bits. */
10257 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10258 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10259 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10260 break;
10262 default:
10263 gcc_unreachable ();
10266 return make_vector_type (innertype, nunits, mode);
10269 /* Similarly, but takes the inner type and number of units, which must be
10270 a power of two. */
10272 tree
10273 build_vector_type (tree innertype, int nunits)
10275 return make_vector_type (innertype, nunits, VOIDmode);
10278 /* Build a truth vector type with NUNITS units for a vector of VECTOR_SIZE bytes. */
10280 tree
10281 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10283 machine_mode mask_mode
10284 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10286 unsigned HOST_WIDE_INT vsize;
10287 if (mask_mode == BLKmode)
10288 vsize = vector_size * BITS_PER_UNIT;
10289 else
10290 vsize = GET_MODE_BITSIZE (mask_mode);
10292 unsigned HOST_WIDE_INT esize = vsize / nunits;
10293 gcc_assert (esize * nunits == vsize);
10295 tree bool_type = build_nonstandard_boolean_type (esize);
10297 return make_vector_type (bool_type, nunits, mask_mode);
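/* Worked example (editorial note, not part of the original source):
   assuming the target provides no mask mode (so MASK_MODE is BLKmode) and
   BITS_PER_UNIT is 8, a call with NUNITS == 4 and VECTOR_SIZE == 16 gives
   VSIZE == 128 bits and ESIZE == 128 / 4 == 32, i.e. a vector of four
   32-bit boolean elements.  */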
10300 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10302 tree
10303 build_same_sized_truth_vector_type (tree vectype)
10305 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10306 return vectype;
10308 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10310 if (!size)
10311 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10313 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10316 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10318 tree
10319 build_opaque_vector_type (tree innertype, int nunits)
10321 tree t = make_vector_type (innertype, nunits, VOIDmode);
10322 tree cand;
10323 /* We always build the non-opaque variant before the opaque one,
10324 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10325 cand = TYPE_NEXT_VARIANT (t);
10326 if (cand
10327 && TYPE_VECTOR_OPAQUE (cand)
10328 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10329 return cand;
10330 /* Otherwise build a variant type and make sure to queue it after
10331 the non-opaque type. */
10332 cand = build_distinct_type_copy (t);
10333 TYPE_VECTOR_OPAQUE (cand) = true;
10334 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10335 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10336 TYPE_NEXT_VARIANT (t) = cand;
10337 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10338 return cand;
10342 /* Given an initializer INIT, return TRUE if INIT is zero or some
10343 aggregate of zeros. Otherwise return FALSE. */
10344 bool
10345 initializer_zerop (const_tree init)
10347 tree elt;
10349 STRIP_NOPS (init);
10351 switch (TREE_CODE (init))
10353 case INTEGER_CST:
10354 return integer_zerop (init);
10356 case REAL_CST:
10357 /* ??? Note that this is not correct for C4X float formats. There,
10358 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10359 negative exponent. */
10360 return real_zerop (init)
10361 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10363 case FIXED_CST:
10364 return fixed_zerop (init);
10366 case COMPLEX_CST:
10367 return integer_zerop (init)
10368 || (real_zerop (init)
10369 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10370 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10372 case VECTOR_CST:
10374 unsigned i;
10375 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10376 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10377 return false;
10378 return true;
10381 case CONSTRUCTOR:
10383 unsigned HOST_WIDE_INT idx;
10385 if (TREE_CLOBBER_P (init))
10386 return false;
10387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10388 if (!initializer_zerop (elt))
10389 return false;
10390 return true;
10393 case STRING_CST:
10395 int i;
10397 /* We need to loop through all elements to handle cases like
10398 "\0" and "\0foobar". */
10399 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10400 if (TREE_STRING_POINTER (init)[i] != '\0')
10401 return false;
10403 return true;
10406 default:
10407 return false;
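/* Examples (editorial note, not part of the original source): a zero
   INTEGER_CST, a 0.0 REAL_CST, and the string "\0\0\0" all count as zero
   initializers, whereas -0.0 (rejected by the REAL_VALUE_MINUS_ZERO check),
   the string "\0foo", and a clobber CONSTRUCTOR do not.  */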
10411 /* Check whether vector VEC consists entirely of equal elements and
10412 that the number of elements corresponds to the type of VEC.
10413 The function returns the first element of the vector,
10414 or NULL_TREE if the vector is not uniform. */
10415 tree
10416 uniform_vector_p (const_tree vec)
10418 tree first, t;
10419 unsigned i;
10421 if (vec == NULL_TREE)
10422 return NULL_TREE;
10424 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10426 if (TREE_CODE (vec) == VECTOR_CST)
10428 first = VECTOR_CST_ELT (vec, 0);
10429 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10430 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10431 return NULL_TREE;
10433 return first;
10436 else if (TREE_CODE (vec) == CONSTRUCTOR)
10438 first = error_mark_node;
10440 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10442 if (i == 0)
10444 first = t;
10445 continue;
10447 if (!operand_equal_p (first, t, 0))
10448 return NULL_TREE;
10450 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10451 return NULL_TREE;
10453 return first;
10456 return NULL_TREE;
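/* Examples (editorial note, not part of the original source): for the
   VECTOR_CST { 7, 7, 7, 7 } this returns the tree for 7, while
   { 1, 2, 3, 4 } yields NULL_TREE.  A CONSTRUCTOR must additionally supply
   a value for every subpart of the vector type; otherwise NULL_TREE is
   returned even if the listed elements are all equal.  */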
10459 /* Build an empty statement at location LOC. */
10461 tree
10462 build_empty_stmt (location_t loc)
10464 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10465 SET_EXPR_LOCATION (t, loc);
10466 return t;
10470 /* Build an OpenMP clause with code CODE. LOC is the location of the
10471 clause. */
10473 tree
10474 build_omp_clause (location_t loc, enum omp_clause_code code)
10476 tree t;
10477 int size, length;
10479 length = omp_clause_num_ops[code];
10480 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10482 record_node_allocation_statistics (OMP_CLAUSE, size);
10484 t = (tree) ggc_internal_alloc (size);
10485 memset (t, 0, size);
10486 TREE_SET_CODE (t, OMP_CLAUSE);
10487 OMP_CLAUSE_SET_CODE (t, code);
10488 OMP_CLAUSE_LOCATION (t) = loc;
10490 return t;
10493 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10494 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10495 Except for the CODE and operand count field, other storage for the
10496 object is initialized to zeros. */
10498 tree
10499 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10501 tree t;
10502 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10504 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10505 gcc_assert (len >= 1);
10507 record_node_allocation_statistics (code, length);
10509 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10511 TREE_SET_CODE (t, code);
10513 /* Can't use TREE_OPERAND to store the length because if checking is
10514 enabled, it will try to check the length before we store it. :-P */
10515 t->exp.operands[0] = build_int_cst (sizetype, len);
10517 return t;
10520 /* Helper function for build_call_* functions; build a CALL_EXPR with
10521 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10522 the argument slots. */
10524 static tree
10525 build_call_1 (tree return_type, tree fn, int nargs)
10527 tree t;
10529 t = build_vl_exp (CALL_EXPR, nargs + 3);
10530 TREE_TYPE (t) = return_type;
10531 CALL_EXPR_FN (t) = fn;
10532 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10534 return t;
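/* Layout note (editorial addition, not part of the original source): a call
   with NARGS arguments therefore occupies NARGS + 3 operand slots: slot 0
   holds the operand count stored by build_vl_exp, slot 1 the CALL_EXPR_FN,
   slot 2 the static chain, and slots 3 .. NARGS + 2 the arguments.  */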
10537 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10538 FN and a null static chain slot. NARGS is the number of call arguments
10539 which are specified as "..." arguments. */
10541 tree
10542 build_call_nary (tree return_type, tree fn, int nargs, ...)
10544 tree ret;
10545 va_list args;
10546 va_start (args, nargs);
10547 ret = build_call_valist (return_type, fn, nargs, args);
10548 va_end (args);
10549 return ret;
10552 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10553 FN and a null static chain slot. NARGS is the number of call arguments
10554 which are specified as a va_list ARGS. */
10556 tree
10557 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10559 tree t;
10560 int i;
10562 t = build_call_1 (return_type, fn, nargs);
10563 for (i = 0; i < nargs; i++)
10564 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10565 process_call_operands (t);
10566 return t;
10569 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10570 FN and a null static chain slot. NARGS is the number of call arguments
10571 which are specified as a tree array ARGS. */
10573 tree
10574 build_call_array_loc (location_t loc, tree return_type, tree fn,
10575 int nargs, const tree *args)
10577 tree t;
10578 int i;
10580 t = build_call_1 (return_type, fn, nargs);
10581 for (i = 0; i < nargs; i++)
10582 CALL_EXPR_ARG (t, i) = args[i];
10583 process_call_operands (t);
10584 SET_EXPR_LOCATION (t, loc);
10585 return t;
10588 /* Like build_call_array, but takes a vec. */
10590 tree
10591 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10593 tree ret, t;
10594 unsigned int ix;
10596 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10597 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10598 CALL_EXPR_ARG (ret, ix) = t;
10599 process_call_operands (ret);
10600 return ret;
10603 /* Conveniently construct a function call expression. FNDECL names the
10604 function to be called and N arguments are passed in the array
10605 ARGARRAY. */
10607 tree
10608 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10610 tree fntype = TREE_TYPE (fndecl);
10611 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10613 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10616 /* Conveniently construct a function call expression. FNDECL names the
10617 function to be called and the arguments are passed in the vector
10618 VEC. */
10620 tree
10621 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10623 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10624 vec_safe_address (vec));
10628 /* Conveniently construct a function call expression. FNDECL names the
10629 function to be called, N is the number of arguments, and the "..."
10630 parameters are the argument expressions. */
10632 tree
10633 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10635 va_list ap;
10636 tree *argarray = XALLOCAVEC (tree, n);
10637 int i;
10639 va_start (ap, n);
10640 for (i = 0; i < n; i++)
10641 argarray[i] = va_arg (ap, tree);
10642 va_end (ap);
10643 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10646 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10647 varargs macros aren't supported by all bootstrap compilers. */
10649 tree
10650 build_call_expr (tree fndecl, int n, ...)
10652 va_list ap;
10653 tree *argarray = XALLOCAVEC (tree, n);
10654 int i;
10656 va_start (ap, n);
10657 for (i = 0; i < n; i++)
10658 argarray[i] = va_arg (ap, tree);
10659 va_end (ap);
10660 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
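/* Usage sketch (editorial note; FNDECL, ARG0 and ARG1 are hypothetical
   trees supplied by the caller):

     tree call = build_call_expr (fndecl, 2, arg0, arg1);

   is equivalent to build_call_expr_loc (UNKNOWN_LOCATION, fndecl, 2, arg0,
   arg1) and normally produces a CALL_EXPR whose type is the callee's return
   type, TREE_TYPE (TREE_TYPE (fndecl)), though folding may simplify it.  */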
10663 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10664 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10665 It will get gimplified later into an ordinary internal function. */
10667 tree
10668 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10669 tree type, int n, const tree *args)
10671 tree t = build_call_1 (type, NULL_TREE, n);
10672 for (int i = 0; i < n; ++i)
10673 CALL_EXPR_ARG (t, i) = args[i];
10674 SET_EXPR_LOCATION (t, loc);
10675 CALL_EXPR_IFN (t) = ifn;
10676 return t;
10679 /* Build an internal call expression. This is just like CALL_EXPR, except
10680 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10681 internal function. */
10683 tree
10684 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10685 tree type, int n, ...)
10687 va_list ap;
10688 tree *argarray = XALLOCAVEC (tree, n);
10689 int i;
10691 va_start (ap, n);
10692 for (i = 0; i < n; i++)
10693 argarray[i] = va_arg (ap, tree);
10694 va_end (ap);
10695 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10698 /* Return a function call to FN, if the target is guaranteed to support it,
10699 or null otherwise.
10701 N is the number of arguments, passed in the "...", and TYPE is the
10702 type of the return value. */
10704 tree
10705 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10706 int n, ...)
10708 va_list ap;
10709 tree *argarray = XALLOCAVEC (tree, n);
10710 int i;
10712 va_start (ap, n);
10713 for (i = 0; i < n; i++)
10714 argarray[i] = va_arg (ap, tree);
10715 va_end (ap);
10716 if (internal_fn_p (fn))
10718 internal_fn ifn = as_internal_fn (fn);
10719 if (direct_internal_fn_p (ifn))
10721 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10722 if (!direct_internal_fn_supported_p (ifn, types,
10723 OPTIMIZE_FOR_BOTH))
10724 return NULL_TREE;
10726 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10728 else
10730 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10731 if (!fndecl)
10732 return NULL_TREE;
10733 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10737 /* Return a function call to the appropriate builtin alloca variant.
10739 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10740 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10741 bound for SIZE in case it is not a fixed value. */
10743 tree
10744 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10746 if (max_size >= 0)
10748 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10749 return
10750 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10752 else if (align > 0)
10754 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10755 return build_call_expr (t, 2, size, size_int (align));
10757 else
10759 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10760 return build_call_expr (t, 1, size);
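/* Selection summary (editorial note, not part of the original source):
   build_alloca_call_expr (size, 0, -1) yields a plain __builtin_alloca
   call, build_alloca_call_expr (size, 64, -1) yields
   __builtin_alloca_with_align (size, 64), and any non-negative MAX_SIZE
   selects __builtin_alloca_with_align_and_max regardless of ALIGN.  */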
10764 /* Create a new constant string literal and return a char* pointer to it.
10765 The STRING_CST value is the LEN characters at STR. */
10766 tree
10767 build_string_literal (int len, const char *str)
10769 tree t, elem, index, type;
10771 t = build_string (len, str);
10772 elem = build_type_variant (char_type_node, 1, 0);
10773 index = build_index_type (size_int (len - 1));
10774 type = build_array_type (elem, index);
10775 TREE_TYPE (t) = type;
10776 TREE_CONSTANT (t) = 1;
10777 TREE_READONLY (t) = 1;
10778 TREE_STATIC (t) = 1;
10780 type = build_pointer_type (elem);
10781 t = build1 (ADDR_EXPR, type,
10782 build4 (ARRAY_REF, elem,
10783 t, integer_zero_node, NULL_TREE, NULL_TREE));
10784 return t;
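/* Example (editorial note, not part of the original source):
   build_string_literal (3, "hi") creates a constant 3-element array of
   const char holding 'h', 'i', '\0' and returns an ADDR_EXPR of its first
   element; the resulting STRING_CST has length exactly LEN, so LEN must
   include the terminating NUL if the caller wants one.  */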
10789 /* Return true if T (assumed to be a DECL) must be assigned a memory
10790 location. */
10792 bool
10793 needs_to_live_in_memory (const_tree t)
10795 return (TREE_ADDRESSABLE (t)
10796 || is_global_var (t)
10797 || (TREE_CODE (t) == RESULT_DECL
10798 && !DECL_BY_REFERENCE (t)
10799 && aggregate_value_p (t, current_function_decl)));
10802 /* Return the value of the constant X, sign-extended. */
10804 HOST_WIDE_INT
10805 int_cst_value (const_tree x)
10807 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10808 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10810 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10811 gcc_assert (cst_and_fits_in_hwi (x));
10813 if (bits < HOST_BITS_PER_WIDE_INT)
10815 bool negative = ((val >> (bits - 1)) & 1) != 0;
10816 if (negative)
10817 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10818 else
10819 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10822 return val;
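/* Worked example (editorial note, not part of the original source): for an
   8-bit signed type whose low word is 0xff, BITS is 8 and bit 7 is set, so
   the value is sign-extended and -1 is returned; for 0x7f the high bits are
   cleared instead and 127 is returned.  */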
10825 /* If TYPE is an integral or pointer type, return an integer type with
10826 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10827 if TYPE is already an integer type of signedness UNSIGNEDP. */
10829 tree
10830 signed_or_unsigned_type_for (int unsignedp, tree type)
10832 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10833 return type;
10835 if (TREE_CODE (type) == VECTOR_TYPE)
10837 tree inner = TREE_TYPE (type);
10838 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10839 if (!inner2)
10840 return NULL_TREE;
10841 if (inner == inner2)
10842 return type;
10843 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10846 if (!INTEGRAL_TYPE_P (type)
10847 && !POINTER_TYPE_P (type)
10848 && TREE_CODE (type) != OFFSET_TYPE)
10849 return NULL_TREE;
10851 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10854 /* If TYPE is an integral or pointer type, return an integer type with
10855 the same precision which is unsigned, or itself if TYPE is already an
10856 unsigned integer type. */
10858 tree
10859 unsigned_type_for (tree type)
10861 return signed_or_unsigned_type_for (1, type);
10864 /* If TYPE is an integral or pointer type, return an integer type with
10865 the same precision which is signed, or itself if TYPE is already a
10866 signed integer type. */
10868 tree
10869 signed_type_for (tree type)
10871 return signed_or_unsigned_type_for (0, type);
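/* Examples (editorial note, not part of the original source):
   unsigned_type_for (integer_type_node) returns an unsigned integer type
   with the precision of int, and unsigned_type_for applied to a pointer
   type returns an unsigned integer type of pointer precision rather than
   another pointer type.  For vector types the conversion is applied to the
   element type and a matching vector type is rebuilt.  */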
10874 /* If TYPE is a vector type, return a boolean vector type with the
10875 same size and number of subparts. Otherwise return boolean_type_node. */
10877 tree
10878 truth_type_for (tree type)
10880 if (TREE_CODE (type) == VECTOR_TYPE)
10882 if (VECTOR_BOOLEAN_TYPE_P (type))
10883 return type;
10884 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
10885 GET_MODE_SIZE (TYPE_MODE (type)));
10887 else
10888 return boolean_type_node;
10891 /* Returns the largest value obtainable by casting something in INNER type to
10892 OUTER type. */
10894 tree
10895 upper_bound_in_type (tree outer, tree inner)
10897 unsigned int det = 0;
10898 unsigned oprec = TYPE_PRECISION (outer);
10899 unsigned iprec = TYPE_PRECISION (inner);
10900 unsigned prec;
10902 /* Compute a unique number for every combination. */
10903 det |= (oprec > iprec) ? 4 : 0;
10904 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10905 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10907 /* Determine the exponent to use. */
10908 switch (det)
10910 case 0:
10911 case 1:
10912 /* oprec <= iprec, outer: signed, inner: don't care. */
10913 prec = oprec - 1;
10914 break;
10915 case 2:
10916 case 3:
10917 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10918 prec = oprec;
10919 break;
10920 case 4:
10921 /* oprec > iprec, outer: signed, inner: signed. */
10922 prec = iprec - 1;
10923 break;
10924 case 5:
10925 /* oprec > iprec, outer: signed, inner: unsigned. */
10926 prec = iprec;
10927 break;
10928 case 6:
10929 /* oprec > iprec, outer: unsigned, inner: signed. */
10930 prec = oprec;
10931 break;
10932 case 7:
10933 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10934 prec = iprec;
10935 break;
10936 default:
10937 gcc_unreachable ();
10940 return wide_int_to_tree (outer,
10941 wi::mask (prec, false, TYPE_PRECISION (outer)));
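/* Worked example (editorial note, not part of the original source): with
   OUTER a 16-bit unsigned type and INNER a 32-bit signed type, OPREC <=
   IPREC and OUTER is unsigned, so DET == 2 and PREC == OPREC == 16, giving
   65535; with OUTER 32-bit signed and INNER 16-bit unsigned, DET == 5 and
   PREC == IPREC == 16, again giving 65535, the largest value a 16-bit
   unsigned quantity can take after widening.  */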
10944 /* Returns the smallest value obtainable by casting something in INNER type to
10945 OUTER type. */
10947 tree
10948 lower_bound_in_type (tree outer, tree inner)
10950 unsigned oprec = TYPE_PRECISION (outer);
10951 unsigned iprec = TYPE_PRECISION (inner);
10953 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10954 and obtain 0. */
10955 if (TYPE_UNSIGNED (outer)
10956 /* If we are widening something of an unsigned type, OUTER type
10957 contains all values of INNER type. In particular, both INNER
10958 and OUTER types have zero in common. */
10959 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10960 return build_int_cst (outer, 0);
10961 else
10963 /* If we are widening a signed type to another signed type, we
10964 want to obtain -2^(iprec-1). If we are keeping the
10965 precision or narrowing to a signed type, we want to obtain
10966 -2^(oprec-1). */
10967 unsigned prec = oprec > iprec ? iprec : oprec;
10968 return wide_int_to_tree (outer,
10969 wi::mask (prec - 1, true,
10970 TYPE_PRECISION (outer)));
10974 /* Return nonzero if two operands that are suitable for PHI nodes are
10975 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10976 SSA_NAME or invariant. Note that this is strictly an optimization.
10977 That is, callers of this function can directly call operand_equal_p
10978 and get the same result, only slower. */
10980 int
10981 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10983 if (arg0 == arg1)
10984 return 1;
10985 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10986 return 0;
10987 return operand_equal_p (arg0, arg1, 0);
10990 /* Returns the number of zeros at the end of the binary representation of X. */
10992 tree
10993 num_ending_zeros (const_tree x)
10995 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
10999 #define WALK_SUBTREE(NODE) \
11000 do \
11002 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11003 if (result) \
11004 return result; \
11006 while (0)
11008 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11009 be walked whenever a type is seen in the tree. The rest of the operands and the
11010 return value are as for walk_tree. */
11012 static tree
11013 walk_type_fields (tree type, walk_tree_fn func, void *data,
11014 hash_set<tree> *pset, walk_tree_lh lh)
11016 tree result = NULL_TREE;
11018 switch (TREE_CODE (type))
11020 case POINTER_TYPE:
11021 case REFERENCE_TYPE:
11022 case VECTOR_TYPE:
11023 /* We have to worry about mutually recursive pointers. These can't
11024 be written in C. They can in Ada. It's pathological, but
11025 there's an ACATS test (c38102a) that checks it. Deal with this
11026 by checking if we're pointing to another pointer, that one
11027 points to another pointer, that one does too, and we have no htab.
11028 If so, get a hash table. We check three levels deep to avoid
11029 the cost of the hash table if we don't need one. */
11030 if (POINTER_TYPE_P (TREE_TYPE (type))
11031 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11032 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11033 && !pset)
11035 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11036 func, data);
11037 if (result)
11038 return result;
11040 break;
11043 /* fall through */
11045 case COMPLEX_TYPE:
11046 WALK_SUBTREE (TREE_TYPE (type));
11047 break;
11049 case METHOD_TYPE:
11050 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11052 /* Fall through. */
11054 case FUNCTION_TYPE:
11055 WALK_SUBTREE (TREE_TYPE (type));
11057 tree arg;
11059 /* We never want to walk into default arguments. */
11060 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11061 WALK_SUBTREE (TREE_VALUE (arg));
11063 break;
11065 case ARRAY_TYPE:
11066 /* Don't follow this node's type if it is a pointer, for fear that
11067 we'll have infinite recursion. If we have a PSET, then we
11068 need not fear. */
11069 if (pset
11070 || (!POINTER_TYPE_P (TREE_TYPE (type))
11071 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11072 WALK_SUBTREE (TREE_TYPE (type));
11073 WALK_SUBTREE (TYPE_DOMAIN (type));
11074 break;
11076 case OFFSET_TYPE:
11077 WALK_SUBTREE (TREE_TYPE (type));
11078 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11079 break;
11081 default:
11082 break;
11085 return NULL_TREE;
11088 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11089 called with the DATA and the address of each sub-tree. If FUNC returns a
11090 non-NULL value, the traversal is stopped, and the value returned by FUNC
11091 is returned. If PSET is non-NULL it is used to record the nodes visited,
11092 and to avoid visiting a node more than once. */
11094 tree
11095 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11096 hash_set<tree> *pset, walk_tree_lh lh)
11098 enum tree_code code;
11099 int walk_subtrees;
11100 tree result;
11102 #define WALK_SUBTREE_TAIL(NODE) \
11103 do \
11105 tp = & (NODE); \
11106 goto tail_recurse; \
11108 while (0)
11110 tail_recurse:
11111 /* Skip empty subtrees. */
11112 if (!*tp)
11113 return NULL_TREE;
11115 /* Don't walk the same tree twice, if the user has requested
11116 that we avoid doing so. */
11117 if (pset && pset->add (*tp))
11118 return NULL_TREE;
11120 /* Call the function. */
11121 walk_subtrees = 1;
11122 result = (*func) (tp, &walk_subtrees, data);
11124 /* If we found something, return it. */
11125 if (result)
11126 return result;
11128 code = TREE_CODE (*tp);
11130 /* Even if we didn't, FUNC may have decided that there was nothing
11131 interesting below this point in the tree. */
11132 if (!walk_subtrees)
11134 /* But we still need to check our siblings. */
11135 if (code == TREE_LIST)
11136 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11137 else if (code == OMP_CLAUSE)
11138 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11139 else
11140 return NULL_TREE;
11143 if (lh)
11145 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11146 if (result || !walk_subtrees)
11147 return result;
11150 switch (code)
11152 case ERROR_MARK:
11153 case IDENTIFIER_NODE:
11154 case INTEGER_CST:
11155 case REAL_CST:
11156 case FIXED_CST:
11157 case VECTOR_CST:
11158 case STRING_CST:
11159 case BLOCK:
11160 case PLACEHOLDER_EXPR:
11161 case SSA_NAME:
11162 case FIELD_DECL:
11163 case RESULT_DECL:
11164 /* None of these have subtrees other than those already walked
11165 above. */
11166 break;
11168 case TREE_LIST:
11169 WALK_SUBTREE (TREE_VALUE (*tp));
11170 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11171 break;
11173 case TREE_VEC:
11175 int len = TREE_VEC_LENGTH (*tp);
11177 if (len == 0)
11178 break;
11180 /* Walk all elements but the first. */
11181 while (--len)
11182 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11184 /* Now walk the first one as a tail call. */
11185 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11188 case COMPLEX_CST:
11189 WALK_SUBTREE (TREE_REALPART (*tp));
11190 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11192 case CONSTRUCTOR:
11194 unsigned HOST_WIDE_INT idx;
11195 constructor_elt *ce;
11197 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11198 idx++)
11199 WALK_SUBTREE (ce->value);
11201 break;
11203 case SAVE_EXPR:
11204 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11206 case BIND_EXPR:
11208 tree decl;
11209 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11211 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11212 into declarations that are just mentioned, rather than
11213 declared; they don't really belong to this part of the tree.
11214 And, we can see cycles: the initializer for a declaration
11215 can refer to the declaration itself. */
11216 WALK_SUBTREE (DECL_INITIAL (decl));
11217 WALK_SUBTREE (DECL_SIZE (decl));
11218 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11220 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11223 case STATEMENT_LIST:
11225 tree_stmt_iterator i;
11226 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11227 WALK_SUBTREE (*tsi_stmt_ptr (i));
11229 break;
11231 case OMP_CLAUSE:
11232 switch (OMP_CLAUSE_CODE (*tp))
11234 case OMP_CLAUSE_GANG:
11235 case OMP_CLAUSE__GRIDDIM_:
11236 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11237 /* FALLTHRU */
11239 case OMP_CLAUSE_ASYNC:
11240 case OMP_CLAUSE_WAIT:
11241 case OMP_CLAUSE_WORKER:
11242 case OMP_CLAUSE_VECTOR:
11243 case OMP_CLAUSE_NUM_GANGS:
11244 case OMP_CLAUSE_NUM_WORKERS:
11245 case OMP_CLAUSE_VECTOR_LENGTH:
11246 case OMP_CLAUSE_PRIVATE:
11247 case OMP_CLAUSE_SHARED:
11248 case OMP_CLAUSE_FIRSTPRIVATE:
11249 case OMP_CLAUSE_COPYIN:
11250 case OMP_CLAUSE_COPYPRIVATE:
11251 case OMP_CLAUSE_FINAL:
11252 case OMP_CLAUSE_IF:
11253 case OMP_CLAUSE_NUM_THREADS:
11254 case OMP_CLAUSE_SCHEDULE:
11255 case OMP_CLAUSE_UNIFORM:
11256 case OMP_CLAUSE_DEPEND:
11257 case OMP_CLAUSE_NUM_TEAMS:
11258 case OMP_CLAUSE_THREAD_LIMIT:
11259 case OMP_CLAUSE_DEVICE:
11260 case OMP_CLAUSE_DIST_SCHEDULE:
11261 case OMP_CLAUSE_SAFELEN:
11262 case OMP_CLAUSE_SIMDLEN:
11263 case OMP_CLAUSE_ORDERED:
11264 case OMP_CLAUSE_PRIORITY:
11265 case OMP_CLAUSE_GRAINSIZE:
11266 case OMP_CLAUSE_NUM_TASKS:
11267 case OMP_CLAUSE_HINT:
11268 case OMP_CLAUSE_TO_DECLARE:
11269 case OMP_CLAUSE_LINK:
11270 case OMP_CLAUSE_USE_DEVICE_PTR:
11271 case OMP_CLAUSE_IS_DEVICE_PTR:
11272 case OMP_CLAUSE__LOOPTEMP_:
11273 case OMP_CLAUSE__SIMDUID_:
11274 case OMP_CLAUSE__CILK_FOR_COUNT_:
11275 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11276 /* FALLTHRU */
11278 case OMP_CLAUSE_INDEPENDENT:
11279 case OMP_CLAUSE_NOWAIT:
11280 case OMP_CLAUSE_DEFAULT:
11281 case OMP_CLAUSE_UNTIED:
11282 case OMP_CLAUSE_MERGEABLE:
11283 case OMP_CLAUSE_PROC_BIND:
11284 case OMP_CLAUSE_INBRANCH:
11285 case OMP_CLAUSE_NOTINBRANCH:
11286 case OMP_CLAUSE_FOR:
11287 case OMP_CLAUSE_PARALLEL:
11288 case OMP_CLAUSE_SECTIONS:
11289 case OMP_CLAUSE_TASKGROUP:
11290 case OMP_CLAUSE_NOGROUP:
11291 case OMP_CLAUSE_THREADS:
11292 case OMP_CLAUSE_SIMD:
11293 case OMP_CLAUSE_DEFAULTMAP:
11294 case OMP_CLAUSE_AUTO:
11295 case OMP_CLAUSE_SEQ:
11296 case OMP_CLAUSE_TILE:
11297 case OMP_CLAUSE__SIMT_:
11298 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11300 case OMP_CLAUSE_LASTPRIVATE:
11301 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11302 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11303 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11305 case OMP_CLAUSE_COLLAPSE:
11307 int i;
11308 for (i = 0; i < 3; i++)
11309 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11310 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11313 case OMP_CLAUSE_LINEAR:
11314 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11315 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11316 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11317 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11319 case OMP_CLAUSE_ALIGNED:
11320 case OMP_CLAUSE_FROM:
11321 case OMP_CLAUSE_TO:
11322 case OMP_CLAUSE_MAP:
11323 case OMP_CLAUSE__CACHE_:
11324 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11325 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11326 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11328 case OMP_CLAUSE_REDUCTION:
11330 int i;
11331 for (i = 0; i < 5; i++)
11332 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11333 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11336 default:
11337 gcc_unreachable ();
11339 break;
11341 case TARGET_EXPR:
11343 int i, len;
11345 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11346 But, we only want to walk once. */
11347 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11348 for (i = 0; i < len; ++i)
11349 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11350 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11353 case DECL_EXPR:
11354 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11355 defining. We only want to walk into these fields of a type in this
11356 case and not in the general case of a mere reference to the type.
11358 The criterion is as follows: if the field can be an expression, it
11359 must be walked only here. This should be in keeping with the fields
11360 that are directly gimplified in gimplify_type_sizes in order for the
11361 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11362 variable-sized types.
11364 Note that DECLs get walked as part of processing the BIND_EXPR. */
11365 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11367 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11368 if (TREE_CODE (*type_p) == ERROR_MARK)
11369 return NULL_TREE;
11371 /* Call the function for the type. See if it returns anything or
11372 doesn't want us to continue. If we are to continue, walk both
11373 the normal fields and those for the declaration case. */
11374 result = (*func) (type_p, &walk_subtrees, data);
11375 if (result || !walk_subtrees)
11376 return result;
11378 /* But do not walk a pointed-to type since it may itself need to
11379 be walked in the declaration case if it isn't anonymous. */
11380 if (!POINTER_TYPE_P (*type_p))
11382 result = walk_type_fields (*type_p, func, data, pset, lh);
11383 if (result)
11384 return result;
11387 /* If this is a record type, also walk the fields. */
11388 if (RECORD_OR_UNION_TYPE_P (*type_p))
11390 tree field;
11392 for (field = TYPE_FIELDS (*type_p); field;
11393 field = DECL_CHAIN (field))
11395 /* We'd like to look at the type of the field, but we can
11396 easily get infinite recursion. So assume it's pointed
11397 to elsewhere in the tree. Also, ignore things that
11398 aren't fields. */
11399 if (TREE_CODE (field) != FIELD_DECL)
11400 continue;
11402 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11403 WALK_SUBTREE (DECL_SIZE (field));
11404 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11405 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11406 WALK_SUBTREE (DECL_QUALIFIER (field));
11410 /* Same for scalar types. */
11411 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11412 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11413 || TREE_CODE (*type_p) == INTEGER_TYPE
11414 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11415 || TREE_CODE (*type_p) == REAL_TYPE)
11417 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11418 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11421 WALK_SUBTREE (TYPE_SIZE (*type_p));
11422 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11424 /* FALLTHRU */
11426 default:
11427 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11429 int i, len;
11431 /* Walk over all the sub-trees of this operand. */
11432 len = TREE_OPERAND_LENGTH (*tp);
11434 /* Go through the subtrees. We need to do this in forward order so
11435 that the scope of a FOR_EXPR is handled properly. */
11436 if (len)
11438 for (i = 0; i < len - 1; ++i)
11439 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11440 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11443 /* If this is a type, walk the needed fields in the type. */
11444 else if (TYPE_P (*tp))
11445 return walk_type_fields (*tp, func, data, pset, lh);
11446 break;
11449 /* We didn't find what we were looking for. */
11450 return NULL_TREE;
11452 #undef WALK_SUBTREE_TAIL
11454 #undef WALK_SUBTREE
11456 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11458 tree
11459 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11460 walk_tree_lh lh)
11462 tree result;
11464 hash_set<tree> pset;
11465 result = walk_tree_1 (tp, func, data, &pset, lh);
11466 return result;
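/* Usage sketch (editorial note; count_addr_exprs_r and expr are hypothetical
   names, not part of this file).  A walk_tree_fn receives the address of
   each sub-tree, may clear *WALK_SUBTREES to prune the walk, and stops the
   traversal by returning a non-NULL tree, which is then propagated:

     static tree
     count_addr_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                         void *data)
     {
       if (TREE_CODE (*tp) == ADDR_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int count = 0;
     walk_tree_without_duplicates (&expr, count_addr_exprs_r, &count);  */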
11470 tree
11471 tree_block (tree t)
11473 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11475 if (IS_EXPR_CODE_CLASS (c))
11476 return LOCATION_BLOCK (t->exp.locus);
11477 gcc_unreachable ();
11478 return NULL;
11481 void
11482 tree_set_block (tree t, tree b)
11484 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11486 if (IS_EXPR_CODE_CLASS (c))
11488 t->exp.locus = set_block (t->exp.locus, b);
11490 else
11491 gcc_unreachable ();
11494 /* Create a nameless artificial label and put it in the current
11495 function context. The label has a location of LOC. Returns the
11496 newly created label. */
11498 tree
11499 create_artificial_label (location_t loc)
11501 tree lab = build_decl (loc,
11502 LABEL_DECL, NULL_TREE, void_type_node);
11504 DECL_ARTIFICIAL (lab) = 1;
11505 DECL_IGNORED_P (lab) = 1;
11506 DECL_CONTEXT (lab) = current_function_decl;
11507 return lab;
11510 /* Given a tree, try to return a useful variable name that we can use
11511 to prefix a temporary that is being assigned the value of the tree.
11512 I.E. given <temp> = &A, return A. */
11514 const char *
11515 get_name (tree t)
11517 tree stripped_decl;
11519 stripped_decl = t;
11520 STRIP_NOPS (stripped_decl);
11521 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11522 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11523 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11525 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11526 if (!name)
11527 return NULL;
11528 return IDENTIFIER_POINTER (name);
11530 else
11532 switch (TREE_CODE (stripped_decl))
11534 case ADDR_EXPR:
11535 return get_name (TREE_OPERAND (stripped_decl, 0));
11536 default:
11537 return NULL;
11542 /* Return true if FNTYPE has a variable argument list. */
11544 bool
11545 stdarg_p (const_tree fntype)
11547 function_args_iterator args_iter;
11548 tree n = NULL_TREE, t;
11550 if (!fntype)
11551 return false;
11553 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11555 n = t;
11558 return n != NULL_TREE && n != void_type_node;
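/* Examples (editorial note, not part of the original source): for the type
   of "int f (int, ...)" the last element seen by the loop is the int
   argument type, so true is returned; for "int f (int)" the list ends with
   void_type_node and for an unprototyped "int f ()" the loop never runs, so
   both return false.  */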
11561 /* Return true if FNTYPE has a prototype. */
11563 bool
11564 prototype_p (const_tree fntype)
11566 tree t;
11568 gcc_assert (fntype != NULL_TREE);
11570 t = TYPE_ARG_TYPES (fntype);
11571 return (t != NULL_TREE);
11574 /* If BLOCK is inlined from an __attribute__((__artificial__))
11575 routine, return a pointer to the location from which it was
11576 called. */
11577 location_t *
11578 block_nonartificial_location (tree block)
11580 location_t *ret = NULL;
11582 while (block && TREE_CODE (block) == BLOCK
11583 && BLOCK_ABSTRACT_ORIGIN (block))
11585 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11587 while (TREE_CODE (ao) == BLOCK
11588 && BLOCK_ABSTRACT_ORIGIN (ao)
11589 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11590 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11592 if (TREE_CODE (ao) == FUNCTION_DECL)
11594 /* If AO is an artificial inline, point RET to the
11595 call site locus at which it has been inlined and continue
11596 the loop, in case AO's caller is also an artificial
11597 inline. */
11598 if (DECL_DECLARED_INLINE_P (ao)
11599 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11600 ret = &BLOCK_SOURCE_LOCATION (block);
11601 else
11602 break;
11604 else if (TREE_CODE (ao) != BLOCK)
11605 break;
11607 block = BLOCK_SUPERCONTEXT (block);
11609 return ret;
11613 /* If EXP is inlined from an __attribute__((__artificial__))
11614 function, return the location of the original call expression. */
11616 location_t
11617 tree_nonartificial_location (tree exp)
11619 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11621 if (loc)
11622 return *loc;
11623 else
11624 return EXPR_LOCATION (exp);
11628 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11629 nodes. */
11631 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11633 hashval_t
11634 cl_option_hasher::hash (tree x)
11636 const_tree const t = x;
11637 const char *p;
11638 size_t i;
11639 size_t len = 0;
11640 hashval_t hash = 0;
11642 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11644 p = (const char *)TREE_OPTIMIZATION (t);
11645 len = sizeof (struct cl_optimization);
11648 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11649 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11651 else
11652 gcc_unreachable ();
11654 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11655 something else. */
11656 for (i = 0; i < len; i++)
11657 if (p[i])
11658 hash = (hash << 4) ^ ((i << 2) | p[i]);
11660 return hash;
11663 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11664 TARGET_OPTION tree node) is the same as that given by *Y, a node of the
11665 same kind. */
11667 bool
11668 cl_option_hasher::equal (tree x, tree y)
11670 const_tree const xt = x;
11671 const_tree const yt = y;
11672 const char *xp;
11673 const char *yp;
11674 size_t len;
11676 if (TREE_CODE (xt) != TREE_CODE (yt))
11677 return 0;
11679 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11681 xp = (const char *)TREE_OPTIMIZATION (xt);
11682 yp = (const char *)TREE_OPTIMIZATION (yt);
11683 len = sizeof (struct cl_optimization);
11686 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11688 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11689 TREE_TARGET_OPTION (yt));
11692 else
11693 gcc_unreachable ();
11695 return (memcmp (xp, yp, len) == 0);
11698 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11700 tree
11701 build_optimization_node (struct gcc_options *opts)
11703 tree t;
11705 /* Use the cache of optimization nodes. */
11707 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11708 opts);
11710 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11711 t = *slot;
11712 if (!t)
11714 /* Insert this one into the hash table. */
11715 t = cl_optimization_node;
11716 *slot = t;
11718 /* Make a new node for next time round. */
11719 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11722 return t;
11725 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11727 tree
11728 build_target_option_node (struct gcc_options *opts)
11730 tree t;
11732 /* Use the cache of target option nodes. */
11734 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11735 opts);
11737 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11738 t = *slot;
11739 if (!t)
11741 /* Insert this one into the hash table. */
11742 t = cl_target_option_node;
11743 *slot = t;
11745 /* Make a new node for next time round. */
11746 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11749 return t;
11752 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11753 so that they aren't saved during PCH writing. */
11755 void
11756 prepare_target_option_nodes_for_pch (void)
11758 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11759 for (; iter != cl_option_hash_table->end (); ++iter)
11760 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11761 TREE_TARGET_GLOBALS (*iter) = NULL;
11764 /* Determine the "ultimate origin" of a block. The block may be an inlined
11765 instance of an inlined instance of a block which is local to an inline
11766 function, so we have to trace all of the way back through the origin chain
11767 to find out what sort of node actually served as the original seed for the
11768 given block. */
11770 tree
11771 block_ultimate_origin (const_tree block)
11773 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11775 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11776 we're trying to output the abstract instance of this function. */
11777 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11778 return NULL_TREE;
11780 if (immediate_origin == NULL_TREE)
11781 return NULL_TREE;
11782 else
11784 tree ret_val;
11785 tree lookahead = immediate_origin;
11787 do
11789 ret_val = lookahead;
11790 lookahead = (TREE_CODE (ret_val) == BLOCK
11791 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11793 while (lookahead != NULL && lookahead != ret_val);
11795 /* The block's abstract origin chain may not be the *ultimate* origin of
11796 the block. It could lead to a DECL that has an abstract origin set.
11797 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11798 will give us if it has one). Note that DECL's abstract origins are
11799 supposed to be the most distant ancestor (or so decl_ultimate_origin
11800 claims), so we don't need to loop following the DECL origins. */
11801 if (DECL_P (ret_val))
11802 return DECL_ORIGIN (ret_val);
11804 return ret_val;
11808 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11809 no instruction. */
11811 bool
11812 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11814 /* Do not strip casts into or out of differing address spaces. */
11815 if (POINTER_TYPE_P (outer_type)
11816 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11818 if (!POINTER_TYPE_P (inner_type)
11819 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11820 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11821 return false;
11823 else if (POINTER_TYPE_P (inner_type)
11824 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11826 /* We already know that outer_type is not a pointer with
11827 a non-generic address space. */
11828 return false;
11831 /* Use precision rather than machine mode when we can, which gives
11832 the correct answer even for submode (bit-field) types. */
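/* For example, a conversion between 'int' and 'unsigned int' is a nop, while
   a conversion from a 24-bit bit-field type to 'int' is not, even though on
   typical targets both types may share the same (SImode) machine mode.  */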
11833 if ((INTEGRAL_TYPE_P (outer_type)
11834 || POINTER_TYPE_P (outer_type)
11835 || TREE_CODE (outer_type) == OFFSET_TYPE)
11836 && (INTEGRAL_TYPE_P (inner_type)
11837 || POINTER_TYPE_P (inner_type)
11838 || TREE_CODE (inner_type) == OFFSET_TYPE))
11839 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11841 /* Otherwise fall back on comparing machine modes (e.g. for
11842 aggregate types, floats). */
11843 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11846 /* Return true iff conversion in EXP generates no instruction. Mark
11847 it inline so that we fully inline into the stripping functions even
11848 though we have two uses of this function. */
11850 static inline bool
11851 tree_nop_conversion (const_tree exp)
11853 tree outer_type, inner_type;
11855 if (!CONVERT_EXPR_P (exp)
11856 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11857 return false;
11858 if (TREE_OPERAND (exp, 0) == error_mark_node)
11859 return false;
11861 outer_type = TREE_TYPE (exp);
11862 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11864 if (!inner_type)
11865 return false;
11867 return tree_nop_conversion_p (outer_type, inner_type);
11870 /* Return true iff conversion in EXP generates no instruction. Don't
11871 consider conversions changing the signedness. */
11873 static bool
11874 tree_sign_nop_conversion (const_tree exp)
11876 tree outer_type, inner_type;
11878 if (!tree_nop_conversion (exp))
11879 return false;
11881 outer_type = TREE_TYPE (exp);
11882 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11884 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11885 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11888 /* Strip conversions from EXP according to tree_nop_conversion and
11889 return the resulting expression. */
11891 tree
11892 tree_strip_nop_conversions (tree exp)
11894 while (tree_nop_conversion (exp))
11895 exp = TREE_OPERAND (exp, 0);
11896 return exp;
11899 /* Strip conversions from EXP according to tree_sign_nop_conversion
11900 and return the resulting expression. */
11902 tree
11903 tree_strip_sign_nop_conversions (tree exp)
11905 while (tree_sign_nop_conversion (exp))
11906 exp = TREE_OPERAND (exp, 0);
11907 return exp;
11910 /* Avoid any floating point extensions from EXP. */
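/* For example, for '(double) f' with 'f' of type float this returns 'f', and
   a 'double' constant 2.0 is rewritten as a float constant because it is
   exactly representable in the narrower type.  */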
11911 tree
11912 strip_float_extensions (tree exp)
11914 tree sub, expt, subt;
11916 /* For a floating point constant, look up the narrowest type that can hold
11917 it properly and handle it like (type)(narrowest_type)constant.
11918 This way we can optimize for instance a=a*2.0 where "a" is float
11919 but 2.0 is a double constant. */
11920 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11922 REAL_VALUE_TYPE orig;
11923 tree type = NULL;
11925 orig = TREE_REAL_CST (exp);
11926 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11927 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11928 type = float_type_node;
11929 else if (TYPE_PRECISION (TREE_TYPE (exp))
11930 > TYPE_PRECISION (double_type_node)
11931 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11932 type = double_type_node;
11933 if (type)
11934 return build_real_truncate (type, orig);
11937 if (!CONVERT_EXPR_P (exp))
11938 return exp;
11940 sub = TREE_OPERAND (exp, 0);
11941 subt = TREE_TYPE (sub);
11942 expt = TREE_TYPE (exp);
11944 if (!FLOAT_TYPE_P (subt))
11945 return exp;
11947 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11948 return exp;
11950 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11951 return exp;
11953 return strip_float_extensions (sub);
11956 /* Strip out all handled components that produce invariant
11957 offsets. */
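/* For example, for 'a.b[2].c' this returns 'a', while for 'a.b[i].c' with a
   variable index 'i' it returns NULL, because the offset is not invariant.  */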
11959 const_tree
11960 strip_invariant_refs (const_tree op)
11962 while (handled_component_p (op))
11964 switch (TREE_CODE (op))
11966 case ARRAY_REF:
11967 case ARRAY_RANGE_REF:
11968 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11969 || TREE_OPERAND (op, 2) != NULL_TREE
11970 || TREE_OPERAND (op, 3) != NULL_TREE)
11971 return NULL;
11972 break;
11974 case COMPONENT_REF:
11975 if (TREE_OPERAND (op, 2) != NULL_TREE)
11976 return NULL;
11977 break;
11979 default:;
11981 op = TREE_OPERAND (op, 0);
11984 return op;
11987 static GTY(()) tree gcc_eh_personality_decl;
11989 /* Return the GCC personality function decl. */
11991 tree
11992 lhd_gcc_personality (void)
11994 if (!gcc_eh_personality_decl)
11995 gcc_eh_personality_decl = build_personality_function ("gcc");
11996 return gcc_eh_personality_decl;
11999 /* TARGET is a call target of GIMPLE call statement
12000 (obtained by gimple_call_fn). Return true if it is
12001 OBJ_TYPE_REF representing a virtual call of a C++ method.
12002 (As opposed to OBJ_TYPE_REF representing objc calls
12003 through a cast where middle-end devirtualization machinery
12004 can't apply.) */
12006 bool
12007 virtual_method_call_p (const_tree target)
12009 if (TREE_CODE (target) != OBJ_TYPE_REF)
12010 return false;
12011 tree t = TREE_TYPE (target);
12012 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12013 t = TREE_TYPE (t);
12014 if (TREE_CODE (t) == FUNCTION_TYPE)
12015 return false;
12016 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12017 /* If we do not have BINFO associated, it means that type was built
12018 without devirtualization enabled. Do not consider this a virtual
12019 call. */
12020 if (!TYPE_BINFO (obj_type_ref_class (target)))
12021 return false;
12022 return true;
12025 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12027 tree
12028 obj_type_ref_class (const_tree ref)
12030 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12031 ref = TREE_TYPE (ref);
12032 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12033 ref = TREE_TYPE (ref);
12034 /* We look for the type that THIS points to. ObjC also builds
12035 OBJ_TYPE_REF with non-method calls; their first parameter
12036 ID however also corresponds to the class type. */
12037 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12038 || TREE_CODE (ref) == FUNCTION_TYPE);
12039 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12040 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12041 return TREE_TYPE (ref);
12044 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12046 static tree
12047 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12049 unsigned int i;
12050 tree base_binfo, b;
12052 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12053 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12054 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12055 return base_binfo;
12056 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12057 return b;
12058 return NULL;
12061 /* Try to find a base info of BINFO that would have its field decl at offset
12062 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12063 found, return it, otherwise return NULL_TREE. */
12065 tree
12066 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12068 tree type = BINFO_TYPE (binfo);
12070 while (true)
12072 HOST_WIDE_INT pos, size;
12073 tree fld;
12074 int i;
12076 if (types_same_for_odr (type, expected_type))
12077 return binfo;
12078 if (offset < 0)
12079 return NULL_TREE;
12081 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12083 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12084 continue;
12086 pos = int_bit_position (fld);
12087 size = tree_to_uhwi (DECL_SIZE (fld));
12088 if (pos <= offset && (pos + size) > offset)
12089 break;
12091 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12092 return NULL_TREE;
12094 /* Offset 0 indicates the primary base, whose vtable contents are
12095 represented in the binfo for the derived class. */
12096 else if (offset != 0)
12098 tree found_binfo = NULL, base_binfo;
12099 /* Offsets in BINFO are in bytes relative to the whole structure
12100 while POS is in bits relative to the containing field. */
12101 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12102 / BITS_PER_UNIT);
12104 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12105 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12106 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12108 found_binfo = base_binfo;
12109 break;
12111 if (found_binfo)
12112 binfo = found_binfo;
12113 else
12114 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12115 binfo_offset);
12118 type = TREE_TYPE (fld);
12119 offset -= pos;
12123 /* Returns true if X is a typedef decl. */
12125 bool
12126 is_typedef_decl (const_tree x)
12128 return (x && TREE_CODE (x) == TYPE_DECL
12129 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
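/* For example, after 'typedef int myint;' the TYPE_DECL for 'myint' has
   DECL_ORIGINAL_TYPE set to 'int' and is_typedef_decl returns true for it,
   while the implicit TYPE_DECL for 'int' itself has no original type.  */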
12132 /* Returns true iff TYPE is a type variant created for a typedef. */
12134 bool
12135 typedef_variant_p (const_tree type)
12137 return is_typedef_decl (TYPE_NAME (type));
12140 /* Warn about a use of an identifier which was marked deprecated. */
12141 void
12142 warn_deprecated_use (tree node, tree attr)
12144 const char *msg;
12146 if (node == 0 || !warn_deprecated_decl)
12147 return;
12149 if (!attr)
12151 if (DECL_P (node))
12152 attr = DECL_ATTRIBUTES (node);
12153 else if (TYPE_P (node))
12155 tree decl = TYPE_STUB_DECL (node);
12156 if (decl)
12157 attr = lookup_attribute ("deprecated",
12158 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12162 if (attr)
12163 attr = lookup_attribute ("deprecated", attr);
12165 if (attr)
12166 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12167 else
12168 msg = NULL;
12170 bool w;
12171 if (DECL_P (node))
12173 if (msg)
12174 w = warning (OPT_Wdeprecated_declarations,
12175 "%qD is deprecated: %s", node, msg);
12176 else
12177 w = warning (OPT_Wdeprecated_declarations,
12178 "%qD is deprecated", node);
12179 if (w)
12180 inform (DECL_SOURCE_LOCATION (node), "declared here");
12182 else if (TYPE_P (node))
12184 tree what = NULL_TREE;
12185 tree decl = TYPE_STUB_DECL (node);
12187 if (TYPE_NAME (node))
12189 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12190 what = TYPE_NAME (node);
12191 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12192 && DECL_NAME (TYPE_NAME (node)))
12193 what = DECL_NAME (TYPE_NAME (node));
12196 if (decl)
12198 if (what)
12200 if (msg)
12201 w = warning (OPT_Wdeprecated_declarations,
12202 "%qE is deprecated: %s", what, msg);
12203 else
12204 w = warning (OPT_Wdeprecated_declarations,
12205 "%qE is deprecated", what);
12207 else
12209 if (msg)
12210 w = warning (OPT_Wdeprecated_declarations,
12211 "type is deprecated: %s", msg);
12212 else
12213 w = warning (OPT_Wdeprecated_declarations,
12214 "type is deprecated");
12216 if (w)
12217 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12219 else
12221 if (what)
12223 if (msg)
12224 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12225 what, msg);
12226 else
12227 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12229 else
12231 if (msg)
12232 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12233 msg);
12234 else
12235 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12241 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12242 somewhere in it. */
12244 bool
12245 contains_bitfld_component_ref_p (const_tree ref)
12247 while (handled_component_p (ref))
12249 if (TREE_CODE (ref) == COMPONENT_REF
12250 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12251 return true;
12252 ref = TREE_OPERAND (ref, 0);
12255 return false;
12258 /* Try to determine whether a TRY_CATCH expression can fall through.
12259 This is a subroutine of block_may_fallthru. */
12261 static bool
12262 try_catch_may_fallthru (const_tree stmt)
12264 tree_stmt_iterator i;
12266 /* If the TRY block can fall through, the whole TRY_CATCH can
12267 fall through. */
12268 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12269 return true;
12271 i = tsi_start (TREE_OPERAND (stmt, 1));
12272 switch (TREE_CODE (tsi_stmt (i)))
12274 case CATCH_EXPR:
12275 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12276 catch expression and a body. The whole TRY_CATCH may fall
12277 through iff any of the catch bodies falls through. */
12278 for (; !tsi_end_p (i); tsi_next (&i))
12280 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12281 return true;
12283 return false;
12285 case EH_FILTER_EXPR:
12286 /* The exception filter expression only matters if there is an
12287 exception. If the exception does not match EH_FILTER_TYPES,
12288 we will execute EH_FILTER_FAILURE, and we will fall through
12289 if that falls through. If the exception does match
12290 EH_FILTER_TYPES, the stack unwinder will continue up the
12291 stack, so we will not fall through. We don't know whether we
12292 will throw an exception which matches EH_FILTER_TYPES or not,
12293 so we just ignore EH_FILTER_TYPES and assume that we might
12294 throw an exception which doesn't match. */
12295 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12297 default:
12298 /* This case represents statements to be executed when an
12299 exception occurs. Those statements are implicitly followed
12300 by a RESX statement to resume execution after the exception.
12301 So in this case the TRY_CATCH never falls through. */
12302 return false;
12306 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12307 need not be 100% accurate; simply be conservative and return true if we
12308 don't know. This is used only to avoid stupidly generating extra code.
12309 If we're wrong, we'll just delete the extra code later. */
12311 bool
12312 block_may_fallthru (const_tree block)
12314 /* This CONST_CAST is okay because expr_last returns its argument
12315 unmodified and we assign it to a const_tree. */
12316 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12318 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12320 case GOTO_EXPR:
12321 case RETURN_EXPR:
12322 /* Easy cases. If the last statement of the block implies
12323 control transfer, then we can't fall through. */
12324 return false;
12326 case SWITCH_EXPR:
12327 /* If SWITCH_LABELS is set, this is lowered, and represents a
12328 branch to a selected label and hence can not fall through.
12329 Otherwise SWITCH_BODY is set, and the switch can fall
12330 through. */
12331 return SWITCH_LABELS (stmt) == NULL_TREE;
12333 case COND_EXPR:
12334 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12335 return true;
12336 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12338 case BIND_EXPR:
12339 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12341 case TRY_CATCH_EXPR:
12342 return try_catch_may_fallthru (stmt);
12344 case TRY_FINALLY_EXPR:
12345 /* The finally clause is always executed after the try clause,
12346 so if it does not fall through, then the try-finally will not
12347 fall through. Otherwise, if the try clause does not fall
12348 through, then when the finally clause falls through it will
12349 resume execution wherever the try clause was going. So the
12350 whole try-finally will only fall through if both the try
12351 clause and the finally clause fall through. */
12352 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12353 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12355 case MODIFY_EXPR:
12356 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12357 stmt = TREE_OPERAND (stmt, 1);
12358 else
12359 return true;
12360 /* FALLTHRU */
12362 case CALL_EXPR:
12363 /* Functions that do not return do not fall through. */
12364 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12366 case CLEANUP_POINT_EXPR:
12367 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12369 case TARGET_EXPR:
12370 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12372 case ERROR_MARK:
12373 return true;
12375 default:
12376 return lang_hooks.block_may_fallthru (stmt);
12380 /* True if we are using EH to handle cleanups. */
12381 static bool using_eh_for_cleanups_flag = false;
12383 /* This routine is called from front ends to indicate eh should be used for
12384 cleanups. */
12385 void
12386 using_eh_for_cleanups (void)
12388 using_eh_for_cleanups_flag = true;
12391 /* Query whether EH is used for cleanups. */
12392 bool
12393 using_eh_for_cleanups_p (void)
12395 return using_eh_for_cleanups_flag;
12398 /* Wrapper for tree_code_name to ensure that tree code is valid */
12399 const char *
12400 get_tree_code_name (enum tree_code code)
12402 const char *invalid = "<invalid tree code>";
12404 if (code >= MAX_TREE_CODES)
12405 return invalid;
12407 return tree_code_name[code];
12410 /* Drops the TREE_OVERFLOW flag from T. */
12412 tree
12413 drop_tree_overflow (tree t)
12415 gcc_checking_assert (TREE_OVERFLOW (t));
12417 /* For tree codes with a sharing machinery re-build the result. */
12418 if (TREE_CODE (t) == INTEGER_CST)
12419 return wide_int_to_tree (TREE_TYPE (t), wi::to_wide (t));
12421 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12422 and drop the flag. */
12423 t = copy_node (t);
12424 TREE_OVERFLOW (t) = 0;
12426 /* For constants that contain nested constants, drop the flag
12427 from those as well. */
12428 if (TREE_CODE (t) == COMPLEX_CST)
12430 if (TREE_OVERFLOW (TREE_REALPART (t)))
12431 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12432 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12433 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12435 if (TREE_CODE (t) == VECTOR_CST)
12437 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
12439 tree& elt = VECTOR_CST_ELT (t, i);
12440 if (TREE_OVERFLOW (elt))
12441 elt = drop_tree_overflow (elt);
12444 return t;
12447 /* Given a memory reference expression T, return its base address.
12448 The base address of a memory reference expression is the main
12449 object being referenced. For instance, the base address for
12450 'array[i].fld[j]' is 'array'. You can think of this as stripping
12451 away the offset part from a memory address.
12453 This function calls handled_component_p to strip away all the inner
12454 parts of the memory reference until it reaches the base object. */
12456 tree
12457 get_base_address (tree t)
12459 while (handled_component_p (t))
12460 t = TREE_OPERAND (t, 0);
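/* For a reference through a MEM_REF or TARGET_MEM_REF whose address operand
   is an ADDR_EXPR such as '&a', peel off the indirection and return the
   underlying declaration 'a'.  */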
12462 if ((TREE_CODE (t) == MEM_REF
12463 || TREE_CODE (t) == TARGET_MEM_REF)
12464 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12465 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12467 /* ??? Either the alias oracle or all callers need to properly deal
12468 with WITH_SIZE_EXPRs before we can look through those. */
12469 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12470 return NULL_TREE;
12472 return t;
12475 /* Return a tree of sizetype representing the size, in bytes, of the element
12476 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12478 tree
12479 array_ref_element_size (tree exp)
12481 tree aligned_size = TREE_OPERAND (exp, 3);
12482 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12483 location_t loc = EXPR_LOCATION (exp);
12485 /* If a size was specified in the ARRAY_REF, it's the size measured
12486 in alignment units of the element type. So multiply by that value. */
12487 if (aligned_size)
12489 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12490 sizetype from another type of the same width and signedness. */
12491 if (TREE_TYPE (aligned_size) != sizetype)
12492 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12493 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12494 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12497 /* Otherwise, take the size from that of the element type. Substitute
12498 any PLACEHOLDER_EXPR that we have. */
12499 else
12500 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12503 /* Return a tree representing the lower bound of the array mentioned in
12504 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12506 tree
12507 array_ref_low_bound (tree exp)
12509 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12511 /* If a lower bound is specified in EXP, use it. */
12512 if (TREE_OPERAND (exp, 2))
12513 return TREE_OPERAND (exp, 2);
12515 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12516 substituting for a PLACEHOLDER_EXPR as needed. */
12517 if (domain_type && TYPE_MIN_VALUE (domain_type))
12518 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12520 /* Otherwise, return a zero of the appropriate type. */
12521 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12524 /* Return a tree representing the upper bound of the array mentioned in
12525 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12527 tree
12528 array_ref_up_bound (tree exp)
12530 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12532 /* If there is a domain type and it has an upper bound, use it, substituting
12533 for a PLACEHOLDER_EXPR as needed. */
12534 if (domain_type && TYPE_MAX_VALUE (domain_type))
12535 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12537 /* Otherwise fail. */
12538 return NULL_TREE;
12541 /* Returns true if REF is an array reference or a component reference
12542 to an array at the end of a structure.
12543 If this is the case, the array may be allocated larger
12544 than its upper bound implies. */
12546 bool
12547 array_at_struct_end_p (tree ref)
12549 tree atype;
12551 if (TREE_CODE (ref) == ARRAY_REF
12552 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12554 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12555 ref = TREE_OPERAND (ref, 0);
12557 else if (TREE_CODE (ref) == COMPONENT_REF
12558 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12559 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12560 else
12561 return false;
12563 if (TREE_CODE (ref) == STRING_CST)
12564 return false;
12566 while (handled_component_p (ref))
12568 /* If the reference chain contains a component reference to a
12569 non-union type and there follows another field the reference
12570 is not at the end of a structure. */
12571 if (TREE_CODE (ref) == COMPONENT_REF)
12573 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12575 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12576 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12577 nextf = DECL_CHAIN (nextf);
12578 if (nextf)
12579 return false;
12582 /* If we have a multi-dimensional array we do not consider
12583 a non-innermost dimension as flex array if the whole
12584 multi-dimensional array is at struct end.
12585 Same for an array of aggregates with a trailing array
12586 member. */
12587 else if (TREE_CODE (ref) == ARRAY_REF)
12588 return false;
12589 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12591 /* If we view an underlying object as something else, then what we
12592 gathered up to now is what we have to rely on. */
12593 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12594 break;
12595 else
12596 gcc_unreachable ();
12598 ref = TREE_OPERAND (ref, 0);
12601 /* The array now is at struct end. Treat flexible arrays as
12602 always subject to extend, even into just padding constrained by
12603 an underlying decl. */
12604 if (! TYPE_SIZE (atype))
12605 return true;
12607 tree size = NULL;
12609 if (TREE_CODE (ref) == MEM_REF
12610 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
12612 size = TYPE_SIZE (TREE_TYPE (ref));
12613 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
12616 /* If the reference is based on a declared entity, the size of the array
12617 is constrained by its given domain. (Do not trust commons PR/69368). */
12618 if (DECL_P (ref)
12619 /* Be sure the size of MEM_REF target matches. For example:
12621 char buf[10];
12622 struct foo *str = (struct foo *)&buf;
12624 str->trailing_array[2] = 1;
12626 is valid because BUF allocates enough space. */
12628 && (!size || (DECL_SIZE (ref) != NULL
12629 && operand_equal_p (DECL_SIZE (ref), size, 0)))
12630 && !(flag_unconstrained_commons
12631 && VAR_P (ref) && DECL_COMMON (ref)))
12632 return false;
12634 return true;
12637 /* Return a tree representing the offset, in bytes, of the field referenced
12638 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12640 tree
12641 component_ref_field_offset (tree exp)
12643 tree aligned_offset = TREE_OPERAND (exp, 2);
12644 tree field = TREE_OPERAND (exp, 1);
12645 location_t loc = EXPR_LOCATION (exp);
12647 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12648 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12649 value. */
12650 if (aligned_offset)
12652 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12653 sizetype from another type of the same width and signedness. */
12654 if (TREE_TYPE (aligned_offset) != sizetype)
12655 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12656 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12657 size_int (DECL_OFFSET_ALIGN (field)
12658 / BITS_PER_UNIT));
12661 /* Otherwise, take the offset from that of the field. Substitute
12662 any PLACEHOLDER_EXPR that we have. */
12663 else
12664 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12667 /* Return the machine mode of T. For vectors, returns the mode of the
12668 inner type. The main use case is to feed the result to HONOR_NANS,
12669 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
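/* For example, for a vector of four floats or for a complex double this
   returns SFmode and DFmode respectively, rather than the vector or complex
   mode itself.  */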
12671 machine_mode
12672 element_mode (const_tree t)
12674 if (!TYPE_P (t))
12675 t = TREE_TYPE (t);
12676 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12677 t = TREE_TYPE (t);
12678 return TYPE_MODE (t);
12681 /* Vector types need to re-check the target flags each time we report
12682 the machine mode. We need to do this because attribute target can
12683 change the result of vector_mode_supported_p and have_regs_of_mode
12684 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12685 change on a per-function basis. */
12686 /* ??? Possibly a better solution is to run through all the types
12687 referenced by a function and re-compute the TYPE_MODE once, rather
12688 than make the TYPE_MODE macro call a function. */
12690 machine_mode
12691 vector_type_mode (const_tree t)
12693 machine_mode mode;
12695 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12697 mode = t->type_common.mode;
12698 if (VECTOR_MODE_P (mode)
12699 && (!targetm.vector_mode_supported_p (mode)
12700 || !have_regs_of_mode[mode]))
12702 scalar_int_mode innermode;
12704 /* For integers, try mapping it to a same-sized scalar mode. */
12705 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12707 unsigned int size = (TYPE_VECTOR_SUBPARTS (t)
12708 * GET_MODE_BITSIZE (innermode));
12709 scalar_int_mode mode;
12710 if (int_mode_for_size (size, 0).exists (&mode)
12711 && have_regs_of_mode[mode])
12712 return mode;
12715 return BLKmode;
12718 return mode;
12721 /* Verify that basic properties of T match TV and thus T can be a variant of
12722 TV. TV should be the more specified variant (i.e. the main variant). */
12724 static bool
12725 verify_type_variant (const_tree t, tree tv)
12727 /* Type variant can differ by:
12729 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12730 ENCODE_QUAL_ADDR_SPACE.
12731 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12732 in this case some values may not be set in the variant types
12733 (see TYPE_COMPLETE_P checks).
12734 - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
12735 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12736 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12737 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12738 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12739 this is necessary to make it possible to merge types from different TUs
12740 - arrays, pointers and references may have TREE_TYPE that is a variant
12741 of TREE_TYPE of their main variants.
12742 - aggregates may have a new TYPE_FIELDS list that lists variants of
12743 the main variant TYPE_FIELDS.
12744 - vector types may differ by TYPE_VECTOR_OPAQUE
12747 /* Convenience macro for matching individual fields. */
12748 #define verify_variant_match(flag) \
12749 do { \
12750 if (flag (tv) != flag (t)) \
12752 error ("type variant differs by " #flag "."); \
12753 debug_tree (tv); \
12754 return false; \
12756 } while (false)
12758 /* tree_base checks. */
12760 verify_variant_match (TREE_CODE);
12761 /* FIXME: Ada builds non-artificial variants of artificial types. */
12762 if (TYPE_ARTIFICIAL (tv) && 0)
12763 verify_variant_match (TYPE_ARTIFICIAL);
12764 if (POINTER_TYPE_P (tv))
12765 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12766 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12767 verify_variant_match (TYPE_UNSIGNED);
12768 verify_variant_match (TYPE_PACKED);
12769 if (TREE_CODE (t) == REFERENCE_TYPE)
12770 verify_variant_match (TYPE_REF_IS_RVALUE);
12771 if (AGGREGATE_TYPE_P (t))
12772 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
12773 else
12774 verify_variant_match (TYPE_SATURATING);
12775 /* FIXME: This check triggers during the libstdc++ build. */
12776 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12777 verify_variant_match (TYPE_FINAL_P);
12779 /* tree_type_common checks. */
12781 if (COMPLETE_TYPE_P (t))
12783 verify_variant_match (TYPE_MODE);
12784 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
12785 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
12786 verify_variant_match (TYPE_SIZE);
12787 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
12788 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
12789 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
12791 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
12792 TYPE_SIZE_UNIT (tv), 0));
12793 error ("type variant has different TYPE_SIZE_UNIT");
12794 debug_tree (tv);
12795 error ("type variant's TYPE_SIZE_UNIT");
12796 debug_tree (TYPE_SIZE_UNIT (tv));
12797 error ("type's TYPE_SIZE_UNIT");
12798 debug_tree (TYPE_SIZE_UNIT (t));
12799 return false;
12802 verify_variant_match (TYPE_PRECISION);
12803 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12804 if (RECORD_OR_UNION_TYPE_P (t))
12805 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12806 else if (TREE_CODE (t) == ARRAY_TYPE)
12807 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12808 /* During LTO we merge variant lists from different translation units
12809 that may differ by TYPE_CONTEXT which in turn may point
12810 to TRANSLATION_UNIT_DECL.
12811 Ada also builds variants of types with different TYPE_CONTEXT. */
12812 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12813 verify_variant_match (TYPE_CONTEXT);
12814 verify_variant_match (TYPE_STRING_FLAG);
12815 if (TYPE_ALIAS_SET_KNOWN_P (t))
12817 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
12818 debug_tree (tv);
12819 return false;
12822 /* tree_type_non_common checks. */
12824 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12825 and dangle the pointer from time to time. */
12826 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12827 && (in_lto_p || !TYPE_VFIELD (tv)
12828 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12830 error ("type variant has different TYPE_VFIELD");
12831 debug_tree (tv);
12832 return false;
12834 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12835 || TREE_CODE (t) == INTEGER_TYPE
12836 || TREE_CODE (t) == BOOLEAN_TYPE
12837 || TREE_CODE (t) == REAL_TYPE
12838 || TREE_CODE (t) == FIXED_POINT_TYPE)
12840 verify_variant_match (TYPE_MAX_VALUE);
12841 verify_variant_match (TYPE_MIN_VALUE);
12843 if (TREE_CODE (t) == METHOD_TYPE)
12844 verify_variant_match (TYPE_METHOD_BASETYPE);
12845 if (TREE_CODE (t) == OFFSET_TYPE)
12846 verify_variant_match (TYPE_OFFSET_BASETYPE);
12847 if (TREE_CODE (t) == ARRAY_TYPE)
12848 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12849 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12850 or even type's main variant. This is needed to make bootstrap pass
12851 and the bug seems new in GCC 5.
12852 C++ FE should be updated to make this consistent and we should check
12853 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12854 is a match with main variant.
12856 Also disable the check for Java for now because of a parser hack that builds
12857 first a dummy BINFO and then sometimes replaces it by a real BINFO in some
12858 of the copies. */
12859 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12860 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12861 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12862 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
12863 at LTO time only. */
12864 && (in_lto_p && odr_type_p (t)))
12866 error ("type variant has different TYPE_BINFO");
12867 debug_tree (tv);
12868 error ("type variant's TYPE_BINFO");
12869 debug_tree (TYPE_BINFO (tv));
12870 error ("type's TYPE_BINFO");
12871 debug_tree (TYPE_BINFO (t));
12872 return false;
12875 /* Check various uses of TYPE_VALUES_RAW. */
12876 if (TREE_CODE (t) == ENUMERAL_TYPE)
12877 verify_variant_match (TYPE_VALUES);
12878 else if (TREE_CODE (t) == ARRAY_TYPE)
12879 verify_variant_match (TYPE_DOMAIN);
12880 /* Permit incomplete variants of complete type. While FEs may complete
12881 all variants, this does not happen for C++ templates in all cases. */
12882 else if (RECORD_OR_UNION_TYPE_P (t)
12883 && COMPLETE_TYPE_P (t)
12884 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12886 tree f1, f2;
12888 /* Fortran builds qualified variants as new records with items of
12889 qualified type. Verify that they look the same. */
12890 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12891 f1 && f2;
12892 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12893 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12894 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12895 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12896 /* FIXME: gfc_nonrestricted_type builds all types as variants
12897 with the exception of pointer types. It deeply copies the type,
12898 which means that we may end up with a variant type
12899 referring to a non-variant pointer. We may change it to
12900 produce types as variants, too, like
12901 objc_get_protocol_qualified_type does. */
12902 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12903 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12904 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12905 break;
12906 if (f1 || f2)
12908 error ("type variant has different TYPE_FIELDS");
12909 debug_tree (tv);
12910 error ("first mismatch is field");
12911 debug_tree (f1);
12912 error ("and field");
12913 debug_tree (f2);
12914 return false;
12917 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12918 verify_variant_match (TYPE_ARG_TYPES);
12919 /* For C++ the qualified variant of array type is really an array type
12920 of qualified TREE_TYPE.
12921 objc builds variants of pointer where pointer to type is a variant, too
12922 in objc_get_protocol_qualified_type. */
12923 if (TREE_TYPE (t) != TREE_TYPE (tv)
12924 && ((TREE_CODE (t) != ARRAY_TYPE
12925 && !POINTER_TYPE_P (t))
12926 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12927 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12929 error ("type variant has different TREE_TYPE");
12930 debug_tree (tv);
12931 error ("type variant's TREE_TYPE");
12932 debug_tree (TREE_TYPE (tv));
12933 error ("type's TREE_TYPE");
12934 debug_tree (TREE_TYPE (t));
12935 return false;
12937 if (type_with_alias_set_p (t)
12938 && !gimple_canonical_types_compatible_p (t, tv, false))
12940 error ("type is not compatible with its variant");
12941 debug_tree (tv);
12942 error ("type variant's TREE_TYPE");
12943 debug_tree (TREE_TYPE (tv));
12944 error ("type's TREE_TYPE");
12945 debug_tree (TREE_TYPE (t));
12946 return false;
12948 return true;
12949 #undef verify_variant_match
12953 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12954 the middle-end types_compatible_p function. It needs to avoid
12955 claiming types are different for types that should be treated
12956 the same with respect to TBAA. Canonical types are also used
12957 for IL consistency checks via the useless_type_conversion_p
12958 predicate which does not handle all type kinds itself but falls
12959 back to pointer-comparison of TYPE_CANONICAL for aggregates
12960 for example. */
12962 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
12963 type calculation because we need to allow inter-operability between signed
12964 and unsigned variants. */
12966 bool
12967 type_with_interoperable_signedness (const_tree type)
12969 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
12970 signed char and unsigned char. Similarly the Fortran FE builds
12971 C_SIZE_T as a signed type, while C defines it unsigned. */
12973 return tree_code_for_canonical_type_merging (TREE_CODE (type))
12974 == INTEGER_TYPE
12975 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
12976 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
12979 /* Return true iff T1 and T2 are structurally identical for what
12980 TBAA is concerned.
12981 This function is used both by lto.c canonical type merging and by the
12982 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
12983 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
12984 only for LTO because only in these cases TYPE_CANONICAL equivalence
12985 correspond to one defined by gimple_canonical_types_compatible_p. */
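/* For example, under this predicate 'int *' and 'float *' in the same address
   space are compatible, because all pointers are globbed together for TBAA,
   while 'int' and 'long' are not compatible once their precisions differ.  */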
12987 bool
12988 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
12989 bool trust_type_canonical)
12991 /* Type variants should be same as the main variant. When not doing sanity
12992 checking to verify this fact, go to main variants and save some work. */
12993 if (trust_type_canonical)
12995 t1 = TYPE_MAIN_VARIANT (t1);
12996 t2 = TYPE_MAIN_VARIANT (t2);
12999 /* Check first for the obvious case of pointer identity. */
13000 if (t1 == t2)
13001 return true;
13003 /* Check that we have two types to compare. */
13004 if (t1 == NULL_TREE || t2 == NULL_TREE)
13005 return false;
13007 /* We consider complete types always compatible with incomplete type.
13008 This does not make sense for canonical type calculation and thus we
13009 need to ensure that we are never called on it.
13011 FIXME: For more correctness the function probably should have three modes
13012 1) mode assuming that types are complete, matching their structure
13013 2) mode allowing incomplete types but producing equivalence classes
13014 and thus ignoring all info from complete types
13015 3) mode allowing incomplete types to match complete but checking
13016 compatibility between complete types.
13018 1 and 2 can be used for canonical type calculation. 3 is the real
13019 definition of type compatibility that can be used i.e. for warnings during
13020 declaration merging. */
13022 gcc_assert (!trust_type_canonical
13023 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13024 /* If the types have been previously registered and found equal
13025 they still are. */
13027 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13028 && trust_type_canonical)
13030 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13031 they are always NULL, but they are set to non-NULL for types
13032 constructed by build_pointer_type and variants. In this case the
13033 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13034 all pointers are considered equal). Be sure not to return false
13035 negatives. */
13036 gcc_checking_assert (canonical_type_used_p (t1)
13037 && canonical_type_used_p (t2));
13038 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13041 /* Can't be the same type if the types don't have the same code. */
13042 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13043 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13044 return false;
13046 /* Qualifiers do not matter for canonical type comparison purposes. */
13048 /* Void types and nullptr types are always the same. */
13049 if (TREE_CODE (t1) == VOID_TYPE
13050 || TREE_CODE (t1) == NULLPTR_TYPE)
13051 return true;
13053 /* Can't be the same type if they have different mode. */
13054 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13055 return false;
13057 /* Non-aggregate types can be handled cheaply. */
13058 if (INTEGRAL_TYPE_P (t1)
13059 || SCALAR_FLOAT_TYPE_P (t1)
13060 || FIXED_POINT_TYPE_P (t1)
13061 || TREE_CODE (t1) == VECTOR_TYPE
13062 || TREE_CODE (t1) == COMPLEX_TYPE
13063 || TREE_CODE (t1) == OFFSET_TYPE
13064 || POINTER_TYPE_P (t1))
13066 /* Can't be the same type if they have different precision. */
13067 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13068 return false;
13070 /* In some cases the signed and unsigned types are required to be
13071 inter-operable. */
13072 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13073 && !type_with_interoperable_signedness (t1))
13074 return false;
13076 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13077 interoperable with "signed char". Unless all frontends are revisited
13078 to agree on these types, we must ignore the flag completely. */
13080 /* The Fortran standard defines a C_PTR type that is compatible with every
13081 C pointer. For this reason we need to glob all pointers into one.
13082 Still pointers in different address spaces are not compatible. */
13083 if (POINTER_TYPE_P (t1))
13085 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13086 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13087 return false;
13090 /* Tail-recurse to components. */
13091 if (TREE_CODE (t1) == VECTOR_TYPE
13092 || TREE_CODE (t1) == COMPLEX_TYPE)
13093 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13094 TREE_TYPE (t2),
13095 trust_type_canonical);
13097 return true;
13100 /* Do type-specific comparisons. */
13101 switch (TREE_CODE (t1))
13103 case ARRAY_TYPE:
13104 /* Array types are the same if the element types are the same and
13105 the number of elements is the same. */
13106 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13107 trust_type_canonical)
13108 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13109 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13110 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13111 return false;
13112 else
13114 tree i1 = TYPE_DOMAIN (t1);
13115 tree i2 = TYPE_DOMAIN (t2);
13117 /* For an incomplete external array, the type domain can be
13118 NULL_TREE. Check this condition also. */
13119 if (i1 == NULL_TREE && i2 == NULL_TREE)
13120 return true;
13121 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13122 return false;
13123 else
13125 tree min1 = TYPE_MIN_VALUE (i1);
13126 tree min2 = TYPE_MIN_VALUE (i2);
13127 tree max1 = TYPE_MAX_VALUE (i1);
13128 tree max2 = TYPE_MAX_VALUE (i2);
13130 /* The minimum/maximum values have to be the same. */
13131 if ((min1 == min2
13132 || (min1 && min2
13133 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13134 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13135 || operand_equal_p (min1, min2, 0))))
13136 && (max1 == max2
13137 || (max1 && max2
13138 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13139 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13140 || operand_equal_p (max1, max2, 0)))))
13141 return true;
13142 else
13143 return false;
13147 case METHOD_TYPE:
13148 case FUNCTION_TYPE:
13149 /* Function types are the same if the return type and arguments types
13150 are the same. */
13151 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13152 trust_type_canonical))
13153 return false;
13155 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13156 return true;
13157 else
13159 tree parms1, parms2;
13161 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13162 parms1 && parms2;
13163 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13165 if (!gimple_canonical_types_compatible_p
13166 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13167 trust_type_canonical))
13168 return false;
13171 if (parms1 || parms2)
13172 return false;
13174 return true;
13177 case RECORD_TYPE:
13178 case UNION_TYPE:
13179 case QUAL_UNION_TYPE:
13181 tree f1, f2;
13183 /* Don't try to compare variants of an incomplete type, before
13184 TYPE_FIELDS has been copied around. */
13185 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13186 return true;
13189 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13190 return false;
13192 /* For aggregate types, all the fields must be the same. */
13193 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13194 f1 || f2;
13195 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13197 /* Skip non-fields and zero-sized fields. */
13198 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13199 || (DECL_SIZE (f1)
13200 && integer_zerop (DECL_SIZE (f1)))))
13201 f1 = TREE_CHAIN (f1);
13202 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13203 || (DECL_SIZE (f2)
13204 && integer_zerop (DECL_SIZE (f2)))))
13205 f2 = TREE_CHAIN (f2);
13206 if (!f1 || !f2)
13207 break;
13208 /* The fields must have the same name, offset and type. */
13209 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13210 || !gimple_compare_field_offset (f1, f2)
13211 || !gimple_canonical_types_compatible_p
13212 (TREE_TYPE (f1), TREE_TYPE (f2),
13213 trust_type_canonical))
13214 return false;
13217 /* If one aggregate has more fields than the other, they
13218 are not the same. */
13219 if (f1 || f2)
13220 return false;
13222 return true;
13225 default:
13226 /* Consider all types with language specific trees in them mutually
13227 compatible. This is executed only from verify_type and false
13228 positives can be tolerated. */
13229 gcc_assert (!in_lto_p);
13230 return true;
13234 /* Verify type T. */
13236 void
13237 verify_type (const_tree t)
13239 bool error_found = false;
13240 tree mv = TYPE_MAIN_VARIANT (t);
13241 if (!mv)
13243 error ("Main variant is not defined");
13244 error_found = true;
13246 else if (mv != TYPE_MAIN_VARIANT (mv))
13248 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13249 debug_tree (mv);
13250 error_found = true;
13252 else if (t != mv && !verify_type_variant (t, mv))
13253 error_found = true;
13255 tree ct = TYPE_CANONICAL (t);
13256 if (!ct)
13258 else if (TYPE_CANONICAL (t) != ct)
13260 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13261 debug_tree (ct);
13262 error_found = true;
13264 /* Method and function types can not be used to address memory and thus
13265 TYPE_CANONICAL really matters only for determining useless conversions.
13267 FIXME: C++ FE produce declarations of builtin functions that are not
13268 compatible with main variants. */
13269 else if (TREE_CODE (t) == FUNCTION_TYPE)
13271 else if (t != ct
13272 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13273 with variably sized arrays because their sizes possibly
13274 gimplified to different variables. */
13275 && !variably_modified_type_p (ct, NULL)
13276 && !gimple_canonical_types_compatible_p (t, ct, false))
13278 error ("TYPE_CANONICAL is not compatible");
13279 debug_tree (ct);
13280 error_found = true;
13283 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13284 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13286 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13287 debug_tree (ct);
13288 error_found = true;
13290 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13292 error ("TYPE_CANONICAL of main variant is not main variant");
13293 debug_tree (ct);
13294 debug_tree (TYPE_MAIN_VARIANT (ct));
13295 error_found = true;
13299 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13300 if (RECORD_OR_UNION_TYPE_P (t))
13302 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13303 and dangle the pointer from time to time. */
13304 if (TYPE_VFIELD (t)
13305 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13306 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13308 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13309 debug_tree (TYPE_VFIELD (t));
13310 error_found = true;
13313 else if (TREE_CODE (t) == POINTER_TYPE)
13315 if (TYPE_NEXT_PTR_TO (t)
13316 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13318 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13319 debug_tree (TYPE_NEXT_PTR_TO (t));
13320 error_found = true;
13323 else if (TREE_CODE (t) == REFERENCE_TYPE)
13325 if (TYPE_NEXT_REF_TO (t)
13326 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13328 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13329 debug_tree (TYPE_NEXT_REF_TO (t));
13330 error_found = true;
13333 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13334 || TREE_CODE (t) == FIXED_POINT_TYPE)
13336 /* FIXME: The following check should pass:
13337 useless_type_conversion_p (const_cast <tree> (t),
13338 TREE_TYPE (TYPE_MIN_VALUE (t))
13339 but does not for C sizetypes in LTO. */
13342 /* Check various uses of TYPE_MAXVAL_RAW. */
13343 if (RECORD_OR_UNION_TYPE_P (t))
13345 if (!TYPE_BINFO (t))
13347 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13349 error ("TYPE_BINFO is not TREE_BINFO");
13350 debug_tree (TYPE_BINFO (t));
13351 error_found = true;
13353 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13355 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13356 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13357 error_found = true;
13360 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13362 if (TYPE_METHOD_BASETYPE (t)
13363 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13364 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13366 error ("TYPE_METHOD_BASETYPE is not record nor union");
13367 debug_tree (TYPE_METHOD_BASETYPE (t));
13368 error_found = true;
13371 else if (TREE_CODE (t) == OFFSET_TYPE)
13373 if (TYPE_OFFSET_BASETYPE (t)
13374 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13375 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13377 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13378 debug_tree (TYPE_OFFSET_BASETYPE (t));
13379 error_found = true;
13382 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13383 || TREE_CODE (t) == FIXED_POINT_TYPE)
13385 /* FIXME: The following check should pass:
13386 useless_type_conversion_p (const_cast <tree> (t),
13387 TREE_TYPE (TYPE_MAX_VALUE (t))
13388 but does not for C sizetypes in LTO. */
13390 else if (TREE_CODE (t) == ARRAY_TYPE)
13392 if (TYPE_ARRAY_MAX_SIZE (t)
13393 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13395 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13396 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13397 error_found = true;
13400 else if (TYPE_MAX_VALUE_RAW (t))
13402 error ("TYPE_MAX_VALUE_RAW non-NULL");
13403 debug_tree (TYPE_MAX_VALUE_RAW (t));
13404 error_found = true;
13407 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13409 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13410 debug_tree (TYPE_LANG_SLOT_1 (t));
13411 error_found = true;
13414 /* Check various uses of TYPE_VALUES_RAW. */
13415 if (TREE_CODE (t) == ENUMERAL_TYPE)
13416 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13418 tree value = TREE_VALUE (l);
13419 tree name = TREE_PURPOSE (l);
13421 /* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13422 CONST_DECL of ENUMERAL_TYPE. */
13423 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13425 error ("Enum value is not CONST_DECL or INTEGER_CST");
13426 debug_tree (value);
13427 debug_tree (name);
13428 error_found = true;
13430 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13431 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13433 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13434 debug_tree (value);
13435 debug_tree (name);
13436 error_found = true;
13438 if (TREE_CODE (name) != IDENTIFIER_NODE)
13440 error ("Enum value name is not IDENTIFIER_NODE");
13441 debug_tree (value);
13442 debug_tree (name);
13443 error_found = true;
13446 else if (TREE_CODE (t) == ARRAY_TYPE)
13448 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13450 error ("Array TYPE_DOMAIN is not integer type");
13451 debug_tree (TYPE_DOMAIN (t));
13452 error_found = true;
13455 else if (RECORD_OR_UNION_TYPE_P (t))
13457 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13459 error ("TYPE_FIELDS defined in incomplete type");
13460 error_found = true;
13462 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13464 /* TODO: verify properties of decls. */
13465 if (TREE_CODE (fld) == FIELD_DECL)
13467 else if (TREE_CODE (fld) == TYPE_DECL)
13469 else if (TREE_CODE (fld) == CONST_DECL)
13471 else if (VAR_P (fld))
13473 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13475 else if (TREE_CODE (fld) == USING_DECL)
13477 else if (TREE_CODE (fld) == FUNCTION_DECL)
13479 else
13481 error ("Wrong tree in TYPE_FIELDS list");
13482 debug_tree (fld);
13483 error_found = true;
13487 else if (TREE_CODE (t) == INTEGER_TYPE
13488 || TREE_CODE (t) == BOOLEAN_TYPE
13489 || TREE_CODE (t) == OFFSET_TYPE
13490 || TREE_CODE (t) == REFERENCE_TYPE
13491 || TREE_CODE (t) == NULLPTR_TYPE
13492 || TREE_CODE (t) == POINTER_TYPE)
13494 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13496 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13497 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13498 error_found = true;
13500 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13502 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13503 debug_tree (TYPE_CACHED_VALUES (t));
13504 error_found = true;
13506 /* Verify just enough of cache to ensure that no one copied it to new type.
13507 All copying should go by copy_node that should clear it. */
13508 else if (TYPE_CACHED_VALUES_P (t))
13510 int i;
13511 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13512 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13513 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13515 error ("wrong TYPE_CACHED_VALUES entry");
13516 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13517 error_found = true;
13518 break;
13522 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13523 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13525 /* C++ FE uses TREE_PURPOSE to store initial values. */
13526 if (TREE_PURPOSE (l) && in_lto_p)
13528 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13529 debug_tree (l);
13530 error_found = true;
13532 if (!TYPE_P (TREE_VALUE (l)))
13534 error ("Wrong entry in TYPE_ARG_TYPES list");
13535 debug_tree (l);
13536 error_found = true;
13539 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13541 error ("TYPE_VALUES_RAW field is non-NULL");
13542 debug_tree (TYPE_VALUES_RAW (t));
13543 error_found = true;
13545 if (TREE_CODE (t) != INTEGER_TYPE
13546 && TREE_CODE (t) != BOOLEAN_TYPE
13547 && TREE_CODE (t) != OFFSET_TYPE
13548 && TREE_CODE (t) != REFERENCE_TYPE
13549 && TREE_CODE (t) != NULLPTR_TYPE
13550 && TREE_CODE (t) != POINTER_TYPE
13551 && TYPE_CACHED_VALUES_P (t))
13553 error ("TYPE_CACHED_VALUES_P is set while it should not");
13554 error_found = true;
13556 if (TYPE_STRING_FLAG (t)
13557 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13559 error ("TYPE_STRING_FLAG is set on wrong type code");
13560 error_found = true;
13563 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13564 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13565 of a type. */
13566 if (TREE_CODE (t) == METHOD_TYPE
13567 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13569 error ("TYPE_METHOD_BASETYPE is not main variant");
13570 error_found = true;
13573 if (error_found)
13575 debug_tree (const_cast <tree> (t));
13576 internal_error ("verify_type failed");
13581 /* Return 1 if ARG interpreted as signed in its precision is known to be
13582 always positive or 2 if ARG is known to be always negative, or 3 if
13583 ARG may be positive or negative. */
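/* For example, for a value zero-extended from a narrower unsigned type, such
   as '(int) (unsigned short) x', this returns 1, since the result can only be
   non-negative; for a plain 'int' SSA name with no recorded range it
   returns 3.  */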
13585 int
13586 get_range_pos_neg (tree arg)
13588 if (arg == error_mark_node)
13589 return 3;
13591 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13592 int cnt = 0;
13593 if (TREE_CODE (arg) == INTEGER_CST)
13595 wide_int w = wi::sext (wi::to_wide (arg), prec);
13596 if (wi::neg_p (w))
13597 return 2;
13598 else
13599 return 1;
13601 while (CONVERT_EXPR_P (arg)
13602 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13603 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13605 arg = TREE_OPERAND (arg, 0);
13606 /* A narrower value zero-extended into a wider type
13607 will always result in a positive value. */
13608 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13609 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13610 return 1;
13611 prec = TYPE_PRECISION (TREE_TYPE (arg));
13612 if (++cnt > 30)
13613 return 3;
13616 if (TREE_CODE (arg) != SSA_NAME)
13617 return 3;
13618 wide_int arg_min, arg_max;
13619 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
13621 gimple *g = SSA_NAME_DEF_STMT (arg);
13622 if (is_gimple_assign (g)
13623 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13625 tree t = gimple_assign_rhs1 (g);
13626 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13627 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13629 if (TYPE_UNSIGNED (TREE_TYPE (t))
13630 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13631 return 1;
13632 prec = TYPE_PRECISION (TREE_TYPE (t));
13633 arg = t;
13634 if (++cnt > 30)
13635 return 3;
13636 continue;
13639 return 3;
13641 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13643 /* For unsigned values, the "positive" range comes
13644 below the "negative" range. */
13645 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13646 return 1;
13647 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13648 return 2;
13650 else
13652 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13653 return 1;
13654 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13655 return 2;
13657 return 3;
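/* A minimal usage sketch (hypothetical; guarded out and not part of the
   original file): callers typically branch on the tristate result.  */
#if 0
static void
example_range_pos_neg (tree op)
{
  int pn = get_range_pos_neg (op);
  if (pn == 1)
    ;   /* OP is known to be always positive (sign bit clear).  */
  else if (pn == 2)
    ;   /* OP is known to be always negative.  */
  else
    ;   /* pn == 3: the sign of OP is unknown.  */
}
#endif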
13663 /* Return true if ARG is marked with the nonnull attribute in the
13664 current function signature. */
13666 bool
13667 nonnull_arg_p (const_tree arg)
13669 tree t, attrs, fntype;
13670 unsigned HOST_WIDE_INT arg_num;
13672 gcc_assert (TREE_CODE (arg) == PARM_DECL
13673 && (POINTER_TYPE_P (TREE_TYPE (arg))
13674 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13676 /* The static chain decl is always non-NULL. */
13677 if (arg == cfun->static_chain_decl)
13678 return true;
13680 /* THIS argument of method is always non-NULL. */
13681 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13682 && arg == DECL_ARGUMENTS (cfun->decl)
13683 && flag_delete_null_pointer_checks)
13684 return true;
13686 /* Values passed by reference are always non-NULL. */
13687 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13688 && flag_delete_null_pointer_checks)
13689 return true;
13691 fntype = TREE_TYPE (cfun->decl);
13692 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13694 attrs = lookup_attribute ("nonnull", attrs);
13696 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13697 if (attrs == NULL_TREE)
13698 return false;
13700 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13701 if (TREE_VALUE (attrs) == NULL_TREE)
13702 return true;
13704 /* Get the position number for ARG in the function signature. */
13705 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13707 t = DECL_CHAIN (t), arg_num++)
13709 if (t == arg)
13710 break;
13713 gcc_assert (t == arg);
13715 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13716 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13718 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13719 return true;
13723 return false;
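/* A minimal usage sketch (hypothetical; guarded out and not part of the
   original file): a pass with CFUN set might query each pointer parameter.  */
#if 0
static void
example_scan_nonnull_parms (void)
{
  for (tree parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    if (POINTER_TYPE_P (TREE_TYPE (parm)) && nonnull_arg_p (parm))
      {
        /* PARM is known to be non-NULL on entry to the function.  */
      }
}
#endif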
13726 /* Combine LOC and BLOCK into a combined adhoc location, retaining any
13727 range information. */
13729 location_t
13730 set_block (location_t loc, tree block)
13732 location_t pure_loc = get_pure_location (loc);
13733 source_range src_range = get_range_from_loc (line_table, loc);
13734 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13737 location_t
13738 set_source_range (tree expr, location_t start, location_t finish)
13740 source_range src_range;
13741 src_range.m_start = start;
13742 src_range.m_finish = finish;
13743 return set_source_range (expr, src_range);
13746 location_t
13747 set_source_range (tree expr, source_range src_range)
13749 if (!EXPR_P (expr))
13750 return UNKNOWN_LOCATION;
13752 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13753 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13754 pure_loc,
13755 src_range,
13756 NULL);
13757 SET_EXPR_LOCATION (expr, adhoc);
13758 return adhoc;
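/* A minimal usage sketch (hypothetical; guarded out and not part of the
   original file): a front end that knows the start and end locations of
   an expression can attach the range while keeping the caret location.  */
#if 0
static void
example_attach_range (tree expr, location_t start, location_t finish)
{
  /* Only EXPR_P nodes carry a location; others yield UNKNOWN_LOCATION.  */
  location_t loc = set_source_range (expr, start, finish);
  if (loc != UNKNOWN_LOCATION)
    {
      /* EXPR_LOCATION (expr) is now an adhoc location combining the
         original caret with the [START, FINISH] range.  */
    }
}
#endif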
13761 /* Return the name of combined function FN, for debugging purposes. */
13763 const char *
13764 combined_fn_name (combined_fn fn)
13766 if (builtin_fn_p (fn))
13768 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
13769 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
13771 else
13772 return internal_fn_name (as_internal_fn (fn));
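/* A minimal usage sketch (hypothetical; guarded out and not part of the
   original file): built-in and internal functions can be named uniformly.  */
#if 0
static void
example_print_fn_names (void)
{
  fprintf (stderr, "%s %s\n",
           combined_fn_name (as_combined_fn (BUILT_IN_MEMCPY)),
           combined_fn_name (as_combined_fn (IFN_SQRT)));
}
#endif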
13775 /* Return a bitmap with a bit set corresponding to each argument in
13776 a function call type FNTYPE declared with attribute nonnull,
13777 or null if none of the function's arguments are nonnull. The caller
13778 must free the bitmap. */
13780 bitmap
13781 get_nonnull_args (const_tree fntype)
13783 if (fntype == NULL_TREE)
13784 return NULL;
13786 tree attrs = TYPE_ATTRIBUTES (fntype);
13787 if (!attrs)
13788 return NULL;
13790 bitmap argmap = NULL;
13792 /* A function declaration can specify multiple attribute nonnull,
13793 each with zero or more arguments. The loop below creates a bitmap
13794 representing the union of all the arguments. An empty (but non-null)
13795 bitmap means that all arguments have been declared nonnull. */
13796 for ( ; attrs; attrs = TREE_CHAIN (attrs))
13798 attrs = lookup_attribute ("nonnull", attrs);
13799 if (!attrs)
13800 break;
13802 if (!argmap)
13803 argmap = BITMAP_ALLOC (NULL);
13805 if (!TREE_VALUE (attrs))
13807 /* Clear the bitmap in case a previous attribute nonnull
13808 set it and this one overrides it for all arguments. */
13809 bitmap_clear (argmap);
13810 return argmap;
13813 /* Iterate over the indices of the arguments declared nonnull
13814 and set a bit for each. */
13815 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
13817 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
13818 bitmap_set_bit (argmap, val);
13822 return argmap;
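/* A minimal usage sketch (hypothetical; guarded out and not part of the
   original file): the bitmap holds zero-based argument positions, an
   empty but non-null bitmap means every argument is nonnull, and the
   caller must free it.  */
#if 0
static bool
example_arg_is_nonnull (const_tree fntype, unsigned argno)
{
  bitmap nn = get_nonnull_args (fntype);
  if (!nn)
    return false;
  bool result = bitmap_empty_p (nn) || bitmap_bit_p (nn, argno);
  BITMAP_FREE (nn);
  return result;
}
#endif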
13825 /* List of pointer types used to declare builtins before we have seen their
13826 real declaration.
13828 Keep the size up to date in tree.h! */
13829 const builtin_structptr_type builtin_structptr_types[6] =
13831 { fileptr_type_node, ptr_type_node, "FILE" },
13832 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
13833 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
13834 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
13835 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
13836 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
13839 #if CHECKING_P
13841 namespace selftest {
13843 /* Selftests for tree. */
13845 /* Verify that integer constants are sane. */
13847 static void
13848 test_integer_constants ()
13850 ASSERT_TRUE (integer_type_node != NULL);
13851 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
13853 tree type = integer_type_node;
13855 tree zero = build_zero_cst (type);
13856 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
13857 ASSERT_EQ (type, TREE_TYPE (zero));
13859 tree one = build_int_cst (type, 1);
13860 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
13861 ASSERT_EQ (type, TREE_TYPE (one));
13864 /* Verify identifiers. */
13866 static void
13867 test_identifiers ()
13869 tree identifier = get_identifier ("foo");
13870 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
13871 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
13874 /* Verify LABEL_DECL. */
13876 static void
13877 test_labels ()
13879 tree identifier = get_identifier ("err");
13880 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
13881 identifier, void_type_node);
13882 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
13883 ASSERT_FALSE (FORCED_LABEL (label_decl));
13886 /* Run all of the selftests within this file. */
13888 void
13889 tree_c_tests ()
13891 test_integer_constants ();
13892 test_identifiers ();
13893 test_labels ();
13896 } // namespace selftest
13898 #endif /* CHECKING_P */
13900 #include "gt-tree.h"