/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
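/* As an illustration, a tree.def entry such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands above to the class tcc_binary in tree_code_type, the operand
   count 2 in tree_code_length, and the string "plus_expr" in
   tree_code_name.  */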
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};

/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

static int tree_code_counts[MAX_TREE_CODES];
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];
/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;

/* General tree->tree mapping structure for use in hash tables.  */

static GTY ((cache))
  hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
  hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
  hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

bool tree_contains_struct[MAX_TREE_CODES][64];
/* Number of operands for each OpenMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR */
  1, /* OMP_CLAUSE_PRIVATE */
  1, /* OMP_CLAUSE_SHARED */
  1, /* OMP_CLAUSE_FIRSTPRIVATE */
  2, /* OMP_CLAUSE_LASTPRIVATE */
  5, /* OMP_CLAUSE_REDUCTION */
  1, /* OMP_CLAUSE_COPYIN */
  1, /* OMP_CLAUSE_COPYPRIVATE */
  3, /* OMP_CLAUSE_LINEAR */
  2, /* OMP_CLAUSE_ALIGNED */
  1, /* OMP_CLAUSE_DEPEND */
  1, /* OMP_CLAUSE_UNIFORM */
  1, /* OMP_CLAUSE_TO_DECLARE */
  1, /* OMP_CLAUSE_LINK */
  2, /* OMP_CLAUSE_FROM */
  2, /* OMP_CLAUSE_TO */
  2, /* OMP_CLAUSE_MAP */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR */
  2, /* OMP_CLAUSE__CACHE_ */
  2, /* OMP_CLAUSE_GANG */
  1, /* OMP_CLAUSE_ASYNC */
  1, /* OMP_CLAUSE_WAIT */
  0, /* OMP_CLAUSE_AUTO */
  0, /* OMP_CLAUSE_SEQ */
  1, /* OMP_CLAUSE__LOOPTEMP_ */
  1, /* OMP_CLAUSE_IF */
  1, /* OMP_CLAUSE_NUM_THREADS */
  1, /* OMP_CLAUSE_SCHEDULE */
  0, /* OMP_CLAUSE_NOWAIT */
  1, /* OMP_CLAUSE_ORDERED */
  0, /* OMP_CLAUSE_DEFAULT */
  3, /* OMP_CLAUSE_COLLAPSE */
  0, /* OMP_CLAUSE_UNTIED */
  1, /* OMP_CLAUSE_FINAL */
  0, /* OMP_CLAUSE_MERGEABLE */
  1, /* OMP_CLAUSE_DEVICE */
  1, /* OMP_CLAUSE_DIST_SCHEDULE */
  0, /* OMP_CLAUSE_INBRANCH */
  0, /* OMP_CLAUSE_NOTINBRANCH */
  1, /* OMP_CLAUSE_NUM_TEAMS */
  1, /* OMP_CLAUSE_THREAD_LIMIT */
  0, /* OMP_CLAUSE_PROC_BIND */
  1, /* OMP_CLAUSE_SAFELEN */
  1, /* OMP_CLAUSE_SIMDLEN */
  0, /* OMP_CLAUSE_FOR */
  0, /* OMP_CLAUSE_PARALLEL */
  0, /* OMP_CLAUSE_SECTIONS */
  0, /* OMP_CLAUSE_TASKGROUP */
  1, /* OMP_CLAUSE_PRIORITY */
  1, /* OMP_CLAUSE_GRAINSIZE */
  1, /* OMP_CLAUSE_NUM_TASKS */
  0, /* OMP_CLAUSE_NOGROUP */
  0, /* OMP_CLAUSE_THREADS */
  0, /* OMP_CLAUSE_SIMD */
  1, /* OMP_CLAUSE_HINT */
  0, /* OMP_CLAUSE_DEFAULTMAP */
  1, /* OMP_CLAUSE__SIMDUID_ */
  0, /* OMP_CLAUSE__SIMT_ */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
  0, /* OMP_CLAUSE_INDEPENDENT */
  1, /* OMP_CLAUSE_WORKER */
  1, /* OMP_CLAUSE_VECTOR */
  1, /* OMP_CLAUSE_NUM_GANGS */
  1, /* OMP_CLAUSE_NUM_WORKERS */
  1, /* OMP_CLAUSE_VECTOR_LENGTH */
  3, /* OMP_CLAUSE_TILE */
  2, /* OMP_CLAUSE__GRIDDIM_ */
};
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "is_device_ptr",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "_simduid_",
  "_simt_",
  "_Cilk_for_count_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "_griddim_"
};
/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
        switch (code)
          {
          case FIELD_DECL:
            return TS_FIELD_DECL;
          case PARM_DECL:
            return TS_PARM_DECL;
          case VAR_DECL:
            return TS_VAR_DECL;
          case LABEL_DECL:
            return TS_LABEL_DECL;
          case RESULT_DECL:
            return TS_RESULT_DECL;
          case DEBUG_EXPR_DECL:
            return TS_DECL_WRTL;
          case CONST_DECL:
            return TS_CONST_DECL;
          case TYPE_DECL:
            return TS_TYPE_DECL;
          case FUNCTION_DECL:
            return TS_FUNCTION_DECL;
          case TRANSLATION_UNIT_DECL:
            return TS_TRANSLATION_UNIT_DECL;
          default:
            return TS_DECL_NON_COMMON;
          }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST:		return TS_TYPED;
    case INTEGER_CST:		return TS_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case FIXED_CST:		return TS_FIXED_CST;
    case COMPLEX_CST:		return TS_COMPLEX;
    case VECTOR_CST:		return TS_VECTOR;
    case STRING_CST:		return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;
    case SSA_NAME:		return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case TREE_BINFO:		return TS_BINFO;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
        {
        case TS_TYPED:
        case TS_BLOCK:
        case TS_OPTIMIZATION:
        case TS_TARGET_OPTION:
          MARK_TS_BASE (code);
          break;

        case TS_COMMON:
        case TS_INT_CST:
        case TS_REAL_CST:
        case TS_FIXED_CST:
        case TS_VECTOR:
        case TS_STRING:
        case TS_COMPLEX:
        case TS_SSA_NAME:
        case TS_CONSTRUCTOR:
        case TS_EXP:
        case TS_STATEMENT_LIST:
          MARK_TS_TYPED (code);
          break;

        case TS_IDENTIFIER:
        case TS_DECL_MINIMAL:
        case TS_TYPE_COMMON:
        case TS_LIST:
        case TS_VEC:
        case TS_BINFO:
        case TS_OMP_CLAUSE:
          MARK_TS_COMMON (code);
          break;

        case TS_TYPE_WITH_LANG_SPECIFIC:
          MARK_TS_TYPE_COMMON (code);
          break;

        case TS_TYPE_NON_COMMON:
          MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
          break;

        case TS_DECL_COMMON:
          MARK_TS_DECL_MINIMAL (code);
          break;

        case TS_DECL_WRTL:
        case TS_CONST_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_DECL_NON_COMMON:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_DECL_WITH_VIS:
        case TS_PARM_DECL:
        case TS_LABEL_DECL:
        case TS_RESULT_DECL:
          MARK_TS_DECL_WRTL (code);
          break;

        case TS_FIELD_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_VAR_DECL:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_TYPE_DECL:
        case TS_FUNCTION_DECL:
          MARK_TS_DECL_NON_COMMON (code);
          break;

        case TS_TRANSLATION_UNIT_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
/* Init tree.c.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */
tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_ASSEMBLER_NAME_RAW (decl);
}

/* When the target supports COMDAT groups, this indicates which group the
   DECL is associated with.  This can be either an IDENTIFIER_NODE or a
   decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
tree
decl_comdat_group (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group ();
}

/* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
tree
decl_comdat_group_id (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group_id ();
}

/* When the target supports named sections, return the name of the section
   NODE is placed in, or NULL if it is in no section.  */
const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}

/* Set the section name of NODE to VALUE (a string; NULL clears any
   existing section).  */
void
set_decl_section_name (tree node, const char *value)
{
  struct symtab_node *snode;

  if (value == NULL)
    {
      snode = symtab_node::get (node);
      if (!snode)
	return;
    }
  else if (VAR_P (node))
    snode = varpool_node::get_create (node);
  else
    snode = cgraph_node::get_create (node);
  snode->set_section (value);
}

/* Return the TLS model of variable NODE.  */
enum tls_model
decl_tls_model (const_tree node)
{
  struct varpool_node *snode = varpool_node::get (node);
  if (!snode)
    return TLS_MODEL_NONE;
  return snode->tls_model;
}

/* Set the TLS model of variable NODE to MODEL.  */
void
set_decl_tls_model (tree node, enum tls_model model)
{
  struct varpool_node *vnode;

  if (model == TLS_MODEL_NONE)
    {
      vnode = varpool_node::get (node);
      if (!vnode)
	return;
    }
  else
    vnode = varpool_node::get_create (node);
  vnode->tls_model = model;
}
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case POINTER_BOUNDS_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	return sizeof (tree_vector);
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (VECTOR_CST_NELTS (node) - 1) * sizeof (tree));

    case STRING_CST:
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
		* sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
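/* For example, an INTEGER_CST whose TREE_INT_CST_EXT_NUNITS is 2 occupies
   sizeof (struct tree_int_cst) plus one extra HOST_WIDE_INT, and a TREE_VEC
   of length N occupies the fixed header plus N - 1 trailing pointers, since
   one element is already counted in struct tree_vec.  */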
/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  */

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
				   size_t length ATTRIBUTE_UNUSED)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  if (!GATHER_STATISTICS)
    return;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  kind = id_kind;
	  break;

	case TREE_VEC:
	  kind = vec_kind;
	  break;

	case TREE_BINFO:
	  kind = binfo_kind;
	  break;

	case SSA_NAME:
	  kind = ssa_name_kind;
	  break;

	case BLOCK:
	  kind = b_kind;
	  break;

	case CONSTRUCTOR:
	  kind = constr_kind;
	  break;

	case OMP_CLAUSE:
	  kind = omp_clause_kind;
	  break;

	default:
	  kind = x_kind;
	  break;
	}
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}

/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION (t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
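/* For example, a caller creating an empty lexical scope node (or any other
   fixed-size node) would typically write

     tree block = make_node (BLOCK);

   and then fill in the fields it cares about; everything else is already
   zero-initialized.  */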
/* Free tree node.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) t_kind]--;
      tree_node_sizes[(int) t_kind] -= tree_size (node);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  ggc_free (node);
}

/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option> ();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization> ();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}

/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst (tree type, HOST_WIDE_INT low)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

tree
build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}
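/* For example, building the constant 42 of type unsigned int is typically
   done with

     tree cst = build_int_cst (unsigned_type_node, 42);

   while build_int_cstu is the natural choice when the caller starts from an
   unsigned HOST_WIDE_INT.  Both funnel into wide_int_to_tree below, so equal
   small constants of the same type end up sharing a single node.  */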
/* Construct a tree of type TYPE with the value given by CST.  The signedness
   of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}

/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
	  tree t = build_new_int_cst (type, tmp);
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
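/* For example, a caller that computes the value 260 in a wider wide_int and
   forces it into an 8-bit unsigned type with OVERFLOWABLE < 0 gets back an
   unshared INTEGER_CST with value 4 (260 wrapped to the type's precision)
   and TREE_OVERFLOW set.  */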
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), code);

  return code;
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

tree
wide_int_to_tree (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case POINTER_BOUNDS_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
    }

  return t;
}
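/* For example, the two calls

     tree a = wide_int_to_tree (integer_type_node,
				wi::shwi (3, TYPE_PRECISION (integer_type_node)));
     tree b = build_int_cst (integer_type_node, 3);

   return the same node (a == b), because 3 falls inside the per-type
   small-constant vector; a value outside that range is instead looked up
   in int_cst_hash_table.  */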
void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N.  */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokey, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N.  */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
/* Build an integer constant in TYPE such that the lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
					   TYPE_PRECISION (type)));
}
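/* For example, build_low_bits_mask (unsigned_type_node, 4) yields the
   constant 0xf, i.e. a mask covering the low four bits of the type.  */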
/* Check that X is an integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  return (TREE_CODE (x) == INTEGER_CST
	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
}
/* Build a newly constructed VECTOR_CST node of length LEN.  */

tree
make_vector (unsigned len MEM_STAT_DECL)
{
  tree t;
  unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;
  VECTOR_CST_NELTS (t) = len;

  return t;
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are given by VALS.  */

tree
build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
{
  unsigned int nelts = vals.length ();
  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
  int over = 0;
  unsigned cnt = 0;
  tree v = make_vector (nelts);
  TREE_TYPE (v) = type;

  /* Iterate through elements and check for overflow.  */
  for (cnt = 0; cnt < nelts; ++cnt)
    {
      tree value = vals[cnt];

      VECTOR_CST_ELT (v, cnt) = value;

      /* Don't crash if we get an address constant.  */
      if (!CONSTANT_CLASS_P (value))
	continue;

      over |= TREE_OVERFLOW (value);
    }

  TREE_OVERFLOW (v) = over;
  return v;
}
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
  unsigned HOST_WIDE_INT idx;
  tree value;

  auto_vec<tree, 32> vec (nelts);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
	  vec.quick_push (VECTOR_CST_ELT (value, i));
      else
	vec.quick_push (value);
    }
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return build_vector (type, vec);
}

/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      auto_vec<tree, 32> v (nunits);
      for (i = 0; i < nunits; ++i)
	v.quick_push (sc);
      return build_vector (vectype, v);
    }
  else
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
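/* For example, a splat of the integer 1 across a vector type V would
   typically be obtained with

     tree splat = build_vector_from_val (V, build_one_cst (TREE_TYPE (V)));

   which yields a VECTOR_CST here because the element is constant; a
   non-constant element instead produces a CONSTRUCTOR.  */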
/* Something has messed with the elements of CONSTRUCTOR C after it was built;
   calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */

void
recompute_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = true;
  bool side_effects_p = false;
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      /* Mostly ctors will have elts that don't have side-effects, so
	 the usual case is to scan all the elements.  Hence a single
	 loop for both const and side effects, rather than one loop
	 each (with early outs).  */
      if (!TREE_CONSTANT (val))
	constant_p = false;
      if (TREE_SIDE_EFFECTS (val))
	side_effects_p = true;
    }

  TREE_SIDE_EFFECTS (c) = side_effects_p;
  TREE_CONSTANT (c) = constant_p;
}

/* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   CONSTRUCTOR C.  */

void
verify_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = TREE_CONSTANT (c);
  bool side_effects_p = TREE_SIDE_EFFECTS (c);
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      if (constant_p && !TREE_CONSTANT (val))
	internal_error ("non-constant element in constant CONSTRUCTOR");
      if (!side_effects_p && TREE_SIDE_EFFECTS (val))
	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
    }
}

/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  */
tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
{
  tree c = make_node (CONSTRUCTOR);

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  recompute_constructor_flags (c);

  return c;
}
/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */
tree
build_constructor_single (tree type, tree index, tree value)
{
  vec<constructor_elt, va_gc> *v;
  constructor_elt elt = {index, value};

  vec_alloc (v, 1);
  v->quick_push (elt);

  return build_constructor (type, v);
}


/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */
tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  vec<constructor_elt, va_gc> *v = NULL;

  if (vals)
    {
      vec_alloc (v, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}

/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs.  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}
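/* For example, an initializer for a two-field record might be built as

     tree init = build_constructor_va (rec, 2,
				       f1, build_int_cst (TREE_TYPE (f1), 1),
				       f2, build_zero_cst (TREE_TYPE (f2)));

   where rec, f1 and f2 stand for the caller's RECORD_TYPE and its two
   FIELD_DECLs.  */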
/* Return a new FIXED_CST node whose type is TYPE and value is F.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}

/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  TREE_OVERFLOW (v) = overflow;
  return v;
}

/* Like build_real, but first truncate D to the type.  */

tree
build_real_truncate (tree type, REAL_VALUE_TYPE d)
{
  return build_real (type, real_value_truncate (TYPE_MODE (type), d));
}
/* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
   node I, converted for mode of type TYPE.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}

/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  TREE_OVERFLOW (v) |= overflow;
  return v;
}
/* Return a newly constructed STRING_CST node whose value is
   the LEN characters at STR.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  s->string.str[len] = '\0';

  return s;
}
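/* For example, the literal "hi" would typically be created as

     tree str = build_string (3, "hi");

   with LEN counting the terminating NUL, after which the caller is
   responsible for giving STR a suitable array type via TREE_TYPE.  */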
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}

/* Build a complex (inf +- 0i), such as for the result of cproj.
   TYPE is the complex tree type of the result.  If NEG is true, the
   imaginary zero is negative.  */

tree
build_complex_inf (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   element is set to 1.  In particular, this is 1 + i for complex types.  */

tree
build_each_one_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_one_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_one_cst (type);
}

/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2120 /* Return an integer of type TYPE containing all 1's in as much precision as
2121 it contains, or a complex or vector whose subparts are such integers. */
2123 tree
2124 build_all_ones_cst (tree type)
2126 if (TREE_CODE (type) == COMPLEX_TYPE)
2128 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2129 return build_complex (type, scalar, scalar);
2131 else
2132 return build_minus_one_cst (type);
2135 /* Return a constant of arithmetic type TYPE which is the
2136 opposite of the multiplicative identity of the set TYPE. */
2138 tree
2139 build_minus_one_cst (tree type)
2141 switch (TREE_CODE (type))
2143 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2144 case POINTER_TYPE: case REFERENCE_TYPE:
2145 case OFFSET_TYPE:
2146 return build_int_cst (type, -1);
2148 case REAL_TYPE:
2149 return build_real (type, dconstm1);
2151 case FIXED_POINT_TYPE:
2152 /* We can only generate -1 for accum types. */
2153 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2154 return build_fixed (type,
2155 fixed_from_double_int (double_int_minus_one,
2156 SCALAR_TYPE_MODE (type)));
2158 case VECTOR_TYPE:
2160 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2162 return build_vector_from_val (type, scalar);
2165 case COMPLEX_TYPE:
2166 return build_complex (type,
2167 build_minus_one_cst (TREE_TYPE (type)),
2168 build_zero_cst (TREE_TYPE (type)));
2170 default:
2171 gcc_unreachable ();
2175 /* Build 0 constant of type TYPE. This is used by constructor folding
2176 and thus the constant should be represented in memory by
2177 zero(es). */
2179 tree
2180 build_zero_cst (tree type)
2182 switch (TREE_CODE (type))
2184 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2185 case POINTER_TYPE: case REFERENCE_TYPE:
2186 case OFFSET_TYPE: case NULLPTR_TYPE:
2187 return build_int_cst (type, 0);
2189 case REAL_TYPE:
2190 return build_real (type, dconst0);
2192 case FIXED_POINT_TYPE:
2193 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2195 case VECTOR_TYPE:
2197 tree scalar = build_zero_cst (TREE_TYPE (type));
2199 return build_vector_from_val (type, scalar);
2202 case COMPLEX_TYPE:
2204 tree zero = build_zero_cst (TREE_TYPE (type));
2206 return build_complex (type, zero, zero);
2209 default:
2210 if (!AGGREGATE_TYPE_P (type))
2211 return fold_convert (type, integer_zero_node);
2212 return build_constructor (type, NULL);
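/* Illustrative sketch (hypothetical helper, not in the original sources):
   the *_cst builders above handle scalar and composite types uniformly,
   and their results satisfy the matching predicates defined later in
   this file.  */

static void
constant_builders_example (void)
{
  gcc_checking_assert (integer_onep (build_one_cst (integer_type_node)));
  gcc_checking_assert (real_zerop (build_zero_cst (double_type_node)));
  gcc_checking_assert (integer_all_onesp
                       (build_all_ones_cst (unsigned_type_node)));
}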
2217 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2219 tree
2220 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2222 tree t;
2223 size_t length = (offsetof (struct tree_binfo, base_binfos)
2224 + vec<tree, va_gc>::embedded_size (base_binfos));
2226 record_node_allocation_statistics (TREE_BINFO, length);
2228 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2230 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2232 TREE_SET_CODE (t, TREE_BINFO);
2234 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2236 return t;
2239 /* Create a CASE_LABEL_EXPR tree node and return it. */
2241 tree
2242 build_case_label (tree low_value, tree high_value, tree label_decl)
2244 tree t = make_node (CASE_LABEL_EXPR);
2246 TREE_TYPE (t) = void_type_node;
2247 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2249 CASE_LOW (t) = low_value;
2250 CASE_HIGH (t) = high_value;
2251 CASE_LABEL (t) = label_decl;
2252 CASE_CHAIN (t) = NULL_TREE;
2254 return t;
2257 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2258 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2259 The latter determines the length of the HOST_WIDE_INT vector. */
2261 tree
2262 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2264 tree t;
2265 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2266 + sizeof (struct tree_int_cst));
2268 gcc_assert (len);
2269 record_node_allocation_statistics (INTEGER_CST, length);
2271 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2273 TREE_SET_CODE (t, INTEGER_CST);
2274 TREE_INT_CST_NUNITS (t) = len;
2275 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2276 /* to_offset can only be applied to trees that are offset_int-sized
2277 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2278 must be exactly the precision of offset_int and so LEN is correct. */
2279 if (ext_len <= OFFSET_INT_ELTS)
2280 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2281 else
2282 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2284 TREE_CONSTANT (t) = 1;
2286 return t;
2289 /* Build a newly constructed TREE_VEC node of length LEN. */
2291 tree
2292 make_tree_vec (int len MEM_STAT_DECL)
2294 tree t;
2295 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2297 record_node_allocation_statistics (TREE_VEC, length);
2299 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2301 TREE_SET_CODE (t, TREE_VEC);
2302 TREE_VEC_LENGTH (t) = len;
2304 return t;
2307 /* Grow a TREE_VEC node to new length LEN. */
2309 tree
2310 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2312 gcc_assert (TREE_CODE (v) == TREE_VEC);
2314 int oldlen = TREE_VEC_LENGTH (v);
2315 gcc_assert (len > oldlen);
2317 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2318 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2320 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2322 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2324 TREE_VEC_LENGTH (v) = len;
2326 return v;
2329 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2330 fixed, and scalar, complex or vector. */
2333 zerop (const_tree expr)
2335 return (integer_zerop (expr)
2336 || real_zerop (expr)
2337 || fixed_zerop (expr));
2340 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2341 constant all of whose elements are zero. */
2344 integer_zerop (const_tree expr)
2346 switch (TREE_CODE (expr))
2348 case INTEGER_CST:
2349 return wi::to_wide (expr) == 0;
2350 case COMPLEX_CST:
2351 return (integer_zerop (TREE_REALPART (expr))
2352 && integer_zerop (TREE_IMAGPART (expr)));
2353 case VECTOR_CST:
2355 unsigned i;
2356 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2357 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2358 return false;
2359 return true;
2361 default:
2362 return false;
2366 /* Return 1 if EXPR is the integer constant one, the corresponding
2367 complex constant (1 + 0i), or a vector all of whose elements are one. */
2370 integer_onep (const_tree expr)
2372 switch (TREE_CODE (expr))
2374 case INTEGER_CST:
2375 return wi::eq_p (wi::to_widest (expr), 1);
2376 case COMPLEX_CST:
2377 return (integer_onep (TREE_REALPART (expr))
2378 && integer_zerop (TREE_IMAGPART (expr)));
2379 case VECTOR_CST:
2381 unsigned i;
2382 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2383 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2384 return false;
2385 return true;
2387 default:
2388 return false;
2392 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2393 return 1 if every piece is the integer constant one. */
2396 integer_each_onep (const_tree expr)
2398 if (TREE_CODE (expr) == COMPLEX_CST)
2399 return (integer_onep (TREE_REALPART (expr))
2400 && integer_onep (TREE_IMAGPART (expr)));
2401 else
2402 return integer_onep (expr);
2405 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2406 it contains, or a complex or vector whose subparts are such integers. */
2409 integer_all_onesp (const_tree expr)
2411 if (TREE_CODE (expr) == COMPLEX_CST
2412 && integer_all_onesp (TREE_REALPART (expr))
2413 && integer_all_onesp (TREE_IMAGPART (expr)))
2414 return 1;
2416 else if (TREE_CODE (expr) == VECTOR_CST)
2418 unsigned i;
2419 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2420 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2421 return 0;
2422 return 1;
2425 else if (TREE_CODE (expr) != INTEGER_CST)
2426 return 0;
2428 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2429 == wi::to_wide (expr));
2432 /* Return 1 if EXPR is the integer constant minus one. */
2435 integer_minus_onep (const_tree expr)
2437 if (TREE_CODE (expr) == COMPLEX_CST)
2438 return (integer_all_onesp (TREE_REALPART (expr))
2439 && integer_zerop (TREE_IMAGPART (expr)));
2440 else
2441 return integer_all_onesp (expr);
2444 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2445 one bit on). */
2448 integer_pow2p (const_tree expr)
2450 if (TREE_CODE (expr) == COMPLEX_CST
2451 && integer_pow2p (TREE_REALPART (expr))
2452 && integer_zerop (TREE_IMAGPART (expr)))
2453 return 1;
2455 if (TREE_CODE (expr) != INTEGER_CST)
2456 return 0;
2458 return wi::popcount (wi::to_wide (expr)) == 1;
2461 /* Return 1 if EXPR is an integer constant other than zero or a
2462 complex constant other than zero. */
2465 integer_nonzerop (const_tree expr)
2467 return ((TREE_CODE (expr) == INTEGER_CST
2468 && wi::to_wide (expr) != 0)
2469 || (TREE_CODE (expr) == COMPLEX_CST
2470 && (integer_nonzerop (TREE_REALPART (expr))
2471 || integer_nonzerop (TREE_IMAGPART (expr)))));
2474 /* Return 1 if EXPR is the integer constant one. For vector,
2475 return 1 if every piece is the integer constant minus one
2476 (representing the value TRUE). */
2479 integer_truep (const_tree expr)
2481 if (TREE_CODE (expr) == VECTOR_CST)
2482 return integer_all_onesp (expr);
2483 return integer_onep (expr);
2486 /* Return 1 if EXPR is the fixed-point constant zero. */
2489 fixed_zerop (const_tree expr)
2491 return (TREE_CODE (expr) == FIXED_CST
2492 && TREE_FIXED_CST (expr).data.is_zero ());
2495 /* Return the base-2 logarithm of a tree node known to be a
2496 power of two. */
2499 tree_log2 (const_tree expr)
2501 if (TREE_CODE (expr) == COMPLEX_CST)
2502 return tree_log2 (TREE_REALPART (expr));
2504 return wi::exact_log2 (wi::to_wide (expr));
2507 /* Similar, but return the largest integer Y such that 2 ** Y is less
2508 than or equal to EXPR. */
2511 tree_floor_log2 (const_tree expr)
2513 if (TREE_CODE (expr) == COMPLEX_CST)
2514 return tree_floor_log2 (TREE_REALPART (expr));
2516 return wi::floor_log2 (wi::to_wide (expr));
2519 /* Return number of known trailing zero bits in EXPR, or, if the value of
2520 EXPR is known to be zero, the precision of its type. */
2522 unsigned int
2523 tree_ctz (const_tree expr)
2525 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2526 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2527 return 0;
2529 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2530 switch (TREE_CODE (expr))
2532 case INTEGER_CST:
2533 ret1 = wi::ctz (wi::to_wide (expr));
2534 return MIN (ret1, prec);
2535 case SSA_NAME:
2536 ret1 = wi::ctz (get_nonzero_bits (expr));
2537 return MIN (ret1, prec);
2538 case PLUS_EXPR:
2539 case MINUS_EXPR:
2540 case BIT_IOR_EXPR:
2541 case BIT_XOR_EXPR:
2542 case MIN_EXPR:
2543 case MAX_EXPR:
2544 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2545 if (ret1 == 0)
2546 return ret1;
2547 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2548 return MIN (ret1, ret2);
2549 case POINTER_PLUS_EXPR:
2550 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2551 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2552 /* Second operand is sizetype, which could in theory be
2553 wider than the pointer's precision. Make sure we never
2554 return more than prec. */
2555 ret2 = MIN (ret2, prec);
2556 return MIN (ret1, ret2);
2557 case BIT_AND_EXPR:
2558 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2559 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2560 return MAX (ret1, ret2);
2561 case MULT_EXPR:
2562 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2563 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2564 return MIN (ret1 + ret2, prec);
2565 case LSHIFT_EXPR:
2566 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2567 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2568 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2570 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2571 return MIN (ret1 + ret2, prec);
2573 return ret1;
2574 case RSHIFT_EXPR:
2575 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2576 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2578 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2579 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2580 if (ret1 > ret2)
2581 return ret1 - ret2;
2583 return 0;
2584 case TRUNC_DIV_EXPR:
2585 case CEIL_DIV_EXPR:
2586 case FLOOR_DIV_EXPR:
2587 case ROUND_DIV_EXPR:
2588 case EXACT_DIV_EXPR:
2589 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2590 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2592 int l = tree_log2 (TREE_OPERAND (expr, 1));
2593 if (l >= 0)
2595 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2596 ret2 = l;
2597 if (ret1 > ret2)
2598 return ret1 - ret2;
2601 return 0;
2602 CASE_CONVERT:
2603 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2604 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2605 ret1 = prec;
2606 return MIN (ret1, prec);
2607 case SAVE_EXPR:
2608 return tree_ctz (TREE_OPERAND (expr, 0));
2609 case COND_EXPR:
2610 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2611 if (ret1 == 0)
2612 return 0;
2613 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2614 return MIN (ret1, ret2);
2615 case COMPOUND_EXPR:
2616 return tree_ctz (TREE_OPERAND (expr, 1));
2617 case ADDR_EXPR:
2618 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2619 if (ret1 > BITS_PER_UNIT)
2621 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2622 return MIN (ret1, prec);
2624 return 0;
2625 default:
2626 return 0;
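/* Illustrative sketch (hypothetical helper, not in the original sources):
   how tree_ctz combines operand information.  40 is 0b101000, so it has
   three trailing zero bits; shifting it left by 4 adds four more.  */

static void
tree_ctz_example (void)
{
  tree cst = build_int_cst (integer_type_node, 40);
  gcc_checking_assert (tree_ctz (cst) == 3);

  tree shifted = build2 (LSHIFT_EXPR, integer_type_node, cst,
                         build_int_cst (integer_type_node, 4));
  gcc_checking_assert (tree_ctz (shifted) == 7);
}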
2630 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2631 decimal float constants, so don't return 1 for them. */
2634 real_zerop (const_tree expr)
2636 switch (TREE_CODE (expr))
2638 case REAL_CST:
2639 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2640 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2641 case COMPLEX_CST:
2642 return real_zerop (TREE_REALPART (expr))
2643 && real_zerop (TREE_IMAGPART (expr));
2644 case VECTOR_CST:
2646 unsigned i;
2647 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2648 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2649 return false;
2650 return true;
2652 default:
2653 return false;
2657 /* Return 1 if EXPR is the real constant one in real or complex form.
2658 Trailing zeroes matter for decimal float constants, so don't return
2659 1 for them. */
2662 real_onep (const_tree expr)
2664 switch (TREE_CODE (expr))
2666 case REAL_CST:
2667 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2668 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2669 case COMPLEX_CST:
2670 return real_onep (TREE_REALPART (expr))
2671 && real_zerop (TREE_IMAGPART (expr));
2672 case VECTOR_CST:
2674 unsigned i;
2675 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2676 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2677 return false;
2678 return true;
2680 default:
2681 return false;
2685 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2686 matter for decimal float constants, so don't return 1 for them. */
2689 real_minus_onep (const_tree expr)
2691 switch (TREE_CODE (expr))
2693 case REAL_CST:
2694 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2695 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2696 case COMPLEX_CST:
2697 return real_minus_onep (TREE_REALPART (expr))
2698 && real_zerop (TREE_IMAGPART (expr));
2699 case VECTOR_CST:
2701 unsigned i;
2702 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2703 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2704 return false;
2705 return true;
2707 default:
2708 return false;
2712 /* Nonzero if EXP is a constant or a cast of a constant. */
2715 really_constant_p (const_tree exp)
2717 /* This is not quite the same as STRIP_NOPS. It does more. */
2718 while (CONVERT_EXPR_P (exp)
2719 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2720 exp = TREE_OPERAND (exp, 0);
2721 return TREE_CONSTANT (exp);
2724 /* Return first list element whose TREE_VALUE is ELEM.
2725 Return 0 if ELEM is not in LIST. */
2727 tree
2728 value_member (tree elem, tree list)
2730 while (list)
2732 if (elem == TREE_VALUE (list))
2733 return list;
2734 list = TREE_CHAIN (list);
2736 return NULL_TREE;
2739 /* Return first list element whose TREE_PURPOSE is ELEM.
2740 Return 0 if ELEM is not in LIST. */
2742 tree
2743 purpose_member (const_tree elem, tree list)
2745 while (list)
2747 if (elem == TREE_PURPOSE (list))
2748 return list;
2749 list = TREE_CHAIN (list);
2751 return NULL_TREE;
2754 /* Return true if ELEM is in V. */
2756 bool
2757 vec_member (const_tree elem, vec<tree, va_gc> *v)
2759 unsigned ix;
2760 tree t;
2761 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2762 if (elem == t)
2763 return true;
2764 return false;
2767 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2768 NULL_TREE. */
2770 tree
2771 chain_index (int idx, tree chain)
2773 for (; chain && idx > 0; --idx)
2774 chain = TREE_CHAIN (chain);
2775 return chain;
2778 /* Return nonzero if ELEM is part of the chain CHAIN. */
2781 chain_member (const_tree elem, const_tree chain)
2783 while (chain)
2785 if (elem == chain)
2786 return 1;
2787 chain = DECL_CHAIN (chain);
2790 return 0;
2793 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2794 We expect a null pointer to mark the end of the chain.
2795 This is the Lisp primitive `length'. */
2798 list_length (const_tree t)
2800 const_tree p = t;
2801 #ifdef ENABLE_TREE_CHECKING
2802 const_tree q = t;
2803 #endif
2804 int len = 0;
2806 while (p)
2808 p = TREE_CHAIN (p);
2809 #ifdef ENABLE_TREE_CHECKING
2810 if (len % 2)
2811 q = TREE_CHAIN (q);
2812 gcc_assert (p != q);
2813 #endif
2814 len++;
2817 return len;
2820 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2821 UNION_TYPE TYPE, or NULL_TREE if none. */
2823 tree
2824 first_field (const_tree type)
2826 tree t = TYPE_FIELDS (type);
2827 while (t && TREE_CODE (t) != FIELD_DECL)
2828 t = TREE_CHAIN (t);
2829 return t;
2832 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2833 by modifying the last node in chain 1 to point to chain 2.
2834 This is the Lisp primitive `nconc'. */
2836 tree
2837 chainon (tree op1, tree op2)
2839 tree t1;
2841 if (!op1)
2842 return op2;
2843 if (!op2)
2844 return op1;
2846 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2847 continue;
2848 TREE_CHAIN (t1) = op2;
2850 #ifdef ENABLE_TREE_CHECKING
2852 tree t2;
2853 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2854 gcc_assert (t2 != t1);
2856 #endif
2858 return op1;
2861 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2863 tree
2864 tree_last (tree chain)
2866 tree next;
2867 if (chain)
2868 while ((next = TREE_CHAIN (chain)))
2869 chain = next;
2870 return chain;
2873 /* Reverse the order of elements in the chain T,
2874 and return the new head of the chain (old last element). */
2876 tree
2877 nreverse (tree t)
2879 tree prev = 0, decl, next;
2880 for (decl = t; decl; decl = next)
2882 /* We shouldn't be using this function to reverse BLOCK chains; we
2883 have blocks_nreverse for that. */
2884 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2885 next = TREE_CHAIN (decl);
2886 TREE_CHAIN (decl) = prev;
2887 prev = decl;
2889 return prev;
2892 /* Return a newly created TREE_LIST node whose
2893 purpose and value fields are PARM and VALUE. */
2895 tree
2896 build_tree_list (tree parm, tree value MEM_STAT_DECL)
2898 tree t = make_node (TREE_LIST PASS_MEM_STAT);
2899 TREE_PURPOSE (t) = parm;
2900 TREE_VALUE (t) = value;
2901 return t;
2904 /* Build a chain of TREE_LIST nodes from a vector. */
2906 tree
2907 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2909 tree ret = NULL_TREE;
2910 tree *pp = &ret;
2911 unsigned int i;
2912 tree t;
2913 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2915 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
2916 pp = &TREE_CHAIN (*pp);
2918 return ret;
2921 /* Return a newly created TREE_LIST node whose
2922 purpose and value fields are PURPOSE and VALUE
2923 and whose TREE_CHAIN is CHAIN. */
2925 tree
2926 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
2928 tree node;
2930 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2931 memset (node, 0, sizeof (struct tree_common));
2933 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2935 TREE_SET_CODE (node, TREE_LIST);
2936 TREE_CHAIN (node) = chain;
2937 TREE_PURPOSE (node) = purpose;
2938 TREE_VALUE (node) = value;
2939 return node;
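/* Illustrative sketch (hypothetical helper, not in the original sources):
   the TREE_LIST primitives above compose like their Lisp namesakes.  */

static void
tree_list_example (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);

  /* Build the singleton lists (1) and (2), then splice them.  */
  tree list = chainon (build_tree_list (NULL_TREE, one),
                       build_tree_list (NULL_TREE, two));

  gcc_checking_assert (list_length (list) == 2);
  gcc_checking_assert (value_member (two, list) != NULL_TREE);
  gcc_checking_assert (TREE_VALUE (tree_last (list)) == two);
}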
2942 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2943 trees. */
2945 vec<tree, va_gc> *
2946 ctor_to_vec (tree ctor)
2948 vec<tree, va_gc> *vec;
2949 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2950 unsigned int ix;
2951 tree val;
2953 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2954 vec->quick_push (val);
2956 return vec;
2959 /* Return the size nominally occupied by an object of type TYPE
2960 when it resides in memory. The value is measured in units of bytes,
2961 and its data type is that normally used for type sizes
2962 (which is the first type created by make_signed_type or
2963 make_unsigned_type). */
2965 tree
2966 size_in_bytes_loc (location_t loc, const_tree type)
2968 tree t;
2970 if (type == error_mark_node)
2971 return integer_zero_node;
2973 type = TYPE_MAIN_VARIANT (type);
2974 t = TYPE_SIZE_UNIT (type);
2976 if (t == 0)
2978 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2979 return size_zero_node;
2982 return t;
2985 /* Return the size of TYPE (in bytes) as a wide integer
2986 or return -1 if the size can vary or is larger than an integer. */
2988 HOST_WIDE_INT
2989 int_size_in_bytes (const_tree type)
2991 tree t;
2993 if (type == error_mark_node)
2994 return 0;
2996 type = TYPE_MAIN_VARIANT (type);
2997 t = TYPE_SIZE_UNIT (type);
2999 if (t && tree_fits_uhwi_p (t))
3000 return TREE_INT_CST_LOW (t);
3001 else
3002 return -1;
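/* Illustrative sketch (hypothetical helper, not in the original sources):
   int_size_in_bytes yields a host-side constant only when the size is
   known; an incomplete type such as void has no TYPE_SIZE_UNIT and
   deliberately produces -1.  */

static void
int_size_in_bytes_example (void)
{
  gcc_checking_assert (int_size_in_bytes (char_type_node) == 1);
  gcc_checking_assert (int_size_in_bytes (void_type_node) == -1);
}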
3005 /* Return the maximum size of TYPE (in bytes) as a wide integer
3006 or return -1 if the size can vary or is larger than an integer. */
3008 HOST_WIDE_INT
3009 max_int_size_in_bytes (const_tree type)
3011 HOST_WIDE_INT size = -1;
3012 tree size_tree;
3014 /* If this is an array type, check for a possible MAX_SIZE attached. */
3016 if (TREE_CODE (type) == ARRAY_TYPE)
3018 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3020 if (size_tree && tree_fits_uhwi_p (size_tree))
3021 size = tree_to_uhwi (size_tree);
3024 /* If we still haven't been able to get a size, see if the language
3025 can compute a maximum size. */
3027 if (size == -1)
3029 size_tree = lang_hooks.types.max_size (type);
3031 if (size_tree && tree_fits_uhwi_p (size_tree))
3032 size = tree_to_uhwi (size_tree);
3035 return size;
3038 /* Return the bit position of FIELD, in bits from the start of the record.
3039 This is a tree of type bitsizetype. */
3041 tree
3042 bit_position (const_tree field)
3044 return bit_from_pos (DECL_FIELD_OFFSET (field),
3045 DECL_FIELD_BIT_OFFSET (field));
3048 /* Return the byte position of FIELD, in bytes from the start of the record.
3049 This is a tree of type sizetype. */
3051 tree
3052 byte_position (const_tree field)
3054 return byte_from_pos (DECL_FIELD_OFFSET (field),
3055 DECL_FIELD_BIT_OFFSET (field));
3058 /* Likewise, but return as an integer. It must be representable in
3059 that way (since it could be a signed value, we don't have the
3060 option of returning -1 like int_size_in_bytes can). */
3062 HOST_WIDE_INT
3063 int_byte_position (const_tree field)
3065 return tree_to_shwi (byte_position (field));
3068 /* Return the strictest alignment, in bits, that T is known to have. */
3070 unsigned int
3071 expr_align (const_tree t)
3073 unsigned int align0, align1;
3075 switch (TREE_CODE (t))
3077 CASE_CONVERT: case NON_LVALUE_EXPR:
3078 /* If we have conversions, we know that the alignment of the
3079 object must meet each of the alignments of the types. */
3080 align0 = expr_align (TREE_OPERAND (t, 0));
3081 align1 = TYPE_ALIGN (TREE_TYPE (t));
3082 return MAX (align0, align1);
3084 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3085 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3086 case CLEANUP_POINT_EXPR:
3087 /* These don't change the alignment of an object. */
3088 return expr_align (TREE_OPERAND (t, 0));
3090 case COND_EXPR:
3091 /* The best we can do is say that the alignment is the least aligned
3092 of the two arms. */
3093 align0 = expr_align (TREE_OPERAND (t, 1));
3094 align1 = expr_align (TREE_OPERAND (t, 2));
3095 return MIN (align0, align1);
3097 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3098 meaningfully, it's always 1. */
3099 case LABEL_DECL: case CONST_DECL:
3100 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3101 case FUNCTION_DECL:
3102 gcc_assert (DECL_ALIGN (t) != 0);
3103 return DECL_ALIGN (t);
3105 default:
3106 break;
3109 /* Otherwise take the alignment from that of the type. */
3110 return TYPE_ALIGN (TREE_TYPE (t));
3113 /* Return, as a tree node, the number of elements for TYPE (which is an
3114 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3116 tree
3117 array_type_nelts (const_tree type)
3119 tree index_type, min, max;
3121 /* If they did it with unspecified bounds, then we should have already
3122 given an error about it before we got here. */
3123 if (! TYPE_DOMAIN (type))
3124 return error_mark_node;
3126 index_type = TYPE_DOMAIN (type);
3127 min = TYPE_MIN_VALUE (index_type);
3128 max = TYPE_MAX_VALUE (index_type);
3130 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3131 if (!max)
3132 return error_mark_node;
3134 return (integer_zerop (min)
3135 ? max
3136 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
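/* Illustrative sketch (hypothetical helper, not in the original sources):
   for a type equivalent to int[10] the domain is [0, 9], so
   array_type_nelts yields the constant 9.  */

static void
array_type_nelts_example (void)
{
  tree a10 = build_array_type (integer_type_node,
                               build_index_type (size_int (9)));
  gcc_checking_assert (tree_to_shwi (array_type_nelts (a10)) == 9);
}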
3139 /* If arg is static -- a reference to an object in static storage -- then
3140 return the object. This is not the same as the C meaning of `static'.
3141 If arg isn't static, return NULL. */
3143 tree
3144 staticp (tree arg)
3146 switch (TREE_CODE (arg))
3148 case FUNCTION_DECL:
3149 /* Nested functions are static, even though taking their address will
3150 involve a trampoline as we unnest the nested function and create
3151 the trampoline on the tree level. */
3152 return arg;
3154 case VAR_DECL:
3155 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3156 && ! DECL_THREAD_LOCAL_P (arg)
3157 && ! DECL_DLLIMPORT_P (arg)
3158 ? arg : NULL);
3160 case CONST_DECL:
3161 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3162 ? arg : NULL);
3164 case CONSTRUCTOR:
3165 return TREE_STATIC (arg) ? arg : NULL;
3167 case LABEL_DECL:
3168 case STRING_CST:
3169 return arg;
3171 case COMPONENT_REF:
3172 /* If the thing being referenced is not a field, then it is
3173 something language specific. */
3174 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3176 /* If we are referencing a bitfield, we can't evaluate an
3177 ADDR_EXPR at compile time and so it isn't a constant. */
3178 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3179 return NULL;
3181 return staticp (TREE_OPERAND (arg, 0));
3183 case BIT_FIELD_REF:
3184 return NULL;
3186 case INDIRECT_REF:
3187 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3189 case ARRAY_REF:
3190 case ARRAY_RANGE_REF:
3191 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3192 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3193 return staticp (TREE_OPERAND (arg, 0));
3194 else
3195 return NULL;
3197 case COMPOUND_LITERAL_EXPR:
3198 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3200 default:
3201 return NULL;
3208 /* Return whether OP is a DECL whose address is function-invariant. */
3210 bool
3211 decl_address_invariant_p (const_tree op)
3213 /* The conditions below are slightly less strict than the one in
3214 staticp. */
3216 switch (TREE_CODE (op))
3218 case PARM_DECL:
3219 case RESULT_DECL:
3220 case LABEL_DECL:
3221 case FUNCTION_DECL:
3222 return true;
3224 case VAR_DECL:
3225 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3226 || DECL_THREAD_LOCAL_P (op)
3227 || DECL_CONTEXT (op) == current_function_decl
3228 || decl_function_context (op) == current_function_decl)
3229 return true;
3230 break;
3232 case CONST_DECL:
3233 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3234 || decl_function_context (op) == current_function_decl)
3235 return true;
3236 break;
3238 default:
3239 break;
3242 return false;
3245 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3247 bool
3248 decl_address_ip_invariant_p (const_tree op)
3250 /* The conditions below are slightly less strict than the one in
3251 staticp. */
3253 switch (TREE_CODE (op))
3255 case LABEL_DECL:
3256 case FUNCTION_DECL:
3257 case STRING_CST:
3258 return true;
3260 case VAR_DECL:
3261 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3262 && !DECL_DLLIMPORT_P (op))
3263 || DECL_THREAD_LOCAL_P (op))
3264 return true;
3265 break;
3267 case CONST_DECL:
3268 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3269 return true;
3270 break;
3272 default:
3273 break;
3276 return false;
3280 /* Return true if T is function-invariant (internal function, does
3281 not handle arithmetic; that's handled in skip_simple_arithmetic and
3282 tree_invariant_p). */
3284 static bool
3285 tree_invariant_p_1 (tree t)
3287 tree op;
3289 if (TREE_CONSTANT (t)
3290 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3291 return true;
3293 switch (TREE_CODE (t))
3295 case SAVE_EXPR:
3296 return true;
3298 case ADDR_EXPR:
3299 op = TREE_OPERAND (t, 0);
3300 while (handled_component_p (op))
3302 switch (TREE_CODE (op))
3304 case ARRAY_REF:
3305 case ARRAY_RANGE_REF:
3306 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3307 || TREE_OPERAND (op, 2) != NULL_TREE
3308 || TREE_OPERAND (op, 3) != NULL_TREE)
3309 return false;
3310 break;
3312 case COMPONENT_REF:
3313 if (TREE_OPERAND (op, 2) != NULL_TREE)
3314 return false;
3315 break;
3317 default:;
3319 op = TREE_OPERAND (op, 0);
3322 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3324 default:
3325 break;
3328 return false;
3331 /* Return true if T is function-invariant. */
3333 bool
3334 tree_invariant_p (tree t)
3336 tree inner = skip_simple_arithmetic (t);
3337 return tree_invariant_p_1 (inner);
3340 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3341 Do this to any expression which may be used in more than one place,
3342 but must be evaluated only once.
3344 Normally, expand_expr would reevaluate the expression each time.
3345 Calling save_expr produces something that is evaluated and recorded
3346 the first time expand_expr is called on it. Subsequent calls to
3347 expand_expr just reuse the recorded value.
3349 The call to expand_expr that generates code that actually computes
3350 the value is the first call *at compile time*. Subsequent calls
3351 *at compile time* generate code to use the saved value.
3352 This produces the correct result provided that *at run time* control
3353 always flows through the insns made by the first expand_expr
3354 before reaching the other places where the save_expr was evaluated.
3355 You, the caller of save_expr, must make sure this is so.
3357 Constants, and certain read-only nodes, are returned with no
3358 SAVE_EXPR because that is safe. Expressions containing placeholders
3359 are not touched; see tree.def for an explanation of what these
3360 are used for. */
3362 tree
3363 save_expr (tree expr)
3365 tree inner;
3367 /* If the tree evaluates to a constant, then we don't want to hide that
3368 fact (i.e. this allows further folding, and direct checks for constants).
3369 However, a read-only object that has side effects cannot be bypassed.
3370 Since it is no problem to reevaluate literals, we just return the
3371 literal node. */
3372 inner = skip_simple_arithmetic (expr);
3373 if (TREE_CODE (inner) == ERROR_MARK)
3374 return inner;
3376 if (tree_invariant_p_1 (inner))
3377 return expr;
3379 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3380 it means that the size or offset of some field of an object depends on
3381 the value within another field.
3383 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3384 and some variable since it would then need to be both evaluated once and
3385 evaluated more than once. Front-ends must assure this case cannot
3386 happen by surrounding any such subexpressions in their own SAVE_EXPR
3387 and forcing evaluation at the proper time. */
3388 if (contains_placeholder_p (inner))
3389 return expr;
3391 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3393 /* This expression might be placed ahead of a jump to ensure that the
3394 value was computed on both sides of the jump. So make sure it isn't
3395 eliminated as dead. */
3396 TREE_SIDE_EFFECTS (expr) = 1;
3397 return expr;
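/* Illustrative sketch (hypothetical helper, not in the original sources):
   the usual save_expr discipline is to wrap EXPR once and reuse the
   returned node everywhere the value is needed, so that any side effects
   of EXPR happen exactly once.  */

static tree
square_once_example (tree expr)
{
  tree val = save_expr (expr);
  return fold_build2 (MULT_EXPR, TREE_TYPE (val), val, val);
}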
3400 /* Look inside EXPR into any simple arithmetic operations. Return the
3401 outermost non-arithmetic or non-invariant node. */
3403 tree
3404 skip_simple_arithmetic (tree expr)
3406 /* We don't care about whether this can be used as an lvalue in this
3407 context. */
3408 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3409 expr = TREE_OPERAND (expr, 0);
3411 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3412 a constant, it will be more efficient to not make another SAVE_EXPR since
3413 it will allow better simplification and GCSE will be able to merge the
3414 computations if they actually occur. */
3415 while (true)
3417 if (UNARY_CLASS_P (expr))
3418 expr = TREE_OPERAND (expr, 0);
3419 else if (BINARY_CLASS_P (expr))
3421 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3422 expr = TREE_OPERAND (expr, 0);
3423 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3424 expr = TREE_OPERAND (expr, 1);
3425 else
3426 break;
3428 else
3429 break;
3432 return expr;
3435 /* Look inside EXPR into simple arithmetic operations involving constants.
3436 Return the outermost non-arithmetic or non-constant node. */
3438 tree
3439 skip_simple_constant_arithmetic (tree expr)
3441 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3442 expr = TREE_OPERAND (expr, 0);
3444 while (true)
3446 if (UNARY_CLASS_P (expr))
3447 expr = TREE_OPERAND (expr, 0);
3448 else if (BINARY_CLASS_P (expr))
3450 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3451 expr = TREE_OPERAND (expr, 0);
3452 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3453 expr = TREE_OPERAND (expr, 1);
3454 else
3455 break;
3457 else
3458 break;
3461 return expr;
3464 /* Return which tree structure is used by T. */
3466 enum tree_node_structure_enum
3467 tree_node_structure (const_tree t)
3469 const enum tree_code code = TREE_CODE (t);
3470 return tree_node_structure_for_code (code);
3473 /* Set various status flags when building a CALL_EXPR object T. */
3475 static void
3476 process_call_operands (tree t)
3478 bool side_effects = TREE_SIDE_EFFECTS (t);
3479 bool read_only = false;
3480 int i = call_expr_flags (t);
3482 /* Calls have side-effects, except those to const or pure functions. */
3483 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3484 side_effects = true;
3485 /* Propagate TREE_READONLY of arguments for const functions. */
3486 if (i & ECF_CONST)
3487 read_only = true;
3489 if (!side_effects || read_only)
3490 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3492 tree op = TREE_OPERAND (t, i);
3493 if (op && TREE_SIDE_EFFECTS (op))
3494 side_effects = true;
3495 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3496 read_only = false;
3499 TREE_SIDE_EFFECTS (t) = side_effects;
3500 TREE_READONLY (t) = read_only;
3503 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3504 size or offset that depends on a field within a record. */
3506 bool
3507 contains_placeholder_p (const_tree exp)
3509 enum tree_code code;
3511 if (!exp)
3512 return 0;
3514 code = TREE_CODE (exp);
3515 if (code == PLACEHOLDER_EXPR)
3516 return 1;
3518 switch (TREE_CODE_CLASS (code))
3520 case tcc_reference:
3521 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3522 position computations since they will be converted into a
3523 WITH_RECORD_EXPR involving the reference, which we assume
3524 here will be valid. */
3525 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3527 case tcc_exceptional:
3528 if (code == TREE_LIST)
3529 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3530 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3531 break;
3533 case tcc_unary:
3534 case tcc_binary:
3535 case tcc_comparison:
3536 case tcc_expression:
3537 switch (code)
3539 case COMPOUND_EXPR:
3540 /* Ignoring the first operand isn't quite right, but works best. */
3541 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3543 case COND_EXPR:
3544 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3545 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3546 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3548 case SAVE_EXPR:
3549 /* The save_expr function never wraps anything containing
3550 a PLACEHOLDER_EXPR. */
3551 return 0;
3553 default:
3554 break;
3557 switch (TREE_CODE_LENGTH (code))
3559 case 1:
3560 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3561 case 2:
3562 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3563 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3564 default:
3565 return 0;
3568 case tcc_vl_exp:
3569 switch (code)
3571 case CALL_EXPR:
3573 const_tree arg;
3574 const_call_expr_arg_iterator iter;
3575 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3576 if (CONTAINS_PLACEHOLDER_P (arg))
3577 return 1;
3578 return 0;
3580 default:
3581 return 0;
3584 default:
3585 return 0;
3587 return 0;
3590 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3591 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3592 field positions. */
3594 static bool
3595 type_contains_placeholder_1 (const_tree type)
3597 /* If the size contains a placeholder or the parent type (component type in
3598 the case of arrays) type involves a placeholder, this type does. */
3599 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3600 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3601 || (!POINTER_TYPE_P (type)
3602 && TREE_TYPE (type)
3603 && type_contains_placeholder_p (TREE_TYPE (type))))
3604 return true;
3606 /* Now do type-specific checks. Note that the last part of the check above
3607 greatly limits what we have to do below. */
3608 switch (TREE_CODE (type))
3610 case VOID_TYPE:
3611 case POINTER_BOUNDS_TYPE:
3612 case COMPLEX_TYPE:
3613 case ENUMERAL_TYPE:
3614 case BOOLEAN_TYPE:
3615 case POINTER_TYPE:
3616 case OFFSET_TYPE:
3617 case REFERENCE_TYPE:
3618 case METHOD_TYPE:
3619 case FUNCTION_TYPE:
3620 case VECTOR_TYPE:
3621 case NULLPTR_TYPE:
3622 return false;
3624 case INTEGER_TYPE:
3625 case REAL_TYPE:
3626 case FIXED_POINT_TYPE:
3627 /* Here we just check the bounds. */
3628 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3629 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3631 case ARRAY_TYPE:
3632 /* We have already checked the component type above, so just check
3633 the domain type. Flexible array members have a null domain. */
3634 return TYPE_DOMAIN (type) ?
3635 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3637 case RECORD_TYPE:
3638 case UNION_TYPE:
3639 case QUAL_UNION_TYPE:
3641 tree field;
3643 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3644 if (TREE_CODE (field) == FIELD_DECL
3645 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3646 || (TREE_CODE (type) == QUAL_UNION_TYPE
3647 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3648 || type_contains_placeholder_p (TREE_TYPE (field))))
3649 return true;
3651 return false;
3654 default:
3655 gcc_unreachable ();
3659 /* Wrapper around above function used to cache its result. */
3661 bool
3662 type_contains_placeholder_p (tree type)
3664 bool result;
3666 /* If the contains_placeholder_bits field has been initialized,
3667 then we know the answer. */
3668 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3669 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3671 /* Indicate that we've seen this type node, and the answer is false.
3672 This is what we want to return if we run into recursion via fields. */
3673 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3675 /* Compute the real value. */
3676 result = type_contains_placeholder_1 (type);
3678 /* Store the real value. */
3679 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3681 return result;
3684 /* Push tree EXP onto vector QUEUE if it is not already present. */
3686 static void
3687 push_without_duplicates (tree exp, vec<tree> *queue)
3689 unsigned int i;
3690 tree iter;
3692 FOR_EACH_VEC_ELT (*queue, i, iter)
3693 if (simple_cst_equal (iter, exp) == 1)
3694 break;
3696 if (!iter)
3697 queue->safe_push (exp);
3700 /* Given a tree EXP, find all occurrences of references to fields
3701 in a PLACEHOLDER_EXPR and place them in vector REFS without
3702 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3703 we assume here that EXP contains only arithmetic expressions
3704 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3705 argument list. */
3707 void
3708 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3710 enum tree_code code = TREE_CODE (exp);
3711 tree inner;
3712 int i;
3714 /* We handle TREE_LIST and COMPONENT_REF separately. */
3715 if (code == TREE_LIST)
3717 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3718 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3720 else if (code == COMPONENT_REF)
3722 for (inner = TREE_OPERAND (exp, 0);
3723 REFERENCE_CLASS_P (inner);
3724 inner = TREE_OPERAND (inner, 0))
3727 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3728 push_without_duplicates (exp, refs);
3729 else
3730 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3732 else
3733 switch (TREE_CODE_CLASS (code))
3735 case tcc_constant:
3736 break;
3738 case tcc_declaration:
3739 /* Variables allocated to static storage can stay. */
3740 if (!TREE_STATIC (exp))
3741 push_without_duplicates (exp, refs);
3742 break;
3744 case tcc_expression:
3745 /* This is the pattern built in ada/make_aligning_type. */
3746 if (code == ADDR_EXPR
3747 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3749 push_without_duplicates (exp, refs);
3750 break;
3753 /* Fall through. */
3755 case tcc_exceptional:
3756 case tcc_unary:
3757 case tcc_binary:
3758 case tcc_comparison:
3759 case tcc_reference:
3760 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3761 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3762 break;
3764 case tcc_vl_exp:
3765 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3766 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3767 break;
3769 default:
3770 gcc_unreachable ();
3774 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3775 return a tree with all occurrences of references to F in a
3776 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3777 CONST_DECLs. Note that we assume here that EXP contains only
3778 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3779 occurring only in their argument list. */
3781 tree
3782 substitute_in_expr (tree exp, tree f, tree r)
3784 enum tree_code code = TREE_CODE (exp);
3785 tree op0, op1, op2, op3;
3786 tree new_tree;
3788 /* We handle TREE_LIST and COMPONENT_REF separately. */
3789 if (code == TREE_LIST)
3791 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3792 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3793 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3794 return exp;
3796 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3798 else if (code == COMPONENT_REF)
3800 tree inner;
3802 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3803 and it is the right field, replace it with R. */
3804 for (inner = TREE_OPERAND (exp, 0);
3805 REFERENCE_CLASS_P (inner);
3806 inner = TREE_OPERAND (inner, 0))
3809 /* The field. */
3810 op1 = TREE_OPERAND (exp, 1);
3812 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3813 return r;
3815 /* If this expression hasn't been completed yet, leave it alone. */
3816 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3817 return exp;
3819 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3820 if (op0 == TREE_OPERAND (exp, 0))
3821 return exp;
3823 new_tree
3824 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3826 else
3827 switch (TREE_CODE_CLASS (code))
3829 case tcc_constant:
3830 return exp;
3832 case tcc_declaration:
3833 if (exp == f)
3834 return r;
3835 else
3836 return exp;
3838 case tcc_expression:
3839 if (exp == f)
3840 return r;
3842 /* Fall through. */
3844 case tcc_exceptional:
3845 case tcc_unary:
3846 case tcc_binary:
3847 case tcc_comparison:
3848 case tcc_reference:
3849 switch (TREE_CODE_LENGTH (code))
3851 case 0:
3852 return exp;
3854 case 1:
3855 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3856 if (op0 == TREE_OPERAND (exp, 0))
3857 return exp;
3859 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3860 break;
3862 case 2:
3863 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3864 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3866 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3867 return exp;
3869 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3870 break;
3872 case 3:
3873 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3874 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3875 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3877 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3878 && op2 == TREE_OPERAND (exp, 2))
3879 return exp;
3881 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3882 break;
3884 case 4:
3885 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3886 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3887 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3888 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3890 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3891 && op2 == TREE_OPERAND (exp, 2)
3892 && op3 == TREE_OPERAND (exp, 3))
3893 return exp;
3895 new_tree
3896 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3897 break;
3899 default:
3900 gcc_unreachable ();
3902 break;
3904 case tcc_vl_exp:
3906 int i;
3908 new_tree = NULL_TREE;
3910 /* If we are trying to replace F with a constant or with another
3911 instance of one of the arguments of the call, inline back
3912 functions which do nothing else than computing a value from
3913 the arguments they are passed. This makes it possible to
3914 fold partially or entirely the replacement expression. */
3915 if (code == CALL_EXPR)
3917 bool maybe_inline = false;
3918 if (CONSTANT_CLASS_P (r))
3919 maybe_inline = true;
3920 else
3921 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
3922 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
3924 maybe_inline = true;
3925 break;
3927 if (maybe_inline)
3929 tree t = maybe_inline_call_in_expr (exp);
3930 if (t)
3931 return SUBSTITUTE_IN_EXPR (t, f, r);
3935 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3937 tree op = TREE_OPERAND (exp, i);
3938 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3939 if (new_op != op)
3941 if (!new_tree)
3942 new_tree = copy_node (exp);
3943 TREE_OPERAND (new_tree, i) = new_op;
3947 if (new_tree)
3949 new_tree = fold (new_tree);
3950 if (TREE_CODE (new_tree) == CALL_EXPR)
3951 process_call_operands (new_tree);
3953 else
3954 return exp;
3956 break;
3958 default:
3959 gcc_unreachable ();
3962 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3964 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3965 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3967 return new_tree;
3970 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3971 for it within OBJ, a tree that is an object or a chain of references. */
3973 tree
3974 substitute_placeholder_in_expr (tree exp, tree obj)
3976 enum tree_code code = TREE_CODE (exp);
3977 tree op0, op1, op2, op3;
3978 tree new_tree;
3980 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3981 in the chain of OBJ. */
3982 if (code == PLACEHOLDER_EXPR)
3984 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3985 tree elt;
3987 for (elt = obj; elt != 0;
3988 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3989 || TREE_CODE (elt) == COND_EXPR)
3990 ? TREE_OPERAND (elt, 1)
3991 : (REFERENCE_CLASS_P (elt)
3992 || UNARY_CLASS_P (elt)
3993 || BINARY_CLASS_P (elt)
3994 || VL_EXP_CLASS_P (elt)
3995 || EXPRESSION_CLASS_P (elt))
3996 ? TREE_OPERAND (elt, 0) : 0))
3997 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3998 return elt;
4000 for (elt = obj; elt != 0;
4001 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4002 || TREE_CODE (elt) == COND_EXPR)
4003 ? TREE_OPERAND (elt, 1)
4004 : (REFERENCE_CLASS_P (elt)
4005 || UNARY_CLASS_P (elt)
4006 || BINARY_CLASS_P (elt)
4007 || VL_EXP_CLASS_P (elt)
4008 || EXPRESSION_CLASS_P (elt))
4009 ? TREE_OPERAND (elt, 0) : 0))
4010 if (POINTER_TYPE_P (TREE_TYPE (elt))
4011 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4012 == need_type))
4013 return fold_build1 (INDIRECT_REF, need_type, elt);
4015 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4016 survives until RTL generation, there will be an error. */
4017 return exp;
4020 /* TREE_LIST is special because we need to look at TREE_VALUE
4021 and TREE_CHAIN, not TREE_OPERANDS. */
4022 else if (code == TREE_LIST)
4024 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4025 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4026 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4027 return exp;
4029 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4031 else
4032 switch (TREE_CODE_CLASS (code))
4034 case tcc_constant:
4035 case tcc_declaration:
4036 return exp;
4038 case tcc_exceptional:
4039 case tcc_unary:
4040 case tcc_binary:
4041 case tcc_comparison:
4042 case tcc_expression:
4043 case tcc_reference:
4044 case tcc_statement:
4045 switch (TREE_CODE_LENGTH (code))
4047 case 0:
4048 return exp;
4050 case 1:
4051 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4052 if (op0 == TREE_OPERAND (exp, 0))
4053 return exp;
4055 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4056 break;
4058 case 2:
4059 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4060 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4062 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4063 return exp;
4065 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4066 break;
4068 case 3:
4069 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4070 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4071 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4073 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4074 && op2 == TREE_OPERAND (exp, 2))
4075 return exp;
4077 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4078 break;
4080 case 4:
4081 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4082 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4083 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4084 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4086 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4087 && op2 == TREE_OPERAND (exp, 2)
4088 && op3 == TREE_OPERAND (exp, 3))
4089 return exp;
4091 new_tree
4092 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4093 break;
4095 default:
4096 gcc_unreachable ();
4098 break;
4100 case tcc_vl_exp:
4102 int i;
4104 new_tree = NULL_TREE;
4106 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4108 tree op = TREE_OPERAND (exp, i);
4109 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4110 if (new_op != op)
4112 if (!new_tree)
4113 new_tree = copy_node (exp);
4114 TREE_OPERAND (new_tree, i) = new_op;
4118 if (new_tree)
4120 new_tree = fold (new_tree);
4121 if (TREE_CODE (new_tree) == CALL_EXPR)
4122 process_call_operands (new_tree);
4124 else
4125 return exp;
4127 break;
4129 default:
4130 gcc_unreachable ();
4133 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4135 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4136 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4138 return new_tree;
4142 /* Subroutine of stabilize_reference; this is called for subtrees of
4143 references. Any expression with side-effects must be put in a SAVE_EXPR
4144 to ensure that it is only evaluated once.
4146 We don't put SAVE_EXPR nodes around everything, because assigning very
4147 simple expressions to temporaries causes us to miss good opportunities
4148 for optimizations. Among other things, the opportunity to fold in the
4149 addition of a constant into an addressing mode often gets lost, e.g.
4150 "y[i+1] += x;". In general, we take the approach that we should not make
4151 an assignment unless we are forced into it - i.e., that any non-side effect
4152 operator should be allowed, and that cse should take care of coalescing
4153 multiple utterances of the same expression should that prove fruitful. */
4155 static tree
4156 stabilize_reference_1 (tree e)
4158 tree result;
4159 enum tree_code code = TREE_CODE (e);
4161 /* We cannot ignore const expressions because they might be references
4162 to const arrays whose indexes contain side-effects. But we can
4163 ignore things that are actually constant or that have already been
4164 handled by this function. */
4166 if (tree_invariant_p (e))
4167 return e;
4169 switch (TREE_CODE_CLASS (code))
4171 case tcc_exceptional:
4172 case tcc_type:
4173 case tcc_declaration:
4174 case tcc_comparison:
4175 case tcc_statement:
4176 case tcc_expression:
4177 case tcc_reference:
4178 case tcc_vl_exp:
4179 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4180 so that it will only be evaluated once. */
4181 /* The reference (r) and comparison (<) classes could be handled as
4182 below, but it is generally faster to only evaluate them once. */
4183 if (TREE_SIDE_EFFECTS (e))
4184 return save_expr (e);
4185 return e;
4187 case tcc_constant:
4188 /* Constants need no processing. In fact, we should never reach
4189 here. */
4190 return e;
4192 case tcc_binary:
4193 /* Division is slow and tends to be compiled with jumps,
4194 especially the division by powers of 2 that is often
4195 found inside of an array reference. So do it just once. */
4196 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4197 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4198 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4199 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4200 return save_expr (e);
4201 /* Recursively stabilize each operand. */
4202 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4203 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4204 break;
4206 case tcc_unary:
4207 /* Recursively stabilize each operand. */
4208 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4209 break;
4211 default:
4212 gcc_unreachable ();
4215 TREE_TYPE (result) = TREE_TYPE (e);
4216 TREE_READONLY (result) = TREE_READONLY (e);
4217 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4218 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4220 return result;
4223 /* Stabilize a reference so that we can use it any number of times
4224 without causing its operands to be evaluated more than once.
4225 Returns the stabilized reference. This works by means of save_expr,
4226 so see the caveats in the comments about save_expr.
4228 Also allows conversion expressions whose operands are references.
4229 Any other kind of expression is returned unchanged. */
4231 tree
4232 stabilize_reference (tree ref)
4234 tree result;
4235 enum tree_code code = TREE_CODE (ref);
4237 switch (code)
4239 case VAR_DECL:
4240 case PARM_DECL:
4241 case RESULT_DECL:
4242 /* No action is needed in this case. */
4243 return ref;
4245 CASE_CONVERT:
4246 case FLOAT_EXPR:
4247 case FIX_TRUNC_EXPR:
4248 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4249 break;
4251 case INDIRECT_REF:
4252 result = build_nt (INDIRECT_REF,
4253 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4254 break;
4256 case COMPONENT_REF:
4257 result = build_nt (COMPONENT_REF,
4258 stabilize_reference (TREE_OPERAND (ref, 0)),
4259 TREE_OPERAND (ref, 1), NULL_TREE);
4260 break;
4262 case BIT_FIELD_REF:
4263 result = build_nt (BIT_FIELD_REF,
4264 stabilize_reference (TREE_OPERAND (ref, 0)),
4265 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4266 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4267 break;
4269 case ARRAY_REF:
4270 result = build_nt (ARRAY_REF,
4271 stabilize_reference (TREE_OPERAND (ref, 0)),
4272 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4273 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4274 break;
4276 case ARRAY_RANGE_REF:
4277 result = build_nt (ARRAY_RANGE_REF,
4278 stabilize_reference (TREE_OPERAND (ref, 0)),
4279 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4280 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4281 break;
4283 case COMPOUND_EXPR:
4284 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4285 it wouldn't be ignored. This matters when dealing with
4286 volatiles. */
4287 return stabilize_reference_1 (ref);
4289 /* If arg isn't a kind of lvalue we recognize, make no change.
4290 Caller should recognize the error for an invalid lvalue. */
4291 default:
4292 return ref;
4294 case ERROR_MARK:
4295 return error_mark_node;
4298 TREE_TYPE (result) = TREE_TYPE (ref);
4299 TREE_READONLY (result) = TREE_READONLY (ref);
4300 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4301 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4303 return result;
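/* Example (illustrative; REF names a hypothetical lvalue tree such as an
   ARRAY_REF a[i]):

     tree stable = stabilize_reference (ref);

   STABLE can then be used several times, e.g. on both sides of a
   read-modify-write expansion, with the index handed to
   stabilize_reference_1 so it is wrapped in a SAVE_EXPR and evaluated
   only once.  */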
4306 /* Low-level constructors for expressions. */
4308 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4309 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4311 void
4312 recompute_tree_invariant_for_addr_expr (tree t)
4314 tree node;
4315 bool tc = true, se = false;
4317 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4319 /* We started out assuming this address is both invariant and constant, and
4320 does not have side effects. Now go down any handled components and see if
4321 any of them involve offsets that are either non-constant or non-invariant.
4322 Also check for side-effects.
4324 ??? Note that this code makes no attempt to deal with the case where
4325 taking the address of something causes a copy due to misalignment. */
4327 #define UPDATE_FLAGS(NODE) \
4328 do { tree _node = (NODE); \
4329 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4330 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4332 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4333 node = TREE_OPERAND (node, 0))
4335 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4336 array reference (probably made temporarily by the G++ front end),
4337 so ignore all the operands. */
4338 if ((TREE_CODE (node) == ARRAY_REF
4339 || TREE_CODE (node) == ARRAY_RANGE_REF)
4340 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4342 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4343 if (TREE_OPERAND (node, 2))
4344 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4345 if (TREE_OPERAND (node, 3))
4346 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4348 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4349 FIELD_DECL, apparently. The G++ front end can put something else
4350 there, at least temporarily. */
4351 else if (TREE_CODE (node) == COMPONENT_REF
4352 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4354 if (TREE_OPERAND (node, 2))
4355 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4359 node = lang_hooks.expr_to_decl (node, &tc, &se);
4361 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4362 the address, since &(*a)->b is a form of addition. If it's a constant, the
4363 address is constant too. If it's a decl, its address is constant if the
4364 decl is static. Everything else is not constant and, furthermore,
4365 taking the address of a volatile variable is not volatile. */
4366 if (TREE_CODE (node) == INDIRECT_REF
4367 || TREE_CODE (node) == MEM_REF)
4368 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4369 else if (CONSTANT_CLASS_P (node))
4371 else if (DECL_P (node))
4372 tc &= (staticp (node) != NULL_TREE);
4373 else
4375 tc = false;
4376 se |= TREE_SIDE_EFFECTS (node);
4380 TREE_CONSTANT (t) = tc;
4381 TREE_SIDE_EFFECTS (t) = se;
4382 #undef UPDATE_FLAGS
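/* Example (illustrative; ADDR and NEW_BASE are hypothetical trees): a caller
   that replaces the operand of an existing ADDR_EXPR re-derives its flags with

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);

   mirroring what build1 does below for freshly built ADDR_EXPRs.  */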
4385 /* Build an expression of code CODE, data type TYPE, and operands as
4386 specified. Expressions and reference nodes can be created this way.
4387 Constants, decls, types and misc nodes cannot be.
4389 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4390 enough for all extant tree codes. */
4392 tree
4393 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4395 tree t;
4397 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4399 t = make_node (code PASS_MEM_STAT);
4400 TREE_TYPE (t) = tt;
4402 return t;
4405 tree
4406 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4408 int length = sizeof (struct tree_exp);
4409 tree t;
4411 record_node_allocation_statistics (code, length);
4413 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4415 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4417 memset (t, 0, sizeof (struct tree_common));
4419 TREE_SET_CODE (t, code);
4421 TREE_TYPE (t) = type;
4422 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4423 TREE_OPERAND (t, 0) = node;
4424 if (node && !TYPE_P (node))
4426 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4427 TREE_READONLY (t) = TREE_READONLY (node);
4430 if (TREE_CODE_CLASS (code) == tcc_statement)
4431 TREE_SIDE_EFFECTS (t) = 1;
4432 else switch (code)
4434 case VA_ARG_EXPR:
4435 /* All of these have side-effects, no matter what their
4436 operands are. */
4437 TREE_SIDE_EFFECTS (t) = 1;
4438 TREE_READONLY (t) = 0;
4439 break;
4441 case INDIRECT_REF:
4442 /* Whether a dereference is readonly has nothing to do with whether
4443 its operand is readonly. */
4444 TREE_READONLY (t) = 0;
4445 break;
4447 case ADDR_EXPR:
4448 if (node)
4449 recompute_tree_invariant_for_addr_expr (t);
4450 break;
4452 default:
4453 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4454 && node && !TYPE_P (node)
4455 && TREE_CONSTANT (node))
4456 TREE_CONSTANT (t) = 1;
4457 if (TREE_CODE_CLASS (code) == tcc_reference
4458 && node && TREE_THIS_VOLATILE (node))
4459 TREE_THIS_VOLATILE (t) = 1;
4460 break;
4463 return t;
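/* Example (illustrative; OP is a hypothetical integer-typed tree):

     tree neg = build1 (NEGATE_EXPR, TREE_TYPE (op), op);

   TREE_CONSTANT and TREE_SIDE_EFFECTS on the result follow from OP as
   described in the switch above.  */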
4466 #define PROCESS_ARG(N) \
4467 do { \
4468 TREE_OPERAND (t, N) = arg##N; \
4468 if (arg##N && !TYPE_P (arg##N)) \
4471 if (TREE_SIDE_EFFECTS (arg##N)) \
4472 side_effects = 1; \
4473 if (!TREE_READONLY (arg##N) \
4474 && !CONSTANT_CLASS_P (arg##N)) \
4475 (void) (read_only = 0); \
4476 if (!TREE_CONSTANT (arg##N)) \
4477 (void) (constant = 0); \
4479 } while (0)
4481 tree
4482 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4484 bool constant, read_only, side_effects, div_by_zero;
4485 tree t;
4487 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4489 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4490 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4491 /* When sizetype precision doesn't match that of pointers
4492 we need to be able to build explicit extensions or truncations
4493 of the offset argument. */
4494 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4495 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4496 && TREE_CODE (arg1) == INTEGER_CST);
4498 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4499 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4500 && ptrofftype_p (TREE_TYPE (arg1)));
4502 t = make_node (code PASS_MEM_STAT);
4503 TREE_TYPE (t) = tt;
4505 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4506 result based on those same flags for the arguments. But if the
4507 arguments aren't really even `tree' expressions, we shouldn't be trying
4508 to do this. */
4510 /* Expressions without side effects may be constant if their
4511 arguments are as well. */
4512 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4513 || TREE_CODE_CLASS (code) == tcc_binary);
4514 read_only = 1;
4515 side_effects = TREE_SIDE_EFFECTS (t);
4517 switch (code)
4519 case TRUNC_DIV_EXPR:
4520 case CEIL_DIV_EXPR:
4521 case FLOOR_DIV_EXPR:
4522 case ROUND_DIV_EXPR:
4523 case EXACT_DIV_EXPR:
4524 case CEIL_MOD_EXPR:
4525 case FLOOR_MOD_EXPR:
4526 case ROUND_MOD_EXPR:
4527 case TRUNC_MOD_EXPR:
4528 div_by_zero = integer_zerop (arg1);
4529 break;
4530 default:
4531 div_by_zero = false;
4534 PROCESS_ARG (0);
4535 PROCESS_ARG (1);
4537 TREE_SIDE_EFFECTS (t) = side_effects;
4538 if (code == MEM_REF)
4540 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4542 tree o = TREE_OPERAND (arg0, 0);
4543 TREE_READONLY (t) = TREE_READONLY (o);
4544 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4547 else
4549 TREE_READONLY (t) = read_only;
4550 /* Don't mark X / 0 as constant. */
4551 TREE_CONSTANT (t) = constant && !div_by_zero;
4552 TREE_THIS_VOLATILE (t)
4553 = (TREE_CODE_CLASS (code) == tcc_reference
4554 && arg0 && TREE_THIS_VOLATILE (arg0));
4557 return t;
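/* Example (illustrative; PTR is a hypothetical pointer-typed tree): a pointer
   offset must be expressed as POINTER_PLUS_EXPR with a sizetype offset,
   matching the assertion above:

     tree off = size_int (4);
     tree p1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
*/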
4561 tree
4562 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4563 tree arg2 MEM_STAT_DECL)
4565 bool constant, read_only, side_effects;
4566 tree t;
4568 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4569 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4571 t = make_node (code PASS_MEM_STAT);
4572 TREE_TYPE (t) = tt;
4574 read_only = 1;
4576 /* As a special exception, if COND_EXPR has NULL branches, we
4577 assume that it is a gimple statement and always consider
4578 it to have side effects. */
4579 if (code == COND_EXPR
4580 && tt == void_type_node
4581 && arg1 == NULL_TREE
4582 && arg2 == NULL_TREE)
4583 side_effects = true;
4584 else
4585 side_effects = TREE_SIDE_EFFECTS (t);
4587 PROCESS_ARG (0);
4588 PROCESS_ARG (1);
4589 PROCESS_ARG (2);
4591 if (code == COND_EXPR)
4592 TREE_READONLY (t) = read_only;
4594 TREE_SIDE_EFFECTS (t) = side_effects;
4595 TREE_THIS_VOLATILE (t)
4596 = (TREE_CODE_CLASS (code) == tcc_reference
4597 && arg0 && TREE_THIS_VOLATILE (arg0));
4599 return t;
4602 tree
4603 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4604 tree arg2, tree arg3 MEM_STAT_DECL)
4606 bool constant, read_only, side_effects;
4607 tree t;
4609 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4611 t = make_node (code PASS_MEM_STAT);
4612 TREE_TYPE (t) = tt;
4614 side_effects = TREE_SIDE_EFFECTS (t);
4616 PROCESS_ARG (0);
4617 PROCESS_ARG (1);
4618 PROCESS_ARG (2);
4619 PROCESS_ARG (3);
4621 TREE_SIDE_EFFECTS (t) = side_effects;
4622 TREE_THIS_VOLATILE (t)
4623 = (TREE_CODE_CLASS (code) == tcc_reference
4624 && arg0 && TREE_THIS_VOLATILE (arg0));
4626 return t;
4629 tree
4630 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4631 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4633 bool constant, read_only, side_effects;
4634 tree t;
4636 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4638 t = make_node (code PASS_MEM_STAT);
4639 TREE_TYPE (t) = tt;
4641 side_effects = TREE_SIDE_EFFECTS (t);
4643 PROCESS_ARG (0);
4644 PROCESS_ARG (1);
4645 PROCESS_ARG (2);
4646 PROCESS_ARG (3);
4647 PROCESS_ARG (4);
4649 TREE_SIDE_EFFECTS (t) = side_effects;
4650 if (code == TARGET_MEM_REF)
4652 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4654 tree o = TREE_OPERAND (arg0, 0);
4655 TREE_READONLY (t) = TREE_READONLY (o);
4656 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4659 else
4660 TREE_THIS_VOLATILE (t)
4661 = (TREE_CODE_CLASS (code) == tcc_reference
4662 && arg0 && TREE_THIS_VOLATILE (arg0));
4664 return t;
4667 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4668 on the pointer PTR. */
4670 tree
4671 build_simple_mem_ref_loc (location_t loc, tree ptr)
4673 HOST_WIDE_INT offset = 0;
4674 tree ptype = TREE_TYPE (ptr);
4675 tree tem;
4676 /* For convenience allow addresses that collapse to a simple base
4677 and offset. */
4678 if (TREE_CODE (ptr) == ADDR_EXPR
4679 && (handled_component_p (TREE_OPERAND (ptr, 0))
4680 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4682 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4683 gcc_assert (ptr);
4684 ptr = build_fold_addr_expr (ptr);
4685 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4687 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4688 ptr, build_int_cst (ptype, offset));
4689 SET_EXPR_LOCATION (tem, loc);
4690 return tem;
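/* Example (illustrative; LOC and PTR are hypothetical):

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   builds the equivalent of *PTR as a MEM_REF with a zero offset, as
   constructed above.  */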
4693 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4695 offset_int
4696 mem_ref_offset (const_tree t)
4698 return offset_int::from (wi::to_wide (TREE_OPERAND (t, 1)), SIGNED);
4701 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4702 offsetted by OFFSET units. */
4704 tree
4705 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4707 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4708 build_fold_addr_expr (base),
4709 build_int_cst (ptr_type_node, offset));
4710 tree addr = build1 (ADDR_EXPR, type, ref);
4711 recompute_tree_invariant_for_addr_expr (addr);
4712 return addr;
4715 /* Similar except don't specify the TREE_TYPE
4716 and leave the TREE_SIDE_EFFECTS as 0.
4717 It is permissible for arguments to be null,
4718 or even garbage if their values do not matter. */
4720 tree
4721 build_nt (enum tree_code code, ...)
4723 tree t;
4724 int length;
4725 int i;
4726 va_list p;
4728 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4730 va_start (p, code);
4732 t = make_node (code);
4733 length = TREE_CODE_LENGTH (code);
4735 for (i = 0; i < length; i++)
4736 TREE_OPERAND (t, i) = va_arg (p, tree);
4738 va_end (p);
4739 return t;
4742 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4743 tree vec. */
4745 tree
4746 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4748 tree ret, t;
4749 unsigned int ix;
4751 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4752 CALL_EXPR_FN (ret) = fn;
4753 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4754 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4755 CALL_EXPR_ARG (ret, ix) = t;
4756 return ret;
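/* Example (illustrative; FN and ARGS are hypothetical):

     tree call = build_nt_call_vec (fn, args);

   Like build_nt above, this leaves TREE_TYPE unset and TREE_SIDE_EFFECTS
   at 0; the caller is expected to fill those in later.  */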
4759 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4760 We do NOT enter this node in any sort of symbol table.
4762 LOC is the location of the decl.
4764 layout_decl is used to set up the decl's storage layout.
4765 Other slots are initialized to 0 or null pointers. */
4767 tree
4768 build_decl (location_t loc, enum tree_code code, tree name,
4769 tree type MEM_STAT_DECL)
4771 tree t;
4773 t = make_node (code PASS_MEM_STAT);
4774 DECL_SOURCE_LOCATION (t) = loc;
4776 /* if (type == error_mark_node)
4777 type = integer_type_node; */
4778 /* That is not done, deliberately, so that having error_mark_node
4779 as the type can suppress useless errors in the use of this variable. */
4781 DECL_NAME (t) = name;
4782 TREE_TYPE (t) = type;
4784 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4785 layout_decl (t, 0);
4787 return t;
4790 /* Builds and returns function declaration with NAME and TYPE. */
4792 tree
4793 build_fn_decl (const char *name, tree type)
4795 tree id = get_identifier (name);
4796 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4798 DECL_EXTERNAL (decl) = 1;
4799 TREE_PUBLIC (decl) = 1;
4800 DECL_ARTIFICIAL (decl) = 1;
4801 TREE_NOTHROW (decl) = 1;
4803 return decl;
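/* Example (illustrative; the name "my_helper" is hypothetical): an external
   helper taking no arguments and returning void:

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("my_helper", fntype);

   The flags set above mark it artificial, nothrow, public and external.  */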
4806 vec<tree, va_gc> *all_translation_units;
4808 /* Builds a new translation-unit decl with name NAME, queues it in the
4809 global list of translation-unit decls and returns it. */
4811 tree
4812 build_translation_unit_decl (tree name)
4814 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4815 name, NULL_TREE);
4816 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4817 vec_safe_push (all_translation_units, tu);
4818 return tu;
4822 /* BLOCK nodes are used to represent the structure of binding contours
4823 and declarations, once those contours have been exited and their contents
4824 compiled. This information is used for outputting debugging info. */
4826 tree
4827 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4829 tree block = make_node (BLOCK);
4831 BLOCK_VARS (block) = vars;
4832 BLOCK_SUBBLOCKS (block) = subblocks;
4833 BLOCK_SUPERCONTEXT (block) = supercontext;
4834 BLOCK_CHAIN (block) = chain;
4835 return block;
4839 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4841 LOC is the location to use in tree T. */
4843 void
4844 protected_set_expr_location (tree t, location_t loc)
4846 if (CAN_HAVE_LOCATION_P (t))
4847 SET_EXPR_LOCATION (t, loc);
4850 /* Reset the expression *EXPR_P, a size or position.
4852 ??? We could reset all non-constant sizes or positions. But it's cheap
4853 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4855 We need to reset self-referential sizes or positions because they cannot
4856 be gimplified and thus can contain a CALL_EXPR after the gimplification
4857 is finished, which will run afoul of LTO streaming. And they need to be
4858 reset to something essentially dummy but not constant, so as to preserve
4859 the properties of the object they are attached to. */
4861 static inline void
4862 free_lang_data_in_one_sizepos (tree *expr_p)
4864 tree expr = *expr_p;
4865 if (CONTAINS_PLACEHOLDER_P (expr))
4866 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4870 /* Reset all the fields in a binfo node BINFO. We only keep
4871 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4873 static void
4874 free_lang_data_in_binfo (tree binfo)
4876 unsigned i;
4877 tree t;
4879 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4881 BINFO_VIRTUALS (binfo) = NULL_TREE;
4882 BINFO_BASE_ACCESSES (binfo) = NULL;
4883 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4884 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4886 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4887 free_lang_data_in_binfo (t);
4891 /* Reset all language specific information still present in TYPE. */
4893 static void
4894 free_lang_data_in_type (tree type)
4896 gcc_assert (TYPE_P (type));
4898 /* Give the FE a chance to remove its own data first. */
4899 lang_hooks.free_lang_data (type);
4901 TREE_LANG_FLAG_0 (type) = 0;
4902 TREE_LANG_FLAG_1 (type) = 0;
4903 TREE_LANG_FLAG_2 (type) = 0;
4904 TREE_LANG_FLAG_3 (type) = 0;
4905 TREE_LANG_FLAG_4 (type) = 0;
4906 TREE_LANG_FLAG_5 (type) = 0;
4907 TREE_LANG_FLAG_6 (type) = 0;
4909 if (TREE_CODE (type) == FUNCTION_TYPE)
4911 /* Remove the const and volatile qualifiers from arguments. The
4912 C++ front end removes them, but the C front end does not,
4913 leading to false ODR violation errors when merging two
4914 instances of the same function signature compiled by
4915 different front ends. */
4916 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4918 tree arg_type = TREE_VALUE (p);
4920 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4922 int quals = TYPE_QUALS (arg_type)
4923 & ~TYPE_QUAL_CONST
4924 & ~TYPE_QUAL_VOLATILE;
4925 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4926 free_lang_data_in_type (TREE_VALUE (p));
4928 /* C++ FE uses TREE_PURPOSE to store initial values. */
4929 TREE_PURPOSE (p) = NULL;
4932 else if (TREE_CODE (type) == METHOD_TYPE)
4933 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4934 /* C++ FE uses TREE_PURPOSE to store initial values. */
4935 TREE_PURPOSE (p) = NULL;
4936 else if (RECORD_OR_UNION_TYPE_P (type))
4938 /* Remove members that are not FIELD_DECLs (and maybe
4939 TYPE_DECLs) from the field list of an aggregate. These occur
4940 in C++. */
4941 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
4942 if (TREE_CODE (member) == FIELD_DECL
4943 || (TREE_CODE (member) == TYPE_DECL
4944 && !DECL_IGNORED_P (member)
4945 && debug_info_level > DINFO_LEVEL_TERSE
4946 && !is_redundant_typedef (member)))
4947 prev = &DECL_CHAIN (member);
4948 else
4949 *prev = DECL_CHAIN (member);
4951 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
4952 and dangles the pointer from time to time. */
4953 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
4954 TYPE_VFIELD (type) = NULL_TREE;
4956 if (TYPE_BINFO (type))
4958 free_lang_data_in_binfo (TYPE_BINFO (type));
4959 /* We need to preserve the link to bases and virtual tables for all
4960 polymorphic types to keep the devirtualization machinery working.
4961 Debug output cares only about bases, but we also output
4962 virtual table pointers so that merging of -fdevirtualize and
4963 -fno-devirtualize units is easier. */
4964 if ((!BINFO_VTABLE (TYPE_BINFO (type))
4965 || !flag_devirtualize)
4966 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
4967 && !BINFO_VTABLE (TYPE_BINFO (type)))
4968 || debug_info_level != DINFO_LEVEL_NONE))
4969 TYPE_BINFO (type) = NULL;
4972 else if (INTEGRAL_TYPE_P (type)
4973 || SCALAR_FLOAT_TYPE_P (type)
4974 || FIXED_POINT_TYPE_P (type))
4976 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4977 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4980 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4982 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4983 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4985 if (TYPE_CONTEXT (type)
4986 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4988 tree ctx = TYPE_CONTEXT (type);
4989 do
4991 ctx = BLOCK_SUPERCONTEXT (ctx);
4993 while (ctx && TREE_CODE (ctx) == BLOCK);
4994 TYPE_CONTEXT (type) = ctx;
4999 /* Return true if DECL may need an assembler name to be set. */
5001 static inline bool
5002 need_assembler_name_p (tree decl)
5004 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5005 Rule merging. This makes type_odr_p return true on those types during
5006 LTO and, by comparing the mangled names, we can say which types are intended
5007 to be equivalent across compilation units.
5009 We do not store names of types for which type_in_anonymous_namespace_p is true.
5011 Record, union and enumeration types have linkage that allows us
5012 to check type_in_anonymous_namespace_p. We do not mangle compound types
5013 that can always be compared structurally.
5015 Similarly for builtin types, we compare properties of their main variant.
5016 Integer types are a special case: there, mangling does distinguish
5017 between char/signed char/unsigned char etc. Storing names for these allows
5018 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5019 See cp/mangle.c:write_builtin_type for details. */
5021 if (flag_lto_odr_type_mering
5022 && TREE_CODE (decl) == TYPE_DECL
5023 && DECL_NAME (decl)
5024 && decl == TYPE_NAME (TREE_TYPE (decl))
5025 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5026 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5027 && (type_with_linkage_p (TREE_TYPE (decl))
5028 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5029 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5030 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5031 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5032 if (!VAR_OR_FUNCTION_DECL_P (decl))
5033 return false;
5035 /* If DECL already has its assembler name set, it does not need a
5036 new one. */
5037 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5038 || DECL_ASSEMBLER_NAME_SET_P (decl))
5039 return false;
5041 /* Abstract decls do not need an assembler name. */
5042 if (DECL_ABSTRACT_P (decl))
5043 return false;
5045 /* For VAR_DECLs, only static, public and external symbols need an
5046 assembler name. */
5047 if (VAR_P (decl)
5048 && !TREE_STATIC (decl)
5049 && !TREE_PUBLIC (decl)
5050 && !DECL_EXTERNAL (decl))
5051 return false;
5053 if (TREE_CODE (decl) == FUNCTION_DECL)
5055 /* Do not set assembler name on builtins. Allow RTL expansion to
5056 decide whether to expand inline or via a regular call. */
5057 if (DECL_BUILT_IN (decl)
5058 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5059 return false;
5061 /* Functions represented in the callgraph need an assembler name. */
5062 if (cgraph_node::get (decl) != NULL)
5063 return true;
5065 /* Unused and not public functions don't need an assembler name. */
5066 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5067 return false;
5070 return true;
5074 /* Reset all language specific information still present in symbol
5075 DECL. */
5077 static void
5078 free_lang_data_in_decl (tree decl)
5080 gcc_assert (DECL_P (decl));
5082 /* Give the FE a chance to remove its own data first. */
5083 lang_hooks.free_lang_data (decl);
5085 TREE_LANG_FLAG_0 (decl) = 0;
5086 TREE_LANG_FLAG_1 (decl) = 0;
5087 TREE_LANG_FLAG_2 (decl) = 0;
5088 TREE_LANG_FLAG_3 (decl) = 0;
5089 TREE_LANG_FLAG_4 (decl) = 0;
5090 TREE_LANG_FLAG_5 (decl) = 0;
5091 TREE_LANG_FLAG_6 (decl) = 0;
5093 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5094 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5095 if (TREE_CODE (decl) == FIELD_DECL)
5097 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5098 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5099 DECL_QUALIFIER (decl) = NULL_TREE;
5102 if (TREE_CODE (decl) == FUNCTION_DECL)
5104 struct cgraph_node *node;
5105 if (!(node = cgraph_node::get (decl))
5106 || (!node->definition && !node->clones))
5108 if (node)
5109 node->release_body ();
5110 else
5112 release_function_body (decl);
5113 DECL_ARGUMENTS (decl) = NULL;
5114 DECL_RESULT (decl) = NULL;
5115 DECL_INITIAL (decl) = error_mark_node;
5118 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5120 tree t;
5122 /* If DECL has a gimple body, then the context for its
5123 arguments must be DECL. Otherwise, it doesn't really
5124 matter, as we will not be emitting any code for DECL. In
5125 general, there may be other instances of DECL created by
5126 the front end and since PARM_DECLs are generally shared,
5127 their DECL_CONTEXT changes as the replicas of DECL are
5128 created. The only time where DECL_CONTEXT is important
5129 is for the FUNCTION_DECLs that have a gimple body (since
5130 the PARM_DECL will be used in the function's body). */
5131 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5132 DECL_CONTEXT (t) = decl;
5133 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5134 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5135 = target_option_default_node;
5136 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5137 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5138 = optimization_default_node;
5141 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5142 At this point, it is not needed anymore. */
5143 DECL_SAVED_TREE (decl) = NULL_TREE;
5145 /* Clear the abstract origin if it refers to a method.
5146 Otherwise dwarf2out.c will ICE as we splice functions out of
5147 TYPE_FIELDS and thus the origin will not be output
5148 correctly. */
5149 if (DECL_ABSTRACT_ORIGIN (decl)
5150 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5151 && RECORD_OR_UNION_TYPE_P
5152 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5153 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5155 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5156 DECL_VINDEX referring to itself into a vtable slot number as it
5157 should. This happens with functions that are copied and then forgotten
5158 about. Just clear it; it won't matter anymore. */
5159 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5160 DECL_VINDEX (decl) = NULL_TREE;
5162 else if (VAR_P (decl))
5164 if ((DECL_EXTERNAL (decl)
5165 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5166 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5167 DECL_INITIAL (decl) = NULL_TREE;
5169 else if (TREE_CODE (decl) == TYPE_DECL)
5171 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5172 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5173 DECL_INITIAL (decl) = NULL_TREE;
5175 else if (TREE_CODE (decl) == FIELD_DECL)
5176 DECL_INITIAL (decl) = NULL_TREE;
5177 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5178 && DECL_INITIAL (decl)
5179 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5181 /* Strip builtins from the translation-unit BLOCK. We still have targets
5182 without builtin_decl_explicit support and also builtins are shared
5183 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5184 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5185 while (*nextp)
5187 tree var = *nextp;
5188 if (TREE_CODE (var) == FUNCTION_DECL
5189 && DECL_BUILT_IN (var))
5190 *nextp = TREE_CHAIN (var);
5191 else
5192 nextp = &TREE_CHAIN (var);
5198 /* Data used when collecting DECLs and TYPEs for language data removal. */
5200 struct free_lang_data_d
5202 free_lang_data_d () : decls (100), types (100) {}
5204 /* Worklist to avoid excessive recursion. */
5205 auto_vec<tree> worklist;
5207 /* Set of traversed objects. Used to avoid duplicate visits. */
5208 hash_set<tree> pset;
5210 /* Array of symbols to process with free_lang_data_in_decl. */
5211 auto_vec<tree> decls;
5213 /* Array of types to process with free_lang_data_in_type. */
5214 auto_vec<tree> types;
5218 /* Save all language fields needed to generate proper debug information
5219 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5221 static void
5222 save_debug_info_for_decl (tree t)
5224 /*struct saved_debug_info_d *sdi;*/
5226 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5228 /* FIXME. Partial implementation for saving debug info removed. */
5232 /* Save all language fields needed to generate proper debug information
5233 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5235 static void
5236 save_debug_info_for_type (tree t)
5238 /*struct saved_debug_info_d *sdi;*/
5240 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5242 /* FIXME. Partial implementation for saving debug info removed. */
5246 /* Add type or decl T to one of the list of tree nodes that need their
5247 language data removed. The lists are held inside FLD. */
5249 static void
5250 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5252 if (DECL_P (t))
5254 fld->decls.safe_push (t);
5255 if (debug_info_level > DINFO_LEVEL_TERSE)
5256 save_debug_info_for_decl (t);
5258 else if (TYPE_P (t))
5260 fld->types.safe_push (t);
5261 if (debug_info_level > DINFO_LEVEL_TERSE)
5262 save_debug_info_for_type (t);
5264 else
5265 gcc_unreachable ();
5268 /* Push tree node T into FLD->WORKLIST. */
5270 static inline void
5271 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5273 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5274 fld->worklist.safe_push ((t));
5278 /* Operand callback helper for free_lang_data_in_node. *TP is the
5279 subtree operand being considered. */
5281 static tree
5282 find_decls_types_r (tree *tp, int *ws, void *data)
5284 tree t = *tp;
5285 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5287 if (TREE_CODE (t) == TREE_LIST)
5288 return NULL_TREE;
5290 /* Language specific nodes will be removed, so there is no need
5291 to gather anything under them. */
5292 if (is_lang_specific (t))
5294 *ws = 0;
5295 return NULL_TREE;
5298 if (DECL_P (t))
5300 /* Note that walk_tree does not traverse every possible field in
5301 decls, so we have to do our own traversals here. */
5302 add_tree_to_fld_list (t, fld);
5304 fld_worklist_push (DECL_NAME (t), fld);
5305 fld_worklist_push (DECL_CONTEXT (t), fld);
5306 fld_worklist_push (DECL_SIZE (t), fld);
5307 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5309 /* We are going to remove everything under DECL_INITIAL for
5310 TYPE_DECLs. No point walking them. */
5311 if (TREE_CODE (t) != TYPE_DECL)
5312 fld_worklist_push (DECL_INITIAL (t), fld);
5314 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5315 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5317 if (TREE_CODE (t) == FUNCTION_DECL)
5319 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5320 fld_worklist_push (DECL_RESULT (t), fld);
5322 else if (TREE_CODE (t) == TYPE_DECL)
5324 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5326 else if (TREE_CODE (t) == FIELD_DECL)
5328 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5329 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5330 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5331 fld_worklist_push (DECL_FCONTEXT (t), fld);
5334 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5335 && DECL_HAS_VALUE_EXPR_P (t))
5336 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5338 if (TREE_CODE (t) != FIELD_DECL
5339 && TREE_CODE (t) != TYPE_DECL)
5340 fld_worklist_push (TREE_CHAIN (t), fld);
5341 *ws = 0;
5343 else if (TYPE_P (t))
5345 /* Note that walk_tree does not traverse every possible field in
5346 types, so we have to do our own traversals here. */
5347 add_tree_to_fld_list (t, fld);
5349 if (!RECORD_OR_UNION_TYPE_P (t))
5350 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5351 fld_worklist_push (TYPE_SIZE (t), fld);
5352 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5353 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5354 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5355 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5356 fld_worklist_push (TYPE_NAME (t), fld);
5357 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5358 them and thus do not want to reach unused pointer types
5359 this way. */
5360 if (!POINTER_TYPE_P (t))
5361 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5362 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5363 if (!RECORD_OR_UNION_TYPE_P (t))
5364 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5365 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5366 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5367 do not want to reach unused variants this way. */
5368 if (TYPE_CONTEXT (t))
5370 tree ctx = TYPE_CONTEXT (t);
5371 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5372 So push that instead. */
5373 while (ctx && TREE_CODE (ctx) == BLOCK)
5374 ctx = BLOCK_SUPERCONTEXT (ctx);
5375 fld_worklist_push (ctx, fld);
5377 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5378 want to reach unused types this way. */
5380 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5382 unsigned i;
5383 tree tem;
5384 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5385 fld_worklist_push (TREE_TYPE (tem), fld);
5386 fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t)), fld);
5388 if (RECORD_OR_UNION_TYPE_P (t))
5390 tree tem;
5391 /* Push all TYPE_FIELDS - interesting and non-interesting entries
5392 can be interleaved. */
5393 tem = TYPE_FIELDS (t);
5394 while (tem)
5396 if (TREE_CODE (tem) == FIELD_DECL
5397 || (TREE_CODE (tem) == TYPE_DECL
5398 && !DECL_IGNORED_P (tem)
5399 && debug_info_level > DINFO_LEVEL_TERSE
5400 && !is_redundant_typedef (tem)))
5401 fld_worklist_push (tem, fld);
5402 tem = TREE_CHAIN (tem);
5406 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5407 *ws = 0;
5409 else if (TREE_CODE (t) == BLOCK)
5411 tree tem;
5412 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5413 fld_worklist_push (tem, fld);
5414 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5415 fld_worklist_push (tem, fld);
5416 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5419 if (TREE_CODE (t) != IDENTIFIER_NODE
5420 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5421 fld_worklist_push (TREE_TYPE (t), fld);
5423 return NULL_TREE;
5427 /* Find decls and types in T. */
5429 static void
5430 find_decls_types (tree t, struct free_lang_data_d *fld)
5432 while (1)
5434 if (!fld->pset.contains (t))
5435 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5436 if (fld->worklist.is_empty ())
5437 break;
5438 t = fld->worklist.pop ();
5442 /* Translate all the types in LIST with the corresponding runtime
5443 types. */
5445 static tree
5446 get_eh_types_for_runtime (tree list)
5448 tree head, prev;
5450 if (list == NULL_TREE)
5451 return NULL_TREE;
5453 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5454 prev = head;
5455 list = TREE_CHAIN (list);
5456 while (list)
5458 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5459 TREE_CHAIN (prev) = n;
5460 prev = TREE_CHAIN (prev);
5461 list = TREE_CHAIN (list);
5464 return head;
5468 /* Find decls and types referenced in EH region R and store them in
5469 FLD->DECLS and FLD->TYPES. */
5471 static void
5472 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5474 switch (r->type)
5476 case ERT_CLEANUP:
5477 break;
5479 case ERT_TRY:
5481 eh_catch c;
5483 /* The types referenced in each catch must first be changed to the
5484 EH types used at runtime. This removes references to FE types
5485 in the region. */
5486 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5488 c->type_list = get_eh_types_for_runtime (c->type_list);
5489 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5492 break;
5494 case ERT_ALLOWED_EXCEPTIONS:
5495 r->u.allowed.type_list
5496 = get_eh_types_for_runtime (r->u.allowed.type_list);
5497 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5498 break;
5500 case ERT_MUST_NOT_THROW:
5501 walk_tree (&r->u.must_not_throw.failure_decl,
5502 find_decls_types_r, fld, &fld->pset);
5503 break;
5508 /* Find decls and types referenced in cgraph node N and store them in
5509 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5510 look for *every* kind of DECL and TYPE node reachable from N,
5511 including those embedded inside types and decls (i.e., TYPE_DECLs,
5512 NAMESPACE_DECLs, etc). */
5514 static void
5515 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5517 basic_block bb;
5518 struct function *fn;
5519 unsigned ix;
5520 tree t;
5522 find_decls_types (n->decl, fld);
5524 if (!gimple_has_body_p (n->decl))
5525 return;
5527 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5529 fn = DECL_STRUCT_FUNCTION (n->decl);
5531 /* Traverse locals. */
5532 FOR_EACH_LOCAL_DECL (fn, ix, t)
5533 find_decls_types (t, fld);
5535 /* Traverse EH regions in FN. */
5537 eh_region r;
5538 FOR_ALL_EH_REGION_FN (r, fn)
5539 find_decls_types_in_eh_region (r, fld);
5542 /* Traverse every statement in FN. */
5543 FOR_EACH_BB_FN (bb, fn)
5545 gphi_iterator psi;
5546 gimple_stmt_iterator si;
5547 unsigned i;
5549 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5551 gphi *phi = psi.phi ();
5553 for (i = 0; i < gimple_phi_num_args (phi); i++)
5555 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5556 find_decls_types (*arg_p, fld);
5560 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5562 gimple *stmt = gsi_stmt (si);
5564 if (is_gimple_call (stmt))
5565 find_decls_types (gimple_call_fntype (stmt), fld);
5567 for (i = 0; i < gimple_num_ops (stmt); i++)
5569 tree arg = gimple_op (stmt, i);
5570 find_decls_types (arg, fld);
5577 /* Find decls and types referenced in varpool node N and store them in
5578 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5579 look for *every* kind of DECL and TYPE node reachable from N,
5580 including those embedded inside types and decls (i.e., TYPE_DECLs,
5581 NAMESPACE_DECLs, etc). */
5583 static void
5584 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5586 find_decls_types (v->decl, fld);
5589 /* If T needs an assembler name, have one created for it. */
5591 void
5592 assign_assembler_name_if_needed (tree t)
5594 if (need_assembler_name_p (t))
5596 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5597 diagnostics that use input_location to show locus
5598 information. The problem here is that, at this point,
5599 input_location is generally anchored to the end of the file
5600 (since the parser is long gone), so we don't have a good
5601 position to pin it to.
5603 To alleviate this problem, this uses the location of T's
5604 declaration. Examples of this are
5605 testsuite/g++.dg/template/cond2.C and
5606 testsuite/g++.dg/template/pr35240.C. */
5607 location_t saved_location = input_location;
5608 input_location = DECL_SOURCE_LOCATION (t);
5610 decl_assembler_name (t);
5612 input_location = saved_location;
5617 /* Free language specific information for every operand and expression
5618 in every node of the call graph. This process operates in three stages:
5620 1- Every callgraph node and varpool node is traversed looking for
5621 decls and types embedded in them. This is a more exhaustive
5622 search than that done by find_referenced_vars, because it will
5623 also collect individual fields, decls embedded in types, etc.
5625 2- All the decls found are sent to free_lang_data_in_decl.
5627 3- All the types found are sent to free_lang_data_in_type.
5629 The ordering between decls and types is important because
5630 free_lang_data_in_decl sets assembler names, which includes
5631 mangling. So types cannot be freed up until assembler names have
5632 been set up. */
5634 static void
5635 free_lang_data_in_cgraph (void)
5637 struct cgraph_node *n;
5638 varpool_node *v;
5639 struct free_lang_data_d fld;
5640 tree t;
5641 unsigned i;
5642 alias_pair *p;
5644 /* Find decls and types in the body of every function in the callgraph. */
5645 FOR_EACH_FUNCTION (n)
5646 find_decls_types_in_node (n, &fld);
5648 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5649 find_decls_types (p->decl, &fld);
5651 /* Find decls and types in every varpool symbol. */
5652 FOR_EACH_VARIABLE (v)
5653 find_decls_types_in_var (v, &fld);
5655 /* Set the assembler name on every decl found. We need to do this
5656 now because free_lang_data_in_decl will invalidate data needed
5657 for mangling. This breaks mangling on interdependent decls. */
5658 FOR_EACH_VEC_ELT (fld.decls, i, t)
5659 assign_assembler_name_if_needed (t);
5661 /* Traverse every decl found freeing its language data. */
5662 FOR_EACH_VEC_ELT (fld.decls, i, t)
5663 free_lang_data_in_decl (t);
5665 /* Traverse every type found freeing its language data. */
5666 FOR_EACH_VEC_ELT (fld.types, i, t)
5667 free_lang_data_in_type (t);
5668 if (flag_checking)
5670 FOR_EACH_VEC_ELT (fld.types, i, t)
5671 verify_type (t);
5676 /* Free resources that are used by the FE but are not needed once it is done. */
5678 static unsigned
5679 free_lang_data (void)
5681 unsigned i;
5683 /* If we are the LTO frontend we have freed lang-specific data already. */
5684 if (in_lto_p
5685 || (!flag_generate_lto && !flag_generate_offload))
5686 return 0;
5688 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
5689 if (vec_safe_is_empty (all_translation_units))
5690 build_translation_unit_decl (NULL_TREE);
5692 /* Allocate and assign alias sets to the standard integer types
5693 while the slots are still set up the way the frontends generated them. */
5694 for (i = 0; i < itk_none; ++i)
5695 if (integer_types[i])
5696 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5698 /* Traverse the IL resetting language specific information for
5699 operands, expressions, etc. */
5700 free_lang_data_in_cgraph ();
5702 /* Create gimple variants for common types. */
5703 for (unsigned i = 0;
5704 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
5705 ++i)
5706 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
5708 /* Reset some langhooks. Do not reset types_compatible_p, it may
5709 still be used indirectly via the get_alias_set langhook. */
5710 lang_hooks.dwarf_name = lhd_dwarf_name;
5711 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5712 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5714 /* We do not want the default decl_assembler_name implementation;
5715 rather, once we have fixed everything, we want a wrapper around it
5716 that asserts all non-local symbols already got their assembler
5717 names and only produces assembler names for local symbols. Or rather,
5718 make sure we never call decl_assembler_name on local symbols and
5719 devise a separate, middle-end private scheme for it. */
5721 /* Reset diagnostic machinery. */
5722 tree_diagnostics_defaults (global_dc);
5724 return 0;
5728 namespace {
5730 const pass_data pass_data_ipa_free_lang_data =
5732 SIMPLE_IPA_PASS, /* type */
5733 "*free_lang_data", /* name */
5734 OPTGROUP_NONE, /* optinfo_flags */
5735 TV_IPA_FREE_LANG_DATA, /* tv_id */
5736 0, /* properties_required */
5737 0, /* properties_provided */
5738 0, /* properties_destroyed */
5739 0, /* todo_flags_start */
5740 0, /* todo_flags_finish */
5743 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5745 public:
5746 pass_ipa_free_lang_data (gcc::context *ctxt)
5747 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5750 /* opt_pass methods: */
5751 virtual unsigned int execute (function *) { return free_lang_data (); }
5753 }; // class pass_ipa_free_lang_data
5755 } // anon namespace
5757 simple_ipa_opt_pass *
5758 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5760 return new pass_ipa_free_lang_data (ctxt);
5763 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5764 of the various TYPE_QUAL values. */
5766 static void
5767 set_type_quals (tree type, int type_quals)
5769 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5770 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5771 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5772 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5773 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5776 /* Returns true iff CAND and BASE have equivalent language-specific
5777 qualifiers. */
5779 bool
5780 check_lang_type (const_tree cand, const_tree base)
5782 if (lang_hooks.types.type_hash_eq == NULL)
5783 return true;
5784 /* type_hash_eq currently only applies to these types. */
5785 if (TREE_CODE (cand) != FUNCTION_TYPE
5786 && TREE_CODE (cand) != METHOD_TYPE)
5787 return true;
5788 return lang_hooks.types.type_hash_eq (cand, base);
5791 /* Returns true iff unqualified CAND and BASE are equivalent. */
5793 bool
5794 check_base_type (const_tree cand, const_tree base)
5796 return (TYPE_NAME (cand) == TYPE_NAME (base)
5797 /* Apparently this is needed for Objective-C. */
5798 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5799 /* Check alignment. */
5800 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5801 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5802 TYPE_ATTRIBUTES (base)));
5805 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5807 bool
5808 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5810 return (TYPE_QUALS (cand) == type_quals
5811 && check_base_type (cand, base)
5812 && check_lang_type (cand, base));
5815 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5817 static bool
5818 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5820 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5821 && TYPE_NAME (cand) == TYPE_NAME (base)
5822 /* Apparently this is needed for Objective-C. */
5823 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5824 /* Check alignment. */
5825 && TYPE_ALIGN (cand) == align
5826 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5827 TYPE_ATTRIBUTES (base))
5828 && check_lang_type (cand, base));
5831 /* This function checks to see if TYPE matches the size of one of the built-in
5832 atomic types, and returns that core atomic type. */
5834 static tree
5835 find_atomic_core_type (tree type)
5837 tree base_atomic_type;
5839 /* Only handle complete types. */
5840 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5841 return NULL_TREE;
5843 switch (tree_to_uhwi (TYPE_SIZE (type)))
5845 case 8:
5846 base_atomic_type = atomicQI_type_node;
5847 break;
5849 case 16:
5850 base_atomic_type = atomicHI_type_node;
5851 break;
5853 case 32:
5854 base_atomic_type = atomicSI_type_node;
5855 break;
5857 case 64:
5858 base_atomic_type = atomicDI_type_node;
5859 break;
5861 case 128:
5862 base_atomic_type = atomicTI_type_node;
5863 break;
5865 default:
5866 base_atomic_type = NULL_TREE;
5869 return base_atomic_type;
5872 /* Return a version of the TYPE, qualified as indicated by the
5873 TYPE_QUALS, if one exists. If no qualified version exists yet,
5874 return NULL_TREE. */
5876 tree
5877 get_qualified_type (tree type, int type_quals)
5879 tree t;
5881 if (TYPE_QUALS (type) == type_quals)
5882 return type;
5884 /* Search the chain of variants to see if there is already one there just
5885 like the one we need to have. If so, use that existing one. We must
5886 preserve the TYPE_NAME, since there is code that depends on this. */
5887 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5888 if (check_qualified_type (t, type, type_quals))
5889 return t;
5891 return NULL_TREE;
5894 /* Like get_qualified_type, but creates the type if it does not
5895 exist. This function never returns NULL_TREE. */
5897 tree
5898 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5900 tree t;
5902 /* See if we already have the appropriate qualified variant. */
5903 t = get_qualified_type (type, type_quals);
5905 /* If not, build it. */
5906 if (!t)
5908 t = build_variant_type_copy (type PASS_MEM_STAT);
5909 set_type_quals (t, type_quals);
5911 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5913 /* See if this object can map to a basic atomic type. */
5914 tree atomic_type = find_atomic_core_type (type);
5915 if (atomic_type)
5917 /* Ensure the alignment of this type is compatible with
5918 the required alignment of the atomic type. */
5919 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5920 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5924 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5925 /* Propagate structural equality. */
5926 SET_TYPE_STRUCTURAL_EQUALITY (t);
5927 else if (TYPE_CANONICAL (type) != type)
5928 /* Build the underlying canonical type, since it is different
5929 from TYPE. */
5931 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5932 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5934 else
5935 /* T is its own canonical type. */
5936 TYPE_CANONICAL (t) = t;
5940 return t;
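/* Example (illustrative; T is a hypothetical type):

     tree ct = build_qualified_type (t, TYPE_QUAL_CONST);

   returns an existing const-qualified variant when get_qualified_type
   finds one on the variant chain, and builds a new one otherwise.  */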
5943 /* Create a variant of type T with alignment ALIGN. */
5945 tree
5946 build_aligned_type (tree type, unsigned int align)
5948 tree t;
5950 if (TYPE_PACKED (type)
5951 || TYPE_ALIGN (type) == align)
5952 return type;
5954 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5955 if (check_aligned_type (t, type, align))
5956 return t;
5958 t = build_variant_type_copy (type);
5959 SET_TYPE_ALIGN (t, align);
5960 TYPE_USER_ALIGN (t) = 1;
5962 return t;
5965 /* Create a new distinct copy of TYPE. The new type is made its own
5966 MAIN_VARIANT. If TYPE requires structural equality checks, the
5967 resulting type requires structural equality checks; otherwise, its
5968 TYPE_CANONICAL points to itself. */
5970 tree
5971 build_distinct_type_copy (tree type MEM_STAT_DECL)
5973 tree t = copy_node (type PASS_MEM_STAT);
5975 TYPE_POINTER_TO (t) = 0;
5976 TYPE_REFERENCE_TO (t) = 0;
5978 /* Set the canonical type either to a new equivalence class, or
5979 propagate the need for structural equality checks. */
5980 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5981 SET_TYPE_STRUCTURAL_EQUALITY (t);
5982 else
5983 TYPE_CANONICAL (t) = t;
5985 /* Make it its own variant. */
5986 TYPE_MAIN_VARIANT (t) = t;
5987 TYPE_NEXT_VARIANT (t) = 0;
5989 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5990 whose TREE_TYPE is not t. This can also happen in the Ada
5991 frontend when using subtypes. */
5993 return t;
5996 /* Create a new variant of TYPE, equivalent but distinct. This is so
5997 the caller can modify it. TYPE_CANONICAL for the return type will
5998 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5999 are considered equal by the language itself (or that both types
6000 require structural equality checks). */
6002 tree
6003 build_variant_type_copy (tree type MEM_STAT_DECL)
6005 tree t, m = TYPE_MAIN_VARIANT (type);
6007 t = build_distinct_type_copy (type PASS_MEM_STAT);
6009 /* Since we're building a variant, assume that it is a non-semantic
6010 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6011 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6012 /* Type variants have no alias set defined. */
6013 TYPE_ALIAS_SET (t) = -1;
6015 /* Add the new type to the chain of variants of TYPE. */
6016 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6017 TYPE_NEXT_VARIANT (m) = t;
6018 TYPE_MAIN_VARIANT (t) = m;
6020 return t;
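/* Example (illustrative; TYPE is hypothetical): a variant that the caller
   can adjust, e.g. by dropping its TYPE_NAME, while remaining canonically
   equal to TYPE:

     tree v = build_variant_type_copy (type);
     TYPE_NAME (v) = NULL_TREE;

   build_distinct_type_copy above starts a new main variant instead.  */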
6023 /* Return true if the FROM trees in both tree maps are equal. */
6025 int
6026 tree_map_base_eq (const void *va, const void *vb)
6028 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6029 *const b = (const struct tree_map_base *) vb;
6030 return (a->from == b->from);
6033 /* Hash a from tree in a tree_base_map. */
6035 unsigned int
6036 tree_map_base_hash (const void *item)
6038 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6041 /* Return true if this tree map structure is marked for garbage collection
6042 purposes. We simply return true if the from tree is marked, so that this
6043 structure goes away when the from tree goes away. */
6045 int
6046 tree_map_base_marked_p (const void *p)
6048 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6051 /* Hash a from tree in a tree_map. */
6053 unsigned int
6054 tree_map_hash (const void *item)
6056 return (((const struct tree_map *) item)->hash);
6059 /* Hash a from tree in a tree_decl_map. */
6061 unsigned int
6062 tree_decl_map_hash (const void *item)
6064 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6067 /* Return the initialization priority for DECL. */
6069 priority_type
6070 decl_init_priority_lookup (tree decl)
6072 symtab_node *snode = symtab_node::get (decl);
6074 if (!snode)
6075 return DEFAULT_INIT_PRIORITY;
6076 return
6077 snode->get_init_priority ();
6080 /* Return the finalization priority for DECL. */
6082 priority_type
6083 decl_fini_priority_lookup (tree decl)
6085 cgraph_node *node = cgraph_node::get (decl);
6087 if (!node)
6088 return DEFAULT_INIT_PRIORITY;
6089 return
6090 node->get_fini_priority ();
6093 /* Set the initialization priority for DECL to PRIORITY. */
6095 void
6096 decl_init_priority_insert (tree decl, priority_type priority)
6098 struct symtab_node *snode;
6100 if (priority == DEFAULT_INIT_PRIORITY)
6102 snode = symtab_node::get (decl);
6103 if (!snode)
6104 return;
6106 else if (VAR_P (decl))
6107 snode = varpool_node::get_create (decl);
6108 else
6109 snode = cgraph_node::get_create (decl);
6110 snode->set_init_priority (priority);
6113 /* Set the finalization priority for DECL to PRIORITY. */
6115 void
6116 decl_fini_priority_insert (tree decl, priority_type priority)
6118 struct cgraph_node *node;
6120 if (priority == DEFAULT_INIT_PRIORITY)
6122 node = cgraph_node::get (decl);
6123 if (!node)
6124 return;
6126 else
6127 node = cgraph_node::get_create (decl);
6128 node->set_fini_priority (priority);
6131 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6133 static void
6134 print_debug_expr_statistics (void)
6136 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6137 (long) debug_expr_for_decl->size (),
6138 (long) debug_expr_for_decl->elements (),
6139 debug_expr_for_decl->collisions ());
6142 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6144 static void
6145 print_value_expr_statistics (void)
6147 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6148 (long) value_expr_for_decl->size (),
6149 (long) value_expr_for_decl->elements (),
6150 value_expr_for_decl->collisions ());
6153 /* Lookup a debug expression for FROM, and return it if we find one. */
6155 tree
6156 decl_debug_expr_lookup (tree from)
6158 struct tree_decl_map *h, in;
6159 in.base.from = from;
6161 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6162 if (h)
6163 return h->to;
6164 return NULL_TREE;
6167 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6169 void
6170 decl_debug_expr_insert (tree from, tree to)
6172 struct tree_decl_map *h;
6174 h = ggc_alloc<tree_decl_map> ();
6175 h->base.from = from;
6176 h->to = to;
6177 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6180 /* Lookup a value expression for FROM, and return it if we find one. */
6182 tree
6183 decl_value_expr_lookup (tree from)
6185 struct tree_decl_map *h, in;
6186 in.base.from = from;
6188 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6189 if (h)
6190 return h->to;
6191 return NULL_TREE;
6194 /* Insert a mapping FROM->TO in the value expression hashtable. */
6196 void
6197 decl_value_expr_insert (tree from, tree to)
6199 struct tree_decl_map *h;
6201 h = ggc_alloc<tree_decl_map> ();
6202 h->base.from = from;
6203 h->to = to;
6204 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
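/* Example (illustrative; V and E are hypothetical trees): associating a
   value expression with a decl and reading it back:

     decl_value_expr_insert (v, e);
     tree e2 = decl_value_expr_lookup (v);

   Callers typically also set DECL_HAS_VALUE_EXPR_P on V so consumers
   know there is an expression to look up.  */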
6207 /* Lookup a vector of debug arguments for FROM, and return it if we
6208 find one. */
6210 vec<tree, va_gc> **
6211 decl_debug_args_lookup (tree from)
6213 struct tree_vec_map *h, in;
6215 if (!DECL_HAS_DEBUG_ARGS_P (from))
6216 return NULL;
6217 gcc_checking_assert (debug_args_for_decl != NULL);
6218 in.base.from = from;
6219 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6220 if (h)
6221 return &h->to;
6222 return NULL;
6225 /* Insert a mapping FROM->empty vector of debug arguments in the value
6226 expression hashtable. */
6228 vec<tree, va_gc> **
6229 decl_debug_args_insert (tree from)
6231 struct tree_vec_map *h;
6232 tree_vec_map **loc;
6234 if (DECL_HAS_DEBUG_ARGS_P (from))
6235 return decl_debug_args_lookup (from);
6236 if (debug_args_for_decl == NULL)
6237 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6238 h = ggc_alloc<tree_vec_map> ();
6239 h->base.from = from;
6240 h->to = NULL;
6241 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6242 *loc = h;
6243 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6244 return &h->to;
6247 /* Hashing of types so that we don't make duplicates.
6248 The entry point is `type_hash_canon'. */
6250 /* Generate the default hash code for TYPE. This is designed for
6251 speed, rather than maximum entropy. */
6253 hashval_t
6254 type_hash_canon_hash (tree type)
6256 inchash::hash hstate;
6258 hstate.add_int (TREE_CODE (type));
6260 if (TREE_TYPE (type))
6261 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6263 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6264 /* Just the identifier is adequate to distinguish. */
6265 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6267 switch (TREE_CODE (type))
6269 case METHOD_TYPE:
6270 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6271 /* FALLTHROUGH. */
6272 case FUNCTION_TYPE:
6273 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6274 if (TREE_VALUE (t) != error_mark_node)
6275 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6276 break;
6278 case OFFSET_TYPE:
6279 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6280 break;
6282 case ARRAY_TYPE:
6284 if (TYPE_DOMAIN (type))
6285 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6286 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6288 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6289 hstate.add_object (typeless);
6292 break;
6294 case INTEGER_TYPE:
6296 tree t = TYPE_MAX_VALUE (type);
6297 if (!t)
6298 t = TYPE_MIN_VALUE (type);
6299 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6300 hstate.add_object (TREE_INT_CST_ELT (t, i));
6301 break;
6304 case REAL_TYPE:
6305 case FIXED_POINT_TYPE:
6307 unsigned prec = TYPE_PRECISION (type);
6308 hstate.add_object (prec);
6309 break;
6312 case VECTOR_TYPE:
6314 unsigned nunits = TYPE_VECTOR_SUBPARTS (type);
6315 hstate.add_object (nunits);
6316 break;
6319 default:
6320 break;
6323 return hstate.end ();
6326 /* These are the Hashtable callback functions. */
6328 /* Returns true iff the types are equivalent. */
6330 bool
6331 type_cache_hasher::equal (type_hash *a, type_hash *b)
6333 /* First test the things that are the same for all types. */
6334 if (a->hash != b->hash
6335 || TREE_CODE (a->type) != TREE_CODE (b->type)
6336 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6337 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6338 TYPE_ATTRIBUTES (b->type))
6339 || (TREE_CODE (a->type) != COMPLEX_TYPE
6340 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6341 return 0;
6343 /* Be careful about comparing arrays before and after the element type
6344 has been completed; don't compare TYPE_ALIGN unless both types are
6345 complete. */
6346 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6347 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6348 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6349 return 0;
6351 switch (TREE_CODE (a->type))
6353 case VOID_TYPE:
6354 case COMPLEX_TYPE:
6355 case POINTER_TYPE:
6356 case REFERENCE_TYPE:
6357 case NULLPTR_TYPE:
6358 return 1;
6360 case VECTOR_TYPE:
6361 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6363 case ENUMERAL_TYPE:
6364 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6365 && !(TYPE_VALUES (a->type)
6366 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6367 && TYPE_VALUES (b->type)
6368 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6369 && type_list_equal (TYPE_VALUES (a->type),
6370 TYPE_VALUES (b->type))))
6371 return 0;
6373 /* fall through */
6375 case INTEGER_TYPE:
6376 case REAL_TYPE:
6377 case BOOLEAN_TYPE:
6378 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6379 return false;
6380 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6381 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6382 TYPE_MAX_VALUE (b->type)))
6383 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6384 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6385 TYPE_MIN_VALUE (b->type))));
6387 case FIXED_POINT_TYPE:
6388 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6390 case OFFSET_TYPE:
6391 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6393 case METHOD_TYPE:
6394 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6395 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6396 || (TYPE_ARG_TYPES (a->type)
6397 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6398 && TYPE_ARG_TYPES (b->type)
6399 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6400 && type_list_equal (TYPE_ARG_TYPES (a->type),
6401 TYPE_ARG_TYPES (b->type)))))
6402 break;
6403 return 0;
6404 case ARRAY_TYPE:
6405 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6406 where the flag should be inherited from the element type
6407 and can change after ARRAY_TYPEs are created; on non-aggregates
6408 compare and hash it, since scalars will never have that flag set
6409 and we need to differentiate between arrays created by different
6410 front ends and arrays created by the middle end. */
6411 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6412 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6413 || (TYPE_TYPELESS_STORAGE (a->type)
6414 == TYPE_TYPELESS_STORAGE (b->type))));
6416 case RECORD_TYPE:
6417 case UNION_TYPE:
6418 case QUAL_UNION_TYPE:
6419 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6420 || (TYPE_FIELDS (a->type)
6421 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6422 && TYPE_FIELDS (b->type)
6423 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6424 && type_list_equal (TYPE_FIELDS (a->type),
6425 TYPE_FIELDS (b->type))));
6427 case FUNCTION_TYPE:
6428 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6429 || (TYPE_ARG_TYPES (a->type)
6430 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6431 && TYPE_ARG_TYPES (b->type)
6432 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6433 && type_list_equal (TYPE_ARG_TYPES (a->type),
6434 TYPE_ARG_TYPES (b->type))))
6435 break;
6436 return 0;
6438 default:
6439 return 0;
6442 if (lang_hooks.types.type_hash_eq != NULL)
6443 return lang_hooks.types.type_hash_eq (a->type, b->type);
6445 return 1;
6448 /* Given TYPE, and HASHCODE its hash code, return the canonical
6449 object for an identical type if one already exists.
6450 Otherwise, return TYPE, and record it as the canonical object.
6452 To use this function, first create a type of the sort you want.
6453 Then compute its hash code from the fields of the type that
6454 make it different from other similar types.
6455 Then call this function and use the value. */
6457 tree
6458 type_hash_canon (unsigned int hashcode, tree type)
6460 type_hash in;
6461 type_hash **loc;
6463 /* The hash table only contains main variants, so ensure that's what we're
6464 being passed. */
6465 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6467 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6468 must call that routine before comparing TYPE_ALIGNs. */
6469 layout_type (type);
6471 in.hash = hashcode;
6472 in.type = type;
6474 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6475 if (*loc)
6477 tree t1 = ((type_hash *) *loc)->type;
6478 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6479 if (TYPE_UID (type) + 1 == next_type_uid)
6480 --next_type_uid;
6481 /* Free also min/max values and the cache for integer
6482 types. This can't be done in free_node, as LTO frees
6483 those on its own. */
6484 if (TREE_CODE (type) == INTEGER_TYPE)
6486 if (TYPE_MIN_VALUE (type)
6487 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6488 ggc_free (TYPE_MIN_VALUE (type));
6489 if (TYPE_MAX_VALUE (type)
6490 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6491 ggc_free (TYPE_MAX_VALUE (type));
6492 if (TYPE_CACHED_VALUES_P (type))
6493 ggc_free (TYPE_CACHED_VALUES (type));
6495 free_node (type);
6496 return t1;
6498 else
6500 struct type_hash *h;
6502 h = ggc_alloc<type_hash> ();
6503 h->hash = hashcode;
6504 h->type = type;
6505 *loc = h;
6507 return type;
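/* Illustrative sketch, not part of this file: the calling pattern
   described above, as used by the type constructors later in this file
   (e.g. build_offset_type), is roughly

     tree t = make_node (OFFSET_TYPE);
     TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
     TREE_TYPE (t) = type;
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   i.e. build the node, hash its distinguishing fields, and let
   type_hash_canon either return an existing identical type (freeing the
   newly built node) or record the new node as the canonical one.  */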
6511 static void
6512 print_type_hash_statistics (void)
6514 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6515 (long) type_hash_table->size (),
6516 (long) type_hash_table->elements (),
6517 type_hash_table->collisions ());
6520 /* Given two lists of types
6521 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6522 return 1 if the lists contain the same types in the same order.
6523 Also, the TREE_PURPOSEs must match. */
6526 type_list_equal (const_tree l1, const_tree l2)
6528 const_tree t1, t2;
6530 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6531 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6532 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6533 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6534 && (TREE_TYPE (TREE_PURPOSE (t1))
6535 == TREE_TYPE (TREE_PURPOSE (t2))))))
6536 return 0;
6538 return t1 == t2;
6541 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6542 given by TYPE. If the argument list accepts variable arguments,
6543 then this function counts only the ordinary arguments. */
6546 type_num_arguments (const_tree type)
6548 int i = 0;
6549 tree t;
6551 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6552 /* If the function does not take a variable number of arguments,
6553 the last element in the list will have type `void'. */
6554 if (VOID_TYPE_P (TREE_VALUE (t)))
6555 break;
6556 else
6557 ++i;
6559 return i;
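/* Illustrative sketch, not part of this file: for a prototype like
   "int f (void *, int)", built for instance as

     tree fntype = build_function_type_list (integer_type_node,
                                             ptr_type_node,
                                             integer_type_node,
                                             NULL_TREE);

   type_num_arguments (fntype) returns 2.  For a varargs type only the
   named arguments appear in TYPE_ARG_TYPES, so only they are counted.  */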
6562 /* Nonzero if integer constants T1 and T2
6563 represent the same constant value. */
6566 tree_int_cst_equal (const_tree t1, const_tree t2)
6568 if (t1 == t2)
6569 return 1;
6571 if (t1 == 0 || t2 == 0)
6572 return 0;
6574 if (TREE_CODE (t1) == INTEGER_CST
6575 && TREE_CODE (t2) == INTEGER_CST
6576 && wi::to_widest (t1) == wi::to_widest (t2))
6577 return 1;
6579 return 0;
6582 /* Return true if T is an INTEGER_CST whose numerical value (extended
6583 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6585 bool
6586 tree_fits_shwi_p (const_tree t)
6588 return (t != NULL_TREE
6589 && TREE_CODE (t) == INTEGER_CST
6590 && wi::fits_shwi_p (wi::to_widest (t)));
6593 /* Return true if T is an INTEGER_CST whose numerical value (extended
6594 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6596 bool
6597 tree_fits_uhwi_p (const_tree t)
6599 return (t != NULL_TREE
6600 && TREE_CODE (t) == INTEGER_CST
6601 && wi::fits_uhwi_p (wi::to_widest (t)));
6604 /* T is an INTEGER_CST whose numerical value (extended according to
6605 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6606 HOST_WIDE_INT. */
6608 HOST_WIDE_INT
6609 tree_to_shwi (const_tree t)
6611 gcc_assert (tree_fits_shwi_p (t));
6612 return TREE_INT_CST_LOW (t);
6615 /* T is an INTEGER_CST whose numerical value (extended according to
6616 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6617 HOST_WIDE_INT. */
6619 unsigned HOST_WIDE_INT
6620 tree_to_uhwi (const_tree t)
6622 gcc_assert (tree_fits_uhwi_p (t));
6623 return TREE_INT_CST_LOW (t);
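/* Illustrative sketch, not part of this file: the usual idiom is to
   guard the conversion with the matching predicate, e.g.

     if (tree_fits_uhwi_p (len))
       {
         unsigned HOST_WIDE_INT n = tree_to_uhwi (len);
         ...
       }

   where LEN is some tree; calling tree_to_uhwi on a value that does not
   fit would trip the assertion above.  */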
6626 /* Return the most significant (sign) bit of T. */
6629 tree_int_cst_sign_bit (const_tree t)
6631 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6633 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6636 /* Return an indication of the sign of the integer constant T.
6637 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6638 Note that -1 will never be returned if T's type is unsigned. */
6641 tree_int_cst_sgn (const_tree t)
6643 if (wi::to_wide (t) == 0)
6644 return 0;
6645 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6646 return 1;
6647 else if (wi::neg_p (wi::to_wide (t)))
6648 return -1;
6649 else
6650 return 1;
6653 /* Return the minimum number of bits needed to represent VALUE in a
6654 signed or unsigned type; SGN says which. */
6656 unsigned int
6657 tree_int_cst_min_precision (tree value, signop sgn)
6659 /* If the value is negative, compute its negative minus 1. The latter
6660 adjustment is because the absolute value of the largest negative value
6661 is one larger than the largest positive value. This is equivalent to
6662 a bit-wise negation, so use that operation instead. */
6664 if (tree_int_cst_sgn (value) < 0)
6665 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6667 /* Return the number of bits needed, taking into account the fact
6668 that we need one more bit for a signed than unsigned type.
6669 If value is 0 or -1, the minimum precision is 1 no matter
6670 whether SGN is SIGNED or UNSIGNED. */
6672 if (integer_zerop (value))
6673 return 1;
6674 else
6675 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
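/* Worked example (illustrative, not part of this file): for VALUE == 5
   the result is tree_floor_log2 (5) + 1 == 3 bits when SGN == UNSIGNED
   and 4 bits when SGN == SIGNED; for VALUE == -3 the value is first
   replaced by ~(-3) == 2, giving tree_floor_log2 (2) + 1 + 1 == 3 bits,
   which indeed covers the signed range -4 .. 3.  */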
6678 /* Return truthvalue of whether T1 is the same tree structure as T2.
6679 Return 1 if they are the same.
6680 Return 0 if they are understandably different.
6681 Return -1 if either contains tree structure not understood by
6682 this function. */
6685 simple_cst_equal (const_tree t1, const_tree t2)
6687 enum tree_code code1, code2;
6688 int cmp;
6689 int i;
6691 if (t1 == t2)
6692 return 1;
6693 if (t1 == 0 || t2 == 0)
6694 return 0;
6696 code1 = TREE_CODE (t1);
6697 code2 = TREE_CODE (t2);
6699 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6701 if (CONVERT_EXPR_CODE_P (code2)
6702 || code2 == NON_LVALUE_EXPR)
6703 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6704 else
6705 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6708 else if (CONVERT_EXPR_CODE_P (code2)
6709 || code2 == NON_LVALUE_EXPR)
6710 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6712 if (code1 != code2)
6713 return 0;
6715 switch (code1)
6717 case INTEGER_CST:
6718 return wi::to_widest (t1) == wi::to_widest (t2);
6720 case REAL_CST:
6721 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6723 case FIXED_CST:
6724 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6726 case STRING_CST:
6727 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6728 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6729 TREE_STRING_LENGTH (t1)));
6731 case CONSTRUCTOR:
6733 unsigned HOST_WIDE_INT idx;
6734 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6735 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6737 if (vec_safe_length (v1) != vec_safe_length (v2))
6738 return false;
6740 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6741 /* ??? Should we handle also fields here? */
6742 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6743 return false;
6744 return true;
6747 case SAVE_EXPR:
6748 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6750 case CALL_EXPR:
6751 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6752 if (cmp <= 0)
6753 return cmp;
6754 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6755 return 0;
6757 const_tree arg1, arg2;
6758 const_call_expr_arg_iterator iter1, iter2;
6759 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6760 arg2 = first_const_call_expr_arg (t2, &iter2);
6761 arg1 && arg2;
6762 arg1 = next_const_call_expr_arg (&iter1),
6763 arg2 = next_const_call_expr_arg (&iter2))
6765 cmp = simple_cst_equal (arg1, arg2);
6766 if (cmp <= 0)
6767 return cmp;
6769 return arg1 == arg2;
6772 case TARGET_EXPR:
6773 /* Special case: if either target is an unallocated VAR_DECL,
6774 it means that it's going to be unified with whatever the
6775 TARGET_EXPR is really supposed to initialize, so treat it
6776 as being equivalent to anything. */
6777 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6778 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6779 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6780 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6781 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6782 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6783 cmp = 1;
6784 else
6785 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6787 if (cmp <= 0)
6788 return cmp;
6790 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6792 case WITH_CLEANUP_EXPR:
6793 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6794 if (cmp <= 0)
6795 return cmp;
6797 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6799 case COMPONENT_REF:
6800 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6801 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6803 return 0;
6805 case VAR_DECL:
6806 case PARM_DECL:
6807 case CONST_DECL:
6808 case FUNCTION_DECL:
6809 return 0;
6811 default:
6812 break;
6815 /* This general rule works for most tree codes. All exceptions should be
6816 handled above. If this is a language-specific tree code, we can't
6817 trust what might be in the operand, so say we don't know
6818 the situation. */
6819 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6820 return -1;
6822 switch (TREE_CODE_CLASS (code1))
6824 case tcc_unary:
6825 case tcc_binary:
6826 case tcc_comparison:
6827 case tcc_expression:
6828 case tcc_reference:
6829 case tcc_statement:
6830 cmp = 1;
6831 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6833 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6834 if (cmp <= 0)
6835 return cmp;
6838 return cmp;
6840 default:
6841 return -1;
6845 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6846 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6847 than U, respectively. */
6850 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6852 if (tree_int_cst_sgn (t) < 0)
6853 return -1;
6854 else if (!tree_fits_uhwi_p (t))
6855 return 1;
6856 else if (TREE_INT_CST_LOW (t) == u)
6857 return 0;
6858 else if (TREE_INT_CST_LOW (t) < u)
6859 return -1;
6860 else
6861 return 1;
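/* Illustrative sketch, not part of this file: a typical use is range
   checking a tree constant against a host integer without worrying
   about overflow of the host type, e.g.

     if (compare_tree_int (nelts, MAX_ELTS) > 0)
       return error_mark_node;

   where NELTS is an INTEGER_CST and MAX_ELTS is a hypothetical host-side
   limit.  */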
6864 /* Return true if SIZE represents a constant size that is in bounds of
6865 what the middle-end and the backend accept (covering not more than
6866 half of the address-space). */
6868 bool
6869 valid_constant_size_p (const_tree size)
6871 if (! tree_fits_uhwi_p (size)
6872 || TREE_OVERFLOW (size)
6873 || tree_int_cst_sign_bit (size) != 0)
6874 return false;
6875 return true;
6878 /* Return the precision of the type, or for a complex or vector type the
6879 precision of the type of its elements. */
6881 unsigned int
6882 element_precision (const_tree type)
6884 if (!TYPE_P (type))
6885 type = TREE_TYPE (type);
6886 enum tree_code code = TREE_CODE (type);
6887 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6888 type = TREE_TYPE (type);
6890 return TYPE_PRECISION (type);
6893 /* Return true if CODE represents an associative tree code. Otherwise
6894 return false. */
6895 bool
6896 associative_tree_code (enum tree_code code)
6898 switch (code)
6900 case BIT_IOR_EXPR:
6901 case BIT_AND_EXPR:
6902 case BIT_XOR_EXPR:
6903 case PLUS_EXPR:
6904 case MULT_EXPR:
6905 case MIN_EXPR:
6906 case MAX_EXPR:
6907 return true;
6909 default:
6910 break;
6912 return false;
6915 /* Return true if CODE represents a commutative tree code. Otherwise
6916 return false. */
6917 bool
6918 commutative_tree_code (enum tree_code code)
6920 switch (code)
6922 case PLUS_EXPR:
6923 case MULT_EXPR:
6924 case MULT_HIGHPART_EXPR:
6925 case MIN_EXPR:
6926 case MAX_EXPR:
6927 case BIT_IOR_EXPR:
6928 case BIT_XOR_EXPR:
6929 case BIT_AND_EXPR:
6930 case NE_EXPR:
6931 case EQ_EXPR:
6932 case UNORDERED_EXPR:
6933 case ORDERED_EXPR:
6934 case UNEQ_EXPR:
6935 case LTGT_EXPR:
6936 case TRUTH_AND_EXPR:
6937 case TRUTH_XOR_EXPR:
6938 case TRUTH_OR_EXPR:
6939 case WIDEN_MULT_EXPR:
6940 case VEC_WIDEN_MULT_HI_EXPR:
6941 case VEC_WIDEN_MULT_LO_EXPR:
6942 case VEC_WIDEN_MULT_EVEN_EXPR:
6943 case VEC_WIDEN_MULT_ODD_EXPR:
6944 return true;
6946 default:
6947 break;
6949 return false;
6952 /* Return true if CODE represents a ternary tree code for which the
6953 first two operands are commutative. Otherwise return false. */
6954 bool
6955 commutative_ternary_tree_code (enum tree_code code)
6957 switch (code)
6959 case WIDEN_MULT_PLUS_EXPR:
6960 case WIDEN_MULT_MINUS_EXPR:
6961 case DOT_PROD_EXPR:
6962 case FMA_EXPR:
6963 return true;
6965 default:
6966 break;
6968 return false;
6971 /* Returns true if CODE can overflow. */
6973 bool
6974 operation_can_overflow (enum tree_code code)
6976 switch (code)
6978 case PLUS_EXPR:
6979 case MINUS_EXPR:
6980 case MULT_EXPR:
6981 case LSHIFT_EXPR:
6982 /* Can overflow in various ways. */
6983 return true;
6984 case TRUNC_DIV_EXPR:
6985 case EXACT_DIV_EXPR:
6986 case FLOOR_DIV_EXPR:
6987 case CEIL_DIV_EXPR:
6988 /* For INT_MIN / -1. */
6989 return true;
6990 case NEGATE_EXPR:
6991 case ABS_EXPR:
6992 /* For -INT_MIN. */
6993 return true;
6994 default:
6995 /* These operators cannot overflow. */
6996 return false;
7000 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7001 -ftrapv doesn't generate trapping insns for CODE. */
7003 bool
7004 operation_no_trapping_overflow (tree type, enum tree_code code)
7006 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7008 /* We don't generate instructions that trap on overflow for complex or vector
7009 types. */
7010 if (!INTEGRAL_TYPE_P (type))
7011 return true;
7013 if (!TYPE_OVERFLOW_TRAPS (type))
7014 return true;
7016 switch (code)
7018 case PLUS_EXPR:
7019 case MINUS_EXPR:
7020 case MULT_EXPR:
7021 case NEGATE_EXPR:
7022 case ABS_EXPR:
7023 /* These operators can overflow, and -ftrapv generates trapping code for
7024 these. */
7025 return false;
7026 case TRUNC_DIV_EXPR:
7027 case EXACT_DIV_EXPR:
7028 case FLOOR_DIV_EXPR:
7029 case CEIL_DIV_EXPR:
7030 case LSHIFT_EXPR:
7031 /* These operators can overflow, but -ftrapv does not generate trapping
7032 code for these. */
7033 return true;
7034 default:
7035 /* These operators cannot overflow. */
7036 return true;
7040 namespace inchash
7043 /* Generate a hash value for an expression. This can be used iteratively
7044 by passing a previous result as the HSTATE argument.
7046 This function is intended to produce the same hash for expressions which
7047 would compare equal using operand_equal_p. */
7048 void
7049 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7051 int i;
7052 enum tree_code code;
7053 enum tree_code_class tclass;
7055 if (t == NULL_TREE || t == error_mark_node)
7057 hstate.merge_hash (0);
7058 return;
7061 if (!(flags & OEP_ADDRESS_OF))
7062 STRIP_NOPS (t);
7064 code = TREE_CODE (t);
7066 switch (code)
7068 /* Alas, constants aren't shared, so we can't rely on pointer
7069 identity. */
7070 case VOID_CST:
7071 hstate.merge_hash (0);
7072 return;
7073 case INTEGER_CST:
7074 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7075 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7076 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7077 return;
7078 case REAL_CST:
7080 unsigned int val2;
7081 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7082 val2 = rvc_zero;
7083 else
7084 val2 = real_hash (TREE_REAL_CST_PTR (t));
7085 hstate.merge_hash (val2);
7086 return;
7088 case FIXED_CST:
7090 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7091 hstate.merge_hash (val2);
7092 return;
7094 case STRING_CST:
7095 hstate.add ((const void *) TREE_STRING_POINTER (t),
7096 TREE_STRING_LENGTH (t));
7097 return;
7098 case COMPLEX_CST:
7099 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7100 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7101 return;
7102 case VECTOR_CST:
7104 unsigned i;
7105 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7106 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
7107 return;
7109 case SSA_NAME:
7110 /* We can just compare by pointer. */
7111 hstate.add_wide_int (SSA_NAME_VERSION (t));
7112 return;
7113 case PLACEHOLDER_EXPR:
7114 /* The node itself doesn't matter. */
7115 return;
7116 case BLOCK:
7117 case OMP_CLAUSE:
7118 /* Ignore. */
7119 return;
7120 case TREE_LIST:
7121 /* A list of expressions, for a CALL_EXPR or as the elements of a
7122 VECTOR_CST. */
7123 for (; t; t = TREE_CHAIN (t))
7124 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7125 return;
7126 case CONSTRUCTOR:
7128 unsigned HOST_WIDE_INT idx;
7129 tree field, value;
7130 flags &= ~OEP_ADDRESS_OF;
7131 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7133 inchash::add_expr (field, hstate, flags);
7134 inchash::add_expr (value, hstate, flags);
7136 return;
7138 case STATEMENT_LIST:
7140 tree_stmt_iterator i;
7141 for (i = tsi_start (CONST_CAST_TREE (t));
7142 !tsi_end_p (i); tsi_next (&i))
7143 inchash::add_expr (tsi_stmt (i), hstate, flags);
7144 return;
7146 case TREE_VEC:
7147 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7148 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7149 return;
7150 case FUNCTION_DECL:
7151 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7152 Otherwise nodes that compare equal according to operand_equal_p might
7153 get different hash codes. However, don't do this for machine specific
7154 or front end builtins, since the function code is overloaded in those
7155 cases. */
7156 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7157 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7159 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7160 code = TREE_CODE (t);
7162 /* FALL THROUGH */
7163 default:
7164 tclass = TREE_CODE_CLASS (code);
7166 if (tclass == tcc_declaration)
7168 /* DECLs have a unique ID. */
7169 hstate.add_wide_int (DECL_UID (t));
7171 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7173 /* For comparisons that can be swapped, use the lower
7174 tree code. */
7175 enum tree_code ccode = swap_tree_comparison (code);
7176 if (code < ccode)
7177 ccode = code;
7178 hstate.add_object (ccode);
7179 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7180 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7182 else if (CONVERT_EXPR_CODE_P (code))
7184 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7185 operand_equal_p. */
7186 enum tree_code ccode = NOP_EXPR;
7187 hstate.add_object (ccode);
7189 /* Don't hash the type, that can lead to having nodes which
7190 compare equal according to operand_equal_p, but which
7191 have different hash codes. Make sure to include signedness
7192 in the hash computation. */
7193 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7194 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7196 /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl. */
7197 else if (code == MEM_REF
7198 && (flags & OEP_ADDRESS_OF) != 0
7199 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7200 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7201 && integer_zerop (TREE_OPERAND (t, 1)))
7202 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7203 hstate, flags);
7204 /* Don't ICE on FE specific trees, or their arguments etc.
7205 during operand_equal_p hash verification. */
7206 else if (!IS_EXPR_CODE_CLASS (tclass))
7207 gcc_assert (flags & OEP_HASH_CHECK);
7208 else
7210 unsigned int sflags = flags;
7212 hstate.add_object (code);
7214 switch (code)
7216 case ADDR_EXPR:
7217 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7218 flags |= OEP_ADDRESS_OF;
7219 sflags = flags;
7220 break;
7222 case INDIRECT_REF:
7223 case MEM_REF:
7224 case TARGET_MEM_REF:
7225 flags &= ~OEP_ADDRESS_OF;
7226 sflags = flags;
7227 break;
7229 case ARRAY_REF:
7230 case ARRAY_RANGE_REF:
7231 case COMPONENT_REF:
7232 case BIT_FIELD_REF:
7233 sflags &= ~OEP_ADDRESS_OF;
7234 break;
7236 case COND_EXPR:
7237 flags &= ~OEP_ADDRESS_OF;
7238 break;
7240 case FMA_EXPR:
7241 case WIDEN_MULT_PLUS_EXPR:
7242 case WIDEN_MULT_MINUS_EXPR:
7244 /* The multiplication operands are commutative. */
7245 inchash::hash one, two;
7246 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7247 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7248 hstate.add_commutative (one, two);
7249 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7250 return;
7253 case CALL_EXPR:
7254 if (CALL_EXPR_FN (t) == NULL_TREE)
7255 hstate.add_int (CALL_EXPR_IFN (t));
7256 break;
7258 case TARGET_EXPR:
7259 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7260 Usually different TARGET_EXPRs should just use
7261 different temporaries in their slots. */
7262 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7263 return;
7265 default:
7266 break;
7269 /* Don't hash the type, that can lead to having nodes which
7270 compare equal according to operand_equal_p, but which
7271 have different hash codes. */
7272 if (code == NON_LVALUE_EXPR)
7274 /* Make sure to include signedness in the hash computation. */
7275 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7276 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7279 else if (commutative_tree_code (code))
7281 /* It's a commutative expression. We want to hash it the same
7282 however it appears. We do this by first hashing both operands
7283 and then rehashing based on the order of their independent
7284 hashes. */
7285 inchash::hash one, two;
7286 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7287 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7288 hstate.add_commutative (one, two);
7290 else
7291 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7292 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7293 i == 0 ? flags : sflags);
7295 return;
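/* Illustrative sketch, not part of this file: a one-shot hash of a
   single expression can be obtained with

     inchash::hash hstate;
     inchash::add_expr (expr, hstate, 0);
     hashval_t h = hstate.end ();

   and two expressions for which operand_equal_p returns true (with the
   same FLAGS) are intended to receive the same value of H.  */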
7301 /* Constructors for pointer, array and function types.
7302 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7303 constructed by language-dependent code, not here.) */
7305 /* Construct, lay out and return the type of pointers to TO_TYPE with
7306 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7307 reference all of memory. If such a type has already been
7308 constructed, reuse it. */
7310 tree
7311 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7312 bool can_alias_all)
7314 tree t;
7315 bool could_alias = can_alias_all;
7317 if (to_type == error_mark_node)
7318 return error_mark_node;
7320 /* If the pointed-to type has the may_alias attribute set, force
7321 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7322 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7323 can_alias_all = true;
7325 /* In some cases, languages will have things that aren't a POINTER_TYPE
7326 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7327 In that case, return that type without regard to the rest of our
7328 operands.
7330 ??? This is a kludge, but consistent with the way this function has
7331 always operated and there doesn't seem to be a good way to avoid this
7332 at the moment. */
7333 if (TYPE_POINTER_TO (to_type) != 0
7334 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7335 return TYPE_POINTER_TO (to_type);
7337 /* First, if we already have a type for pointers to TO_TYPE and it's
7338 the proper mode, use it. */
7339 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7340 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7341 return t;
7343 t = make_node (POINTER_TYPE);
7345 TREE_TYPE (t) = to_type;
7346 SET_TYPE_MODE (t, mode);
7347 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7348 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7349 TYPE_POINTER_TO (to_type) = t;
7351 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7352 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7353 SET_TYPE_STRUCTURAL_EQUALITY (t);
7354 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7355 TYPE_CANONICAL (t)
7356 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7357 mode, false);
7359 /* Lay out the type. This function has many callers that are concerned
7360 with expression-construction, and this simplifies them all. */
7361 layout_type (t);
7363 return t;
7366 /* By default build pointers in ptr_mode. */
7368 tree
7369 build_pointer_type (tree to_type)
7371 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7372 : TYPE_ADDR_SPACE (to_type);
7373 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7374 return build_pointer_type_for_mode (to_type, pointer_mode, false);
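/* Illustrative sketch, not part of this file: for example

     tree intptr = build_pointer_type (integer_type_node);

   yields the node for "int *" in the default pointer mode, and repeating
   the call returns the same cached node rather than building a new one.  */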
7377 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7379 tree
7380 build_reference_type_for_mode (tree to_type, machine_mode mode,
7381 bool can_alias_all)
7383 tree t;
7384 bool could_alias = can_alias_all;
7386 if (to_type == error_mark_node)
7387 return error_mark_node;
7389 /* If the pointed-to type has the may_alias attribute set, force
7390 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7391 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7392 can_alias_all = true;
7394 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7395 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7396 In that case, return that type without regard to the rest of our
7397 operands.
7399 ??? This is a kludge, but consistent with the way this function has
7400 always operated and there doesn't seem to be a good way to avoid this
7401 at the moment. */
7402 if (TYPE_REFERENCE_TO (to_type) != 0
7403 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7404 return TYPE_REFERENCE_TO (to_type);
7406 /* First, if we already have a type for references to TO_TYPE and it's
7407 the proper mode, use it. */
7408 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7409 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7410 return t;
7412 t = make_node (REFERENCE_TYPE);
7414 TREE_TYPE (t) = to_type;
7415 SET_TYPE_MODE (t, mode);
7416 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7417 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7418 TYPE_REFERENCE_TO (to_type) = t;
7420 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7421 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7422 SET_TYPE_STRUCTURAL_EQUALITY (t);
7423 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7424 TYPE_CANONICAL (t)
7425 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7426 mode, false);
7428 layout_type (t);
7430 return t;
7434 /* Build the node for the type of references-to-TO_TYPE by default
7435 in ptr_mode. */
7437 tree
7438 build_reference_type (tree to_type)
7440 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7441 : TYPE_ADDR_SPACE (to_type);
7442 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7443 return build_reference_type_for_mode (to_type, pointer_mode, false);
7446 #define MAX_INT_CACHED_PREC \
7447 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7448 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7450 /* Builds a signed or unsigned integer type of precision PRECISION.
7451 Used for C bitfields whose precision does not match that of
7452 built-in target types. */
7453 tree
7454 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7455 int unsignedp)
7457 tree itype, ret;
7459 if (unsignedp)
7460 unsignedp = MAX_INT_CACHED_PREC + 1;
7462 if (precision <= MAX_INT_CACHED_PREC)
7464 itype = nonstandard_integer_type_cache[precision + unsignedp];
7465 if (itype)
7466 return itype;
7469 itype = make_node (INTEGER_TYPE);
7470 TYPE_PRECISION (itype) = precision;
7472 if (unsignedp)
7473 fixup_unsigned_type (itype);
7474 else
7475 fixup_signed_type (itype);
7477 ret = itype;
7478 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7479 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7480 if (precision <= MAX_INT_CACHED_PREC)
7481 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7483 return ret;
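/* Illustrative sketch, not part of this file: a front end lowering a
   bit-field declared as "unsigned x : 24" on a target without a 24-bit
   integer type would typically call

     tree t = build_nonstandard_integer_type (24, 1);

   and get the cached node back on subsequent calls with the same
   precision and signedness.  */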
7486 #define MAX_BOOL_CACHED_PREC \
7487 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7488 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7490 /* Builds a boolean type of precision PRECISION.
7491 Used for boolean vectors to choose proper vector element size. */
7492 tree
7493 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7495 tree type;
7497 if (precision <= MAX_BOOL_CACHED_PREC)
7499 type = nonstandard_boolean_type_cache[precision];
7500 if (type)
7501 return type;
7504 type = make_node (BOOLEAN_TYPE);
7505 TYPE_PRECISION (type) = precision;
7506 fixup_signed_type (type);
7508 if (precision <= MAX_BOOL_CACHED_PREC)
7509 nonstandard_boolean_type_cache[precision] = type;
7511 return type;
7514 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7515 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7516 is true, reuse such a type that has already been constructed. */
7518 static tree
7519 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7521 tree itype = make_node (INTEGER_TYPE);
7523 TREE_TYPE (itype) = type;
7525 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7526 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7528 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7529 SET_TYPE_MODE (itype, TYPE_MODE (type));
7530 TYPE_SIZE (itype) = TYPE_SIZE (type);
7531 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7532 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7533 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7534 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7536 if (!shared)
7537 return itype;
7539 if ((TYPE_MIN_VALUE (itype)
7540 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7541 || (TYPE_MAX_VALUE (itype)
7542 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7544 /* Since we cannot reliably merge this type, we need to compare it using
7545 structural equality checks. */
7546 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7547 return itype;
7550 hashval_t hash = type_hash_canon_hash (itype);
7551 itype = type_hash_canon (hash, itype);
7553 return itype;
7556 /* Wrapper around build_range_type_1 with SHARED set to true. */
7558 tree
7559 build_range_type (tree type, tree lowval, tree highval)
7561 return build_range_type_1 (type, lowval, highval, true);
7564 /* Wrapper around build_range_type_1 with SHARED set to false. */
7566 tree
7567 build_nonshared_range_type (tree type, tree lowval, tree highval)
7569 return build_range_type_1 (type, lowval, highval, false);
7572 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7573 MAXVAL should be the maximum value in the domain
7574 (one less than the length of the array).
7576 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7577 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7578 The limit exists because the result is a signed type and we don't handle
7579 sizes that use more than one HOST_WIDE_INT. */
7581 tree
7582 build_index_type (tree maxval)
7584 return build_range_type (sizetype, size_zero_node, maxval);
7587 /* Return true if the debug information for TYPE, a subtype, should be emitted
7588 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7589 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7590 debug info and doesn't reflect the source code. */
7592 bool
7593 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7595 tree base_type = TREE_TYPE (type), low, high;
7597 /* Subrange types have a base type which is an integral type. */
7598 if (!INTEGRAL_TYPE_P (base_type))
7599 return false;
7601 /* Get the real bounds of the subtype. */
7602 if (lang_hooks.types.get_subrange_bounds)
7603 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7604 else
7606 low = TYPE_MIN_VALUE (type);
7607 high = TYPE_MAX_VALUE (type);
7610 /* If the type and its base type have the same representation and the same
7611 name, then the type is not a subrange but a copy of the base type. */
7612 if ((TREE_CODE (base_type) == INTEGER_TYPE
7613 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7614 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7615 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7616 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7617 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7618 return false;
7620 if (lowval)
7621 *lowval = low;
7622 if (highval)
7623 *highval = high;
7624 return true;
7627 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7628 and number of elements specified by the range of values of INDEX_TYPE.
7629 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7630 If SHARED is true, reuse such a type that has already been constructed. */
7632 static tree
7633 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7634 bool shared)
7636 tree t;
7638 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7640 error ("arrays of functions are not meaningful");
7641 elt_type = integer_type_node;
7644 t = make_node (ARRAY_TYPE);
7645 TREE_TYPE (t) = elt_type;
7646 TYPE_DOMAIN (t) = index_type;
7647 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7648 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7649 layout_type (t);
7651 /* If the element type is incomplete at this point we get marked for
7652 structural equality. Do not record these types in the canonical
7653 type hashtable. */
7654 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7655 return t;
7657 if (shared)
7659 hashval_t hash = type_hash_canon_hash (t);
7660 t = type_hash_canon (hash, t);
7663 if (TYPE_CANONICAL (t) == t)
7665 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7666 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7667 || in_lto_p)
7668 SET_TYPE_STRUCTURAL_EQUALITY (t);
7669 else if (TYPE_CANONICAL (elt_type) != elt_type
7670 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7671 TYPE_CANONICAL (t)
7672 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7673 index_type
7674 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7675 typeless_storage, shared);
7678 return t;
7681 /* Wrapper around build_array_type_1 with SHARED set to true. */
7683 tree
7684 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7686 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
7689 /* Wrapper around build_array_type_1 with SHARED set to false. */
7691 tree
7692 build_nonshared_array_type (tree elt_type, tree index_type)
7694 return build_array_type_1 (elt_type, index_type, false, false);
7697 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7698 sizetype. */
7700 tree
7701 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7703 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
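/* Illustrative sketch, not part of this file: the C type "char[10]" can
   be built either as

     tree a = build_array_type_nelts (char_type_node, 10);

   or, equivalently, as

     tree a = build_array_type (char_type_node,
                                build_index_type (size_int (9)),
                                false);

   both of which share their result through the type hash table.  */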
7706 /* Strip ARRAY_TYPEs from TYPE recursively until a non-array element
7707 type is found, and return it. */
7709 tree
7710 strip_array_types (tree type)
7712 while (TREE_CODE (type) == ARRAY_TYPE)
7713 type = TREE_TYPE (type);
7715 return type;
7718 /* Computes the canonical argument types from the argument type list
7719 ARGTYPES.
7721 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7722 on entry to this function, or if any of the ARGTYPES are
7723 structural.
7725 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7726 true on entry to this function, or if any of the ARGTYPES are
7727 non-canonical.
7729 Returns a canonical argument list, which may be ARGTYPES when the
7730 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7731 true) or would not differ from ARGTYPES. */
7733 static tree
7734 maybe_canonicalize_argtypes (tree argtypes,
7735 bool *any_structural_p,
7736 bool *any_noncanonical_p)
7738 tree arg;
7739 bool any_noncanonical_argtypes_p = false;
7741 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7743 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7744 /* Fail gracefully by stating that the type is structural. */
7745 *any_structural_p = true;
7746 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7747 *any_structural_p = true;
7748 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7749 || TREE_PURPOSE (arg))
7750 /* If the argument has a default argument, we consider it
7751 non-canonical even though the type itself is canonical.
7752 That way, different variants of function and method types
7753 with default arguments will all point to the variant with
7754 no defaults as their canonical type. */
7755 any_noncanonical_argtypes_p = true;
7758 if (*any_structural_p)
7759 return argtypes;
7761 if (any_noncanonical_argtypes_p)
7763 /* Build the canonical list of argument types. */
7764 tree canon_argtypes = NULL_TREE;
7765 bool is_void = false;
7767 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7769 if (arg == void_list_node)
7770 is_void = true;
7771 else
7772 canon_argtypes = tree_cons (NULL_TREE,
7773 TYPE_CANONICAL (TREE_VALUE (arg)),
7774 canon_argtypes);
7777 canon_argtypes = nreverse (canon_argtypes);
7778 if (is_void)
7779 canon_argtypes = chainon (canon_argtypes, void_list_node);
7781 /* There is a non-canonical type. */
7782 *any_noncanonical_p = true;
7783 return canon_argtypes;
7786 /* The canonical argument types are the same as ARGTYPES. */
7787 return argtypes;
7790 /* Construct, lay out and return
7791 the type of functions returning type VALUE_TYPE
7792 given arguments of types ARG_TYPES.
7793 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7794 are data type nodes for the arguments of the function.
7795 If such a type has already been constructed, reuse it. */
7797 tree
7798 build_function_type (tree value_type, tree arg_types)
7800 tree t;
7801 inchash::hash hstate;
7802 bool any_structural_p, any_noncanonical_p;
7803 tree canon_argtypes;
7805 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7807 error ("function return type cannot be function");
7808 value_type = integer_type_node;
7811 /* Make a node of the sort we want. */
7812 t = make_node (FUNCTION_TYPE);
7813 TREE_TYPE (t) = value_type;
7814 TYPE_ARG_TYPES (t) = arg_types;
7816 /* If we already have such a type, use the old one. */
7817 hashval_t hash = type_hash_canon_hash (t);
7818 t = type_hash_canon (hash, t);
7820 /* Set up the canonical type. */
7821 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7822 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7823 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7824 &any_structural_p,
7825 &any_noncanonical_p);
7826 if (any_structural_p)
7827 SET_TYPE_STRUCTURAL_EQUALITY (t);
7828 else if (any_noncanonical_p)
7829 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7830 canon_argtypes);
7832 if (!COMPLETE_TYPE_P (t))
7833 layout_type (t);
7834 return t;
7837 /* Build a function type. The RETURN_TYPE is the type returned by the
7838 function. If VAARGS is set, no void_type_node is appended to the
7839 list. ARGP must always be terminated by a NULL_TREE. */
7841 static tree
7842 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7844 tree t, args, last;
7846 t = va_arg (argp, tree);
7847 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7848 args = tree_cons (NULL_TREE, t, args);
7850 if (vaargs)
7852 last = args;
7853 if (args != NULL_TREE)
7854 args = nreverse (args);
7855 gcc_assert (last != void_list_node);
7857 else if (args == NULL_TREE)
7858 args = void_list_node;
7859 else
7861 last = args;
7862 args = nreverse (args);
7863 TREE_CHAIN (last) = void_list_node;
7865 args = build_function_type (return_type, args);
7867 return args;
7870 /* Build a function type. The RETURN_TYPE is the type returned by the
7871 function. If additional arguments are provided, they are
7872 additional argument types. The list of argument types must always
7873 be terminated by NULL_TREE. */
7875 tree
7876 build_function_type_list (tree return_type, ...)
7878 tree args;
7879 va_list p;
7881 va_start (p, return_type);
7882 args = build_function_type_list_1 (false, return_type, p);
7883 va_end (p);
7884 return args;
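/* Illustrative sketch, not part of this file: the type of a function
   "int f (void *, long)" can be built as

     tree fntype = build_function_type_list (integer_type_node,
                                             ptr_type_node,
                                             long_integer_type_node,
                                             NULL_TREE);

   while an unprototyped "int f ()" corresponds to
   build_function_type (integer_type_node, NULL_TREE).  */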
7887 /* Build a variable argument function type. The RETURN_TYPE is the
7888 type returned by the function. If additional arguments are provided,
7889 they are additional argument types. The list of argument types must
7890 always be terminated by NULL_TREE. */
7892 tree
7893 build_varargs_function_type_list (tree return_type, ...)
7895 tree args;
7896 va_list p;
7898 va_start (p, return_type);
7899 args = build_function_type_list_1 (true, return_type, p);
7900 va_end (p);
7902 return args;
7905 /* Build a function type. RETURN_TYPE is the type returned by the
7906 function; VAARGS indicates whether the function takes varargs. The
7907 function takes N named arguments, the types of which are provided in
7908 ARG_TYPES. */
7910 static tree
7911 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7912 tree *arg_types)
7914 int i;
7915 tree t = vaargs ? NULL_TREE : void_list_node;
7917 for (i = n - 1; i >= 0; i--)
7918 t = tree_cons (NULL_TREE, arg_types[i], t);
7920 return build_function_type (return_type, t);
7923 /* Build a function type. RETURN_TYPE is the type returned by the
7924 function. The function takes N named arguments, the types of which
7925 are provided in ARG_TYPES. */
7927 tree
7928 build_function_type_array (tree return_type, int n, tree *arg_types)
7930 return build_function_type_array_1 (false, return_type, n, arg_types);
7933 /* Build a variable argument function type. RETURN_TYPE is the type
7934 returned by the function. The function takes N named arguments, the
7935 types of which are provided in ARG_TYPES. */
7937 tree
7938 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7940 return build_function_type_array_1 (true, return_type, n, arg_types);
7943 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7944 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7945 for the method. An implicit additional parameter (of type
7946 pointer-to-BASETYPE) is added to the ARGTYPES. */
7948 tree
7949 build_method_type_directly (tree basetype,
7950 tree rettype,
7951 tree argtypes)
7953 tree t;
7954 tree ptype;
7955 bool any_structural_p, any_noncanonical_p;
7956 tree canon_argtypes;
7958 /* Make a node of the sort we want. */
7959 t = make_node (METHOD_TYPE);
7961 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7962 TREE_TYPE (t) = rettype;
7963 ptype = build_pointer_type (basetype);
7965 /* The actual arglist for this function includes a "hidden" argument
7966 which is "this". Put it into the list of argument types. */
7967 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7968 TYPE_ARG_TYPES (t) = argtypes;
7970 /* If we already have such a type, use the old one. */
7971 hashval_t hash = type_hash_canon_hash (t);
7972 t = type_hash_canon (hash, t);
7974 /* Set up the canonical type. */
7975 any_structural_p
7976 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7977 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7978 any_noncanonical_p
7979 = (TYPE_CANONICAL (basetype) != basetype
7980 || TYPE_CANONICAL (rettype) != rettype);
7981 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7982 &any_structural_p,
7983 &any_noncanonical_p);
7984 if (any_structural_p)
7985 SET_TYPE_STRUCTURAL_EQUALITY (t);
7986 else if (any_noncanonical_p)
7987 TYPE_CANONICAL (t)
7988 = build_method_type_directly (TYPE_CANONICAL (basetype),
7989 TYPE_CANONICAL (rettype),
7990 canon_argtypes);
7991 if (!COMPLETE_TYPE_P (t))
7992 layout_type (t);
7994 return t;
7997 /* Construct, lay out and return the type of methods belonging to class
7998 BASETYPE and whose arguments and values are described by TYPE.
7999 If that type exists already, reuse it.
8000 TYPE must be a FUNCTION_TYPE node. */
8002 tree
8003 build_method_type (tree basetype, tree type)
8005 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8007 return build_method_type_directly (basetype,
8008 TREE_TYPE (type),
8009 TYPE_ARG_TYPES (type));
8012 /* Construct, lay out and return the type of offsets to a value
8013 of type TYPE, within an object of type BASETYPE.
8014 If a suitable offset type exists already, reuse it. */
8016 tree
8017 build_offset_type (tree basetype, tree type)
8019 tree t;
8021 /* Make a node of the sort we want. */
8022 t = make_node (OFFSET_TYPE);
8024 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8025 TREE_TYPE (t) = type;
8027 /* If we already have such a type, use the old one. */
8028 hashval_t hash = type_hash_canon_hash (t);
8029 t = type_hash_canon (hash, t);
8031 if (!COMPLETE_TYPE_P (t))
8032 layout_type (t);
8034 if (TYPE_CANONICAL (t) == t)
8036 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8037 || TYPE_STRUCTURAL_EQUALITY_P (type))
8038 SET_TYPE_STRUCTURAL_EQUALITY (t);
8039 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8040 || TYPE_CANONICAL (type) != type)
8041 TYPE_CANONICAL (t)
8042 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8043 TYPE_CANONICAL (type));
8046 return t;
8049 /* Create a complex type whose components are COMPONENT_TYPE.
8051 If NAMED is true, the type is given a TYPE_NAME. We do not always
8052 do so because this creates a DECL node and thus makes the DECL_UIDs
8053 dependent on the type canonicalization hashtable, which is GC-ed,
8054 so the DECL_UIDs would not be stable wrt garbage collection. */
8056 tree
8057 build_complex_type (tree component_type, bool named)
8059 tree t;
8061 gcc_assert (INTEGRAL_TYPE_P (component_type)
8062 || SCALAR_FLOAT_TYPE_P (component_type)
8063 || FIXED_POINT_TYPE_P (component_type));
8065 /* Make a node of the sort we want. */
8066 t = make_node (COMPLEX_TYPE);
8068 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8070 /* If we already have such a type, use the old one. */
8071 hashval_t hash = type_hash_canon_hash (t);
8072 t = type_hash_canon (hash, t);
8074 if (!COMPLETE_TYPE_P (t))
8075 layout_type (t);
8077 if (TYPE_CANONICAL (t) == t)
8079 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8080 SET_TYPE_STRUCTURAL_EQUALITY (t);
8081 else if (TYPE_CANONICAL (component_type) != component_type)
8082 TYPE_CANONICAL (t)
8083 = build_complex_type (TYPE_CANONICAL (component_type), named);
8086 /* We need to create a name, since complex is a fundamental type. */
8087 if (!TYPE_NAME (t) && named)
8089 const char *name;
8090 if (component_type == char_type_node)
8091 name = "complex char";
8092 else if (component_type == signed_char_type_node)
8093 name = "complex signed char";
8094 else if (component_type == unsigned_char_type_node)
8095 name = "complex unsigned char";
8096 else if (component_type == short_integer_type_node)
8097 name = "complex short int";
8098 else if (component_type == short_unsigned_type_node)
8099 name = "complex short unsigned int";
8100 else if (component_type == integer_type_node)
8101 name = "complex int";
8102 else if (component_type == unsigned_type_node)
8103 name = "complex unsigned int";
8104 else if (component_type == long_integer_type_node)
8105 name = "complex long int";
8106 else if (component_type == long_unsigned_type_node)
8107 name = "complex long unsigned int";
8108 else if (component_type == long_long_integer_type_node)
8109 name = "complex long long int";
8110 else if (component_type == long_long_unsigned_type_node)
8111 name = "complex long long unsigned int";
8112 else
8113 name = 0;
8115 if (name != 0)
8116 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8117 get_identifier (name), t);
8120 return build_qualified_type (t, TYPE_QUALS (component_type));
8123 /* If TYPE is a real or complex floating-point type and the target
8124 does not directly support arithmetic on TYPE then return the wider
8125 type to be used for arithmetic on TYPE. Otherwise, return
8126 NULL_TREE. */
8128 tree
8129 excess_precision_type (tree type)
8131 /* The target can give two different responses to the question of
8132 which excess precision mode it would like depending on whether we
8133 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8135 enum excess_precision_type requested_type
8136 = (flag_excess_precision == EXCESS_PRECISION_FAST
8137 ? EXCESS_PRECISION_TYPE_FAST
8138 : EXCESS_PRECISION_TYPE_STANDARD);
8140 enum flt_eval_method target_flt_eval_method
8141 = targetm.c.excess_precision (requested_type);
8143 /* The target should not ask for unpredictable float evaluation (though
8144 it might advertise that implicitly the evaluation is unpredictable,
8145 but we don't care about that here, it will have been reported
8146 elsewhere). If it does ask for unpredictable evaluation, we have
8147 nothing to do here. */
8148 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8150 /* Nothing to do. The target has asked for all types we know about
8151 to be computed with their native precision and range. */
8152 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8153 return NULL_TREE;
8155 /* The target will promote this type in a target-dependent way, so excess
8156 precision ought to leave it alone. */
8157 if (targetm.promoted_type (type) != NULL_TREE)
8158 return NULL_TREE;
8160 machine_mode float16_type_mode = (float16_type_node
8161 ? TYPE_MODE (float16_type_node)
8162 : VOIDmode);
8163 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8164 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8166 switch (TREE_CODE (type))
8168 case REAL_TYPE:
8170 machine_mode type_mode = TYPE_MODE (type);
8171 switch (target_flt_eval_method)
8173 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8174 if (type_mode == float16_type_mode)
8175 return float_type_node;
8176 break;
8177 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8178 if (type_mode == float16_type_mode
8179 || type_mode == float_type_mode)
8180 return double_type_node;
8181 break;
8182 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8183 if (type_mode == float16_type_mode
8184 || type_mode == float_type_mode
8185 || type_mode == double_type_mode)
8186 return long_double_type_node;
8187 break;
8188 default:
8189 gcc_unreachable ();
8191 break;
8193 case COMPLEX_TYPE:
8195 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8196 return NULL_TREE;
8197 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8198 switch (target_flt_eval_method)
8200 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8201 if (type_mode == float16_type_mode)
8202 return complex_float_type_node;
8203 break;
8204 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8205 if (type_mode == float16_type_mode
8206 || type_mode == float_type_mode)
8207 return complex_double_type_node;
8208 break;
8209 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8210 if (type_mode == float16_type_mode
8211 || type_mode == float_type_mode
8212 || type_mode == double_type_mode)
8213 return complex_long_double_type_node;
8214 break;
8215 default:
8216 gcc_unreachable ();
8218 break;
8220 default:
8221 break;
8224 return NULL_TREE;
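/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  On a target whose excess-precision setting evaluates
   float in double, excess_precision_type (float_type_node) returns
   double_type_node; if every type is evaluated in its native precision
   it returns NULL_TREE, so the caller falls back to the original type.  */

static tree
example_arith_type_for_float (void)
{
  tree wide = excess_precision_type (float_type_node);
  return wide ? wide : float_type_node;
}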
8227 /* Return OP, stripped of any conversions to wider types as much as is safe.
8228 Converting the value back to OP's type makes a value equivalent to OP.
8230 If FOR_TYPE is nonzero, we return a value which, if converted to
8231 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8233 OP must have integer, real or enumeral type. Pointers are not allowed!
8235 There are some cases where the obvious value we could return
8236 would regenerate to OP if converted to OP's type,
8237 but would not extend like OP to wider types.
8238 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8239 For example, if OP is (unsigned short)(signed char)-1,
8240 we avoid returning (signed char)-1 if FOR_TYPE is int,
8241 even though extending that to an unsigned short would regenerate OP,
8242 since the result of extending (signed char)-1 to (int)
8243 is different from (int) OP. */
8245 tree
8246 get_unwidened (tree op, tree for_type)
8248 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8249 tree type = TREE_TYPE (op);
8250 unsigned final_prec
8251 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8252 int uns
8253 = (for_type != 0 && for_type != type
8254 && final_prec > TYPE_PRECISION (type)
8255 && TYPE_UNSIGNED (type));
8256 tree win = op;
8258 while (CONVERT_EXPR_P (op))
8260 int bitschange;
8262 /* TYPE_PRECISION on vector types has different meaning
8263 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8264 so avoid them here. */
8265 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8266 break;
8268 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8269 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8271 /* Truncations are many-one so cannot be removed.
8272 Unless we are later going to truncate down even farther. */
8273 if (bitschange < 0
8274 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8275 break;
8277 /* See what's inside this conversion. If we decide to strip it,
8278 we will set WIN. */
8279 op = TREE_OPERAND (op, 0);
8281 /* If we have not stripped any zero-extensions (uns is 0),
8282 we can strip any kind of extension.
8283 If we have previously stripped a zero-extension,
8284 only zero-extensions can safely be stripped.
8285 Any extension can be stripped if the bits it would produce
8286 are all going to be discarded later by truncating to FOR_TYPE. */
8288 if (bitschange > 0)
8290 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8291 win = op;
8292 /* TYPE_UNSIGNED says whether this is a zero-extension.
8293 Let's avoid computing it if it does not affect WIN
8294 and if UNS will not be needed again. */
8295 if ((uns
8296 || CONVERT_EXPR_P (op))
8297 && TYPE_UNSIGNED (TREE_TYPE (op)))
8299 uns = 1;
8300 win = op;
8305 /* If we finally reach a constant, see if it fits in something smaller and
8306 in that case convert it. */
8307 if (TREE_CODE (win) == INTEGER_CST)
8309 tree wtype = TREE_TYPE (win);
8310 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8311 if (for_type)
8312 prec = MAX (prec, final_prec);
8313 if (prec < TYPE_PRECISION (wtype))
8315 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8316 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8317 win = fold_convert (t, win);
8321 return win;
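/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  For a widening cast such as (int) S where S has type
   short, get_unwidened strips the conversion and hands back S itself,
   because converting S to int regenerates the widened value.  */

static tree
example_strip_widening_cast (tree short_val)
{
  tree widened = fold_convert (integer_type_node, short_val);
  return get_unwidened (widened, NULL_TREE);
}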
8324 /* Return OP or a simpler expression for a narrower value
8325 which can be sign-extended or zero-extended to give back OP.
8326 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8327 or 0 if the value should be sign-extended. */
8329 tree
8330 get_narrower (tree op, int *unsignedp_ptr)
8332 int uns = 0;
8333 int first = 1;
8334 tree win = op;
8335 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8337 while (TREE_CODE (op) == NOP_EXPR)
8339 int bitschange
8340 = (TYPE_PRECISION (TREE_TYPE (op))
8341 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8343 /* Truncations are many-one so cannot be removed. */
8344 if (bitschange < 0)
8345 break;
8347 /* See what's inside this conversion. If we decide to strip it,
8348 we will set WIN. */
8350 if (bitschange > 0)
8352 op = TREE_OPERAND (op, 0);
8353 /* An extension: the outermost one can be stripped,
8354 but remember whether it is zero or sign extension. */
8355 if (first)
8356 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8357 /* Otherwise, if a sign extension has been stripped,
8358 only sign extensions can now be stripped;
8359 if a zero extension has been stripped, only zero-extensions. */
8360 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8361 break;
8362 first = 0;
8364 else /* bitschange == 0 */
8366 /* A change in nominal type can always be stripped, but we must
8367 preserve the unsignedness. */
8368 if (first)
8369 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8370 first = 0;
8371 op = TREE_OPERAND (op, 0);
8372 /* Keep trying to narrow, but don't assign op to win if it
8373 would turn an integral type into something else. */
8374 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8375 continue;
8378 win = op;
8381 if (TREE_CODE (op) == COMPONENT_REF
8382 /* Since type_for_size always gives an integer type. */
8383 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8384 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8385 /* Ensure field is laid out already. */
8386 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8387 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8389 unsigned HOST_WIDE_INT innerprec
8390 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8391 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8392 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8393 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8395 /* We can get this structure field in a narrower type that fits it,
8396 but the resulting extension to its nominal type (a fullword type)
8397 must satisfy the same conditions as for other extensions.
8399 Do this only for fields that are aligned (not bit-fields),
8400 because when bit-field insns are used there is no
8401 advantage in doing this. */
8403 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8404 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8405 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8406 && type != 0)
8408 if (first)
8409 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8410 win = fold_convert (type, op);
8414 *unsignedp_ptr = uns;
8415 return win;
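/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical and OP is assumed to have integral type.  It reports
   whether get_narrower found anything narrower than OP.  */

static bool
example_narrowing_helps (tree op)
{
  int unsignedp;
  tree narrow = get_narrower (op, &unsignedp);
  return (TYPE_PRECISION (TREE_TYPE (narrow))
	  < TYPE_PRECISION (TREE_TYPE (op)));
}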
8418 /* Return true if integer constant C has a value that is permissible
8419 for TYPE, an integral type. */
8421 bool
8422 int_fits_type_p (const_tree c, const_tree type)
8424 tree type_low_bound, type_high_bound;
8425 bool ok_for_low_bound, ok_for_high_bound;
8426 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8428 /* Non-standard boolean types can have arbitrary precision but various
8429 transformations assume that they can only take values 0 and +/-1. */
8430 if (TREE_CODE (type) == BOOLEAN_TYPE)
8431 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8433 retry:
8434 type_low_bound = TYPE_MIN_VALUE (type);
8435 type_high_bound = TYPE_MAX_VALUE (type);
8437 /* If at least one bound of the type is a constant integer, we can check
8438 ourselves and maybe make a decision. If no such decision is possible, but
8439 this type is a subtype, try checking against that. Otherwise, use
8440 fits_to_tree_p, which checks against the precision.
8442 Compute the status for each possibly constant bound, and return if we see
8443 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8444 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8445 for "constant known to fit". */
8447 /* Check if c >= type_low_bound. */
8448 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8450 if (tree_int_cst_lt (c, type_low_bound))
8451 return false;
8452 ok_for_low_bound = true;
8454 else
8455 ok_for_low_bound = false;
8457 /* Check if c <= type_high_bound. */
8458 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8460 if (tree_int_cst_lt (type_high_bound, c))
8461 return false;
8462 ok_for_high_bound = true;
8464 else
8465 ok_for_high_bound = false;
8467 /* If the constant fits both bounds, the result is known. */
8468 if (ok_for_low_bound && ok_for_high_bound)
8469 return true;
8471 /* Perform some generic filtering which may allow making a decision
8472 even if the bounds are not constant. First, negative integers
8473 never fit in unsigned types. */
8474 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8475 return false;
8477 /* Second, narrower types always fit in wider ones. */
8478 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8479 return true;
8481 /* Third, unsigned integers with top bit set never fit signed types. */
8482 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8484 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8485 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8487 /* When a tree_cst is converted to a wide-int, the precision
8488 is taken from the type. However, if the precision of the
8489 mode underneath the type is smaller than that, it is
8490 possible that the value will not fit. The test below
8491 fails if any bit is set between the sign bit of the
8492 underlying mode and the top bit of the type. */
8493 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8494 return false;
8496 else if (wi::neg_p (wi::to_wide (c)))
8497 return false;
8500 /* If we haven't been able to decide at this point, there is nothing more we
8501 can check ourselves here. Look at the base type if we have one and it
8502 has the same precision. */
8503 if (TREE_CODE (type) == INTEGER_TYPE
8504 && TREE_TYPE (type) != 0
8505 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8507 type = TREE_TYPE (type);
8508 goto retry;
8511 /* Or to fits_to_tree_p, if nothing else. */
8512 return wi::fits_to_tree_p (wi::to_wide (c), type);
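/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  On the usual targets where signed char is 8 bits wide,
   100 fits in it but 300 does not.  */

static void
example_int_fits (void)
{
  tree c100 = build_int_cst (integer_type_node, 100);
  tree c300 = build_int_cst (integer_type_node, 300);
  gcc_assert (int_fits_type_p (c100, signed_char_type_node));
  gcc_assert (!int_fits_type_p (c300, signed_char_type_node));
}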
8515 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8516 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8517 represented (assuming two's-complement arithmetic) within the bit
8518 precision of the type are returned instead. */
8520 void
8521 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8523 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8524 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8525 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8526 else
8528 if (TYPE_UNSIGNED (type))
8529 mpz_set_ui (min, 0);
8530 else
8532 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8533 wi::to_mpz (mn, min, SIGNED);
8537 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8538 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8539 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8540 else
8542 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8543 wi::to_mpz (mn, max, TYPE_SIGN (type));
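/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  It checks whether VAL lies within the static bounds of
   TYPE, an integer or pointer type.  */

static bool
example_value_in_type_range (const_tree type, const mpz_t val)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  bool ok = mpz_cmp (lo, val) <= 0 && mpz_cmp (val, hi) <= 0;
  mpz_clear (lo);
  mpz_clear (hi);
  return ok;
}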
8547 /* Return true if VAR is an automatic variable defined in function FN. */
8549 bool
8550 auto_var_in_fn_p (const_tree var, const_tree fn)
8552 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8553 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8554 || TREE_CODE (var) == PARM_DECL)
8555 && ! TREE_STATIC (var))
8556 || TREE_CODE (var) == LABEL_DECL
8557 || TREE_CODE (var) == RESULT_DECL));
8560 /* Subprogram of following function. Called by walk_tree.
8562 Return *TP if it is an automatic variable or parameter of the
8563 function passed in as DATA. */
8565 static tree
8566 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8568 tree fn = (tree) data;
8570 if (TYPE_P (*tp))
8571 *walk_subtrees = 0;
8573 else if (DECL_P (*tp)
8574 && auto_var_in_fn_p (*tp, fn))
8575 return *tp;
8577 return NULL_TREE;
8580 /* Returns true if T is, contains, or refers to a type with variable
8581 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8582 arguments, but not the return type. If FN is nonzero, only return
8583 true if a modifier of the type or position of FN is a variable or
8584 parameter inside FN.
8586 This concept is more general than that of C99 'variably modified types':
8587 in C99, a struct type is never variably modified because a VLA may not
8588 appear as a structure member. However, in GNU C, code like:
8590 struct S { int i[f()]; };
8592 is valid, and other languages may define similar constructs. */
8594 bool
8595 variably_modified_type_p (tree type, tree fn)
8597 tree t;
8599 /* Test if T is either variable (if FN is zero) or an expression containing
8600 a variable in FN. If TYPE isn't gimplified, return true also if
8601 gimplify_one_sizepos would gimplify the expression into a local
8602 variable. */
8603 #define RETURN_TRUE_IF_VAR(T) \
8604 do { tree _t = (T); \
8605 if (_t != NULL_TREE \
8606 && _t != error_mark_node \
8607 && TREE_CODE (_t) != INTEGER_CST \
8608 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8609 && (!fn \
8610 || (!TYPE_SIZES_GIMPLIFIED (type) \
8611 && !is_gimple_sizepos (_t)) \
8612 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8613 return true; } while (0)
8615 if (type == error_mark_node)
8616 return false;
8618 /* If TYPE itself has variable size, it is variably modified. */
8619 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8620 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8622 switch (TREE_CODE (type))
8624 case POINTER_TYPE:
8625 case REFERENCE_TYPE:
8626 case VECTOR_TYPE:
8627 /* Ada can have pointer types referring to themselves indirectly. */
8628 if (TREE_VISITED (type))
8629 return false;
8630 TREE_VISITED (type) = true;
8631 if (variably_modified_type_p (TREE_TYPE (type), fn))
8633 TREE_VISITED (type) = false;
8634 return true;
8636 TREE_VISITED (type) = false;
8637 break;
8639 case FUNCTION_TYPE:
8640 case METHOD_TYPE:
8641 /* If TYPE is a function type, it is variably modified if the
8642 return type is variably modified. */
8643 if (variably_modified_type_p (TREE_TYPE (type), fn))
8644 return true;
8645 break;
8647 case INTEGER_TYPE:
8648 case REAL_TYPE:
8649 case FIXED_POINT_TYPE:
8650 case ENUMERAL_TYPE:
8651 case BOOLEAN_TYPE:
8652 /* Scalar types are variably modified if their end points
8653 aren't constant. */
8654 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8655 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8656 break;
8658 case RECORD_TYPE:
8659 case UNION_TYPE:
8660 case QUAL_UNION_TYPE:
8661 /* We can't see if any of the fields are variably-modified by the
8662 definition we normally use, since that would produce infinite
8663 recursion via pointers. */
8664 /* This is variably modified if some field's type is. */
8665 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8666 if (TREE_CODE (t) == FIELD_DECL)
8668 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8669 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8670 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8672 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8673 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8675 break;
8677 case ARRAY_TYPE:
8678 /* Do not call ourselves to avoid infinite recursion. This is
8679 variably modified if the element type is. */
8680 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8681 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8682 break;
8684 default:
8685 break;
8688 /* The current language may have other cases to check, but in general,
8689 all other types are not variably modified. */
8690 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8692 #undef RETURN_TRUE_IF_VAR
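/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  A C-level example of a variably modified type is the
   type of A in

     void f (int n) { int a[n]; }

   for which this helper returns true when FN is f's FUNCTION_DECL.  */

static bool
example_decl_type_is_variably_modified (tree decl, tree fn)
{
  return variably_modified_type_p (TREE_TYPE (decl), fn);
}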
8695 /* Given a DECL or TYPE, return the scope in which it was declared, or
8696 NULL_TREE if there is no containing scope. */
8698 tree
8699 get_containing_scope (const_tree t)
8701 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8704 /* Return the innermost context enclosing DECL that is
8705 a FUNCTION_DECL, or zero if none. */
8707 tree
8708 decl_function_context (const_tree decl)
8710 tree context;
8712 if (TREE_CODE (decl) == ERROR_MARK)
8713 return 0;
8715 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8716 where we look up the function at runtime. Such functions always take
8717 a first argument of type 'pointer to real context'.
8719 C++ should really be fixed to use DECL_CONTEXT for the real context,
8720 and use something else for the "virtual context". */
8721 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8722 context
8723 = TYPE_MAIN_VARIANT
8724 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8725 else
8726 context = DECL_CONTEXT (decl);
8728 while (context && TREE_CODE (context) != FUNCTION_DECL)
8730 if (TREE_CODE (context) == BLOCK)
8731 context = BLOCK_SUPERCONTEXT (context);
8732 else
8733 context = get_containing_scope (context);
8736 return context;
8739 /* Return the innermost context enclosing DECL that is
8740 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8741 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8743 tree
8744 decl_type_context (const_tree decl)
8746 tree context = DECL_CONTEXT (decl);
8748 while (context)
8749 switch (TREE_CODE (context))
8751 case NAMESPACE_DECL:
8752 case TRANSLATION_UNIT_DECL:
8753 return NULL_TREE;
8755 case RECORD_TYPE:
8756 case UNION_TYPE:
8757 case QUAL_UNION_TYPE:
8758 return context;
8760 case TYPE_DECL:
8761 case FUNCTION_DECL:
8762 context = DECL_CONTEXT (context);
8763 break;
8765 case BLOCK:
8766 context = BLOCK_SUPERCONTEXT (context);
8767 break;
8769 default:
8770 gcc_unreachable ();
8773 return NULL_TREE;
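/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  It is true when DECL was declared inside a class,
   struct or union, looking through TYPE_DECLs and FUNCTION_DECLs.  */

static bool
example_is_class_member_p (const_tree decl)
{
  return decl_type_context (decl) != NULL_TREE;
}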
8776 /* CALL is a CALL_EXPR. Return the declaration for the function
8777 called, or NULL_TREE if the called function cannot be
8778 determined. */
8780 tree
8781 get_callee_fndecl (const_tree call)
8783 tree addr;
8785 if (call == error_mark_node)
8786 return error_mark_node;
8788 /* It's invalid to call this function with anything but a
8789 CALL_EXPR. */
8790 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8792 /* The first operand to the CALL is the address of the function
8793 called. */
8794 addr = CALL_EXPR_FN (call);
8796 /* If there is no function, return early. */
8797 if (addr == NULL_TREE)
8798 return NULL_TREE;
8800 STRIP_NOPS (addr);
8802 /* If this is a readonly function pointer, extract its initial value. */
8803 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8804 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8805 && DECL_INITIAL (addr))
8806 addr = DECL_INITIAL (addr);
8808 /* If the address is just `&f' for some function `f', then we know
8809 that `f' is being called. */
8810 if (TREE_CODE (addr) == ADDR_EXPR
8811 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8812 return TREE_OPERAND (addr, 0);
8814 /* We couldn't figure out what was being called. */
8815 return NULL_TREE;
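/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  For a direct call such as f (x) this yields the
   IDENTIFIER_NODE for "f"; for a call through a non-constant function
   pointer it yields NULL_TREE.  */

static tree
example_callee_name (const_tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl ? DECL_NAME (fndecl) : NULL_TREE;
}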
8818 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8819 return the associated function code, otherwise return CFN_LAST. */
8821 combined_fn
8822 get_call_combined_fn (const_tree call)
8824 /* It's invalid to call this function with anything but a CALL_EXPR. */
8825 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8827 if (!CALL_EXPR_FN (call))
8828 return as_combined_fn (CALL_EXPR_IFN (call));
8830 tree fndecl = get_callee_fndecl (call);
8831 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8832 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8834 return CFN_LAST;
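/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  Matching on the combined code spares the caller from
   first extracting and checking a FUNCTION_DECL by hand.  */

static bool
example_is_memcpy_call (const_tree call)
{
  return get_call_combined_fn (call) == CFN_BUILT_IN_MEMCPY;
}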
8837 #define TREE_MEM_USAGE_SPACES 40
8839 /* Print debugging information about tree nodes generated during the compile,
8840 and any language-specific information. */
8842 void
8843 dump_tree_statistics (void)
8845 if (GATHER_STATISTICS)
8847 int i;
8848 int total_nodes, total_bytes;
8849 fprintf (stderr, "\nKind Nodes Bytes\n");
8850 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8851 total_nodes = total_bytes = 0;
8852 for (i = 0; i < (int) all_kinds; i++)
8854 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8855 tree_node_counts[i], tree_node_sizes[i]);
8856 total_nodes += tree_node_counts[i];
8857 total_bytes += tree_node_sizes[i];
8859 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8860 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8861 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8862 fprintf (stderr, "Code Nodes\n");
8863 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8864 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8865 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
8866 tree_code_counts[i]);
8867 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8868 fprintf (stderr, "\n");
8869 ssanames_print_statistics ();
8870 fprintf (stderr, "\n");
8871 phinodes_print_statistics ();
8872 fprintf (stderr, "\n");
8874 else
8875 fprintf (stderr, "(No per-node statistics)\n");
8877 print_type_hash_statistics ();
8878 print_debug_expr_statistics ();
8879 print_value_expr_statistics ();
8880 lang_hooks.print_statistics ();
8883 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8885 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8887 unsigned
8888 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8890 /* This relies on the raw feedback's top 4 bits being zero. */
8891 #define FEEDBACK(X) ((X) * 0x04c11db7)
8892 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8893 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8894 static const unsigned syndromes[16] =
8896 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8897 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8898 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8899 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8901 #undef FEEDBACK
8902 #undef SYNDROME
8904 value <<= (32 - bytes * 8);
8905 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8907 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8909 chksum = (chksum << 4) ^ feedback;
8912 return chksum;
8915 /* Generate a crc32 of a string. */
8917 unsigned
8918 crc32_string (unsigned chksum, const char *string)
8921 chksum = crc32_byte (chksum, *string);
8922 while (*string++);
8923 return chksum;
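/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  Checksums chain naturally: hashing a second string
   seeded with the checksum of the first yields a combined value, much
   as get_file_function_name uses crc32_string (0, name) below.  */

static unsigned
example_chained_crc32 (const char *name, const char *file)
{
  return crc32_string (crc32_string (0, name), file);
}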
8926 /* P is a string that will be used in a symbol. Mask out any characters
8927 that are not valid in that context. */
8929 void
8930 clean_symbol_name (char *p)
8932 for (; *p; p++)
8933 if (! (ISALNUM (*p)
8934 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8935 || *p == '$'
8936 #endif
8937 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8938 || *p == '.'
8939 #endif
8941 *p = '_';
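/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  Every character that is not alphanumeric (and, depending
   on NO_DOT_IN_LABEL / NO_DOLLAR_IN_LABEL, not '.' or '$') is replaced
   by '_' in the returned copy.  */

static char *
example_symbol_from_string (const char *s)
{
  char *copy = xstrdup (s);
  clean_symbol_name (copy);
  return copy;
}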
8944 /* For anonymous aggregate types, we need some sort of name to
8945 hold on to. In practice, this should not appear, but it should
8946 not be harmful if it does. */
8947 bool
8948 anon_aggrname_p (const_tree id_node)
8950 #ifndef NO_DOT_IN_LABEL
8951 return (IDENTIFIER_POINTER (id_node)[0] == '.'
8952 && IDENTIFIER_POINTER (id_node)[1] == '_');
8953 #else /* NO_DOT_IN_LABEL */
8954 #ifndef NO_DOLLAR_IN_LABEL
8955 return (IDENTIFIER_POINTER (id_node)[0] == '$'
8956 && IDENTIFIER_POINTER (id_node)[1] == '_');
8957 #else /* NO_DOLLAR_IN_LABEL */
8958 #define ANON_AGGRNAME_PREFIX "__anon_"
8959 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
8960 sizeof (ANON_AGGRNAME_PREFIX) - 1));
8961 #endif /* NO_DOLLAR_IN_LABEL */
8962 #endif /* NO_DOT_IN_LABEL */
8965 /* Return a format for an anonymous aggregate name. */
8966 const char *
8967 anon_aggrname_format ()
8969 #ifndef NO_DOT_IN_LABEL
8970 return "._%d";
8971 #else /* NO_DOT_IN_LABEL */
8972 #ifndef NO_DOLLAR_IN_LABEL
8973 return "$_%d";
8974 #else /* NO_DOLLAR_IN_LABEL */
8975 return "__anon_%d";
8976 #endif /* NO_DOLLAR_IN_LABEL */
8977 #endif /* NO_DOT_IN_LABEL */
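/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  With the default configuration the format is "._%d",
   so a counter of 3 yields the identifier "._3".  */

static tree
example_anon_aggr_identifier (int counter)
{
  char buf[64];
  snprintf (buf, sizeof (buf), anon_aggrname_format (), counter);
  return get_identifier (buf);
}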
8980 /* Generate a name for a special-purpose function.
8981 The generated name may need to be unique across the whole link.
8982 Changes to this function may also require corresponding changes to
8983 xstrdup_mask_random.
8984 TYPE is some string to identify the purpose of this function to the
8985 linker or collect2; it must start with an uppercase letter,
8986 one of:
8987 I - for constructors
8988 D - for destructors
8989 N - for C++ anonymous namespaces
8990 F - for DWARF unwind frame information. */
8992 tree
8993 get_file_function_name (const char *type)
8995 char *buf;
8996 const char *p;
8997 char *q;
8999 /* If we already have a name we know to be unique, just use that. */
9000 if (first_global_object_name)
9001 p = q = ASTRDUP (first_global_object_name);
9002 /* If the target is handling the constructors/destructors, they
9003 will be local to this file and the name is only necessary for
9004 debugging purposes.
9005 We also assign sub_I and sub_D suffixes to constructors called from
9006 the global static constructors. These are always local. */
9007 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9008 || (strncmp (type, "sub_", 4) == 0
9009 && (type[4] == 'I' || type[4] == 'D')))
9011 const char *file = main_input_filename;
9012 if (! file)
9013 file = LOCATION_FILE (input_location);
9014 /* Just use the file's basename, because the full pathname
9015 might be quite long. */
9016 p = q = ASTRDUP (lbasename (file));
9018 else
9020 /* Otherwise, the name must be unique across the entire link.
9021 We don't have anything that we know to be unique to this translation
9022 unit, so use what we do have and throw in some randomness. */
9023 unsigned len;
9024 const char *name = weak_global_object_name;
9025 const char *file = main_input_filename;
9027 if (! name)
9028 name = "";
9029 if (! file)
9030 file = LOCATION_FILE (input_location);
9032 len = strlen (file);
9033 q = (char *) alloca (9 + 19 + len + 1);
9034 memcpy (q, file, len + 1);
9036 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9037 crc32_string (0, name), get_random_seed (false));
9039 p = q;
9042 clean_symbol_name (q);
9043 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9044 + strlen (type));
9046 /* Set up the name of the file-level functions we may need.
9047 Use a global object (which is already required to be unique over
9048 the program) rather than the file name (which imposes extra
9049 constraints). */
9050 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9052 return get_identifier (buf);
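/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  For the constructor kind "I" the returned identifier
   has the form "_GLOBAL__I_<name>", where <name> is derived from the
   first global object, the input file's basename, or the randomized
   fallback described above.  */

static tree
example_static_ctor_name (void)
{
  return get_file_function_name ("I");
}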
9055 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9057 /* Complain that the tree code of NODE does not match the expected 0
9058 terminated list of trailing codes. The trailing code list can be
9059 empty, for a more vague error message. FILE, LINE, and FUNCTION
9060 are of the caller. */
9062 void
9063 tree_check_failed (const_tree node, const char *file,
9064 int line, const char *function, ...)
9066 va_list args;
9067 const char *buffer;
9068 unsigned length = 0;
9069 enum tree_code code;
9071 va_start (args, function);
9072 while ((code = (enum tree_code) va_arg (args, int)))
9073 length += 4 + strlen (get_tree_code_name (code));
9074 va_end (args);
9075 if (length)
9077 char *tmp;
9078 va_start (args, function);
9079 length += strlen ("expected ");
9080 buffer = tmp = (char *) alloca (length);
9081 length = 0;
9082 while ((code = (enum tree_code) va_arg (args, int)))
9084 const char *prefix = length ? " or " : "expected ";
9086 strcpy (tmp + length, prefix);
9087 length += strlen (prefix);
9088 strcpy (tmp + length, get_tree_code_name (code));
9089 length += strlen (get_tree_code_name (code));
9091 va_end (args);
9093 else
9094 buffer = "unexpected node";
9096 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9097 buffer, get_tree_code_name (TREE_CODE (node)),
9098 function, trim_filename (file), line);
9101 /* Complain that the tree code of NODE does match the expected 0
9102 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9103 the caller. */
9105 void
9106 tree_not_check_failed (const_tree node, const char *file,
9107 int line, const char *function, ...)
9109 va_list args;
9110 char *buffer;
9111 unsigned length = 0;
9112 enum tree_code code;
9114 va_start (args, function);
9115 while ((code = (enum tree_code) va_arg (args, int)))
9116 length += 4 + strlen (get_tree_code_name (code));
9117 va_end (args);
9118 va_start (args, function);
9119 buffer = (char *) alloca (length);
9120 length = 0;
9121 while ((code = (enum tree_code) va_arg (args, int)))
9123 if (length)
9125 strcpy (buffer + length, " or ");
9126 length += 4;
9128 strcpy (buffer + length, get_tree_code_name (code));
9129 length += strlen (get_tree_code_name (code));
9131 va_end (args);
9133 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9134 buffer, get_tree_code_name (TREE_CODE (node)),
9135 function, trim_filename (file), line);
9138 /* Similar to tree_check_failed, except that we check for a class of tree
9139 code, given in CL. */
9141 void
9142 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9143 const char *file, int line, const char *function)
9145 internal_error
9146 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9147 TREE_CODE_CLASS_STRING (cl),
9148 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9149 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9152 /* Similar to tree_check_failed, except that instead of specifying a
9153 dozen codes, use the knowledge that they're all sequential. */
9155 void
9156 tree_range_check_failed (const_tree node, const char *file, int line,
9157 const char *function, enum tree_code c1,
9158 enum tree_code c2)
9160 char *buffer;
9161 unsigned length = 0;
9162 unsigned int c;
9164 for (c = c1; c <= c2; ++c)
9165 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9167 length += strlen ("expected ");
9168 buffer = (char *) alloca (length);
9169 length = 0;
9171 for (c = c1; c <= c2; ++c)
9173 const char *prefix = length ? " or " : "expected ";
9175 strcpy (buffer + length, prefix);
9176 length += strlen (prefix);
9177 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9178 length += strlen (get_tree_code_name ((enum tree_code) c));
9181 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9182 buffer, get_tree_code_name (TREE_CODE (node)),
9183 function, trim_filename (file), line);
9187 /* Similar to tree_check_failed, except that we check that a tree does
9188 not have the specified code, given in CL. */
9190 void
9191 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9192 const char *file, int line, const char *function)
9194 internal_error
9195 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9196 TREE_CODE_CLASS_STRING (cl),
9197 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9198 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9202 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9204 void
9205 omp_clause_check_failed (const_tree node, const char *file, int line,
9206 const char *function, enum omp_clause_code code)
9208 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9209 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9210 function, trim_filename (file), line);
9214 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9216 void
9217 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9218 const char *function, enum omp_clause_code c1,
9219 enum omp_clause_code c2)
9221 char *buffer;
9222 unsigned length = 0;
9223 unsigned int c;
9225 for (c = c1; c <= c2; ++c)
9226 length += 4 + strlen (omp_clause_code_name[c]);
9228 length += strlen ("expected ");
9229 buffer = (char *) alloca (length);
9230 length = 0;
9232 for (c = c1; c <= c2; ++c)
9234 const char *prefix = length ? " or " : "expected ";
9236 strcpy (buffer + length, prefix);
9237 length += strlen (prefix);
9238 strcpy (buffer + length, omp_clause_code_name[c]);
9239 length += strlen (omp_clause_code_name[c]);
9242 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9243 buffer, omp_clause_code_name[TREE_CODE (node)],
9244 function, trim_filename (file), line);
9248 #undef DEFTREESTRUCT
9249 #define DEFTREESTRUCT(VAL, NAME) NAME,
9251 static const char *ts_enum_names[] = {
9252 #include "treestruct.def"
9254 #undef DEFTREESTRUCT
9256 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9258 /* Similar to tree_class_check_failed, except that we check for
9259 whether CODE contains the tree structure identified by EN. */
9261 void
9262 tree_contains_struct_check_failed (const_tree node,
9263 const enum tree_node_structure_enum en,
9264 const char *file, int line,
9265 const char *function)
9267 internal_error
9268 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9269 TS_ENUM_NAME (en),
9270 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9274 /* Similar to above, except that the check is for the bounds of a
9275 tree_int_cst's (dynamically sized) vector of elements. */
9277 void
9278 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9279 const char *function)
9281 internal_error
9282 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9283 idx + 1, len, function, trim_filename (file), line);
9286 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9287 (dynamically sized) vector. */
9289 void
9290 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9291 const char *function)
9293 internal_error
9294 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9295 idx + 1, len, function, trim_filename (file), line);
9298 /* Similar to above, except that the check is for the bounds of the operand
9299 vector of an expression node EXP. */
9301 void
9302 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9303 int line, const char *function)
9305 enum tree_code code = TREE_CODE (exp);
9306 internal_error
9307 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9308 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9309 function, trim_filename (file), line);
9312 /* Similar to above, except that the check is for the number of
9313 operands of an OMP_CLAUSE node. */
9315 void
9316 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9317 int line, const char *function)
9319 internal_error
9320 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9321 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9322 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9323 trim_filename (file), line);
9325 #endif /* ENABLE_TREE_CHECKING */
9327 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9328 and mapped to the machine mode MODE. Initialize its fields and build
9329 the information necessary for debugging output. */
9331 static tree
9332 make_vector_type (tree innertype, int nunits, machine_mode mode)
9334 tree t;
9335 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9337 t = make_node (VECTOR_TYPE);
9338 TREE_TYPE (t) = mv_innertype;
9339 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9340 SET_TYPE_MODE (t, mode);
9342 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9343 SET_TYPE_STRUCTURAL_EQUALITY (t);
9344 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9345 || mode != VOIDmode)
9346 && !VECTOR_BOOLEAN_TYPE_P (t))
9347 TYPE_CANONICAL (t)
9348 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9350 layout_type (t);
9352 hashval_t hash = type_hash_canon_hash (t);
9353 t = type_hash_canon (hash, t);
9355 /* We have built a main variant, based on the main variant of the
9356 inner type. Use it to build the variant we return. */
9357 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9358 && TREE_TYPE (t) != innertype)
9359 return build_type_attribute_qual_variant (t,
9360 TYPE_ATTRIBUTES (innertype),
9361 TYPE_QUALS (innertype));
9363 return t;
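/* Illustrative sketch, not part of the original file; the helper name is
   hypothetical.  make_vector_type is static; outside callers normally go
   through build_vector_type (declared in tree.h), which chooses the
   machine mode itself.  */

static tree
example_v4sf_type (void)
{
  return build_vector_type (float_type_node, 4);
}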
9366 static tree
9367 make_or_reuse_type (unsigned size, int unsignedp)
9369 int i;
9371 if (size == INT_TYPE_SIZE)
9372 return unsignedp ? unsigned_type_node : integer_type_node;
9373 if (size == CHAR_TYPE_SIZE)
9374 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9375 if (size == SHORT_TYPE_SIZE)
9376 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9377 if (size == LONG_TYPE_SIZE)
9378 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9379 if (size == LONG_LONG_TYPE_SIZE)
9380 return (unsignedp ? long_long_unsigned_type_node
9381 : long_long_integer_type_node);
9383 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9384 if (size == int_n_data[i].bitsize
9385 && int_n_enabled_p[i])
9386 return (unsignedp ? int_n_trees[i].unsigned_type
9387 : int_n_trees[i].signed_type);
9389 if (unsignedp)
9390 return make_unsigned_type (size);
9391 else
9392 return make_signed_type (size);
9395 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9397 static tree
9398 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9400 if (satp)
9402 if (size == SHORT_FRACT_TYPE_SIZE)
9403 return unsignedp ? sat_unsigned_short_fract_type_node
9404 : sat_short_fract_type_node;
9405 if (size == FRACT_TYPE_SIZE)
9406 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9407 if (size == LONG_FRACT_TYPE_SIZE)
9408 return unsignedp ? sat_unsigned_long_fract_type_node
9409 : sat_long_fract_type_node;
9410 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9411 return unsignedp ? sat_unsigned_long_long_fract_type_node
9412 : sat_long_long_fract_type_node;
9414 else
9416 if (size == SHORT_FRACT_TYPE_SIZE)
9417 return unsignedp ? unsigned_short_fract_type_node
9418 : short_fract_type_node;
9419 if (size == FRACT_TYPE_SIZE)
9420 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9421 if (size == LONG_FRACT_TYPE_SIZE)
9422 return unsignedp ? unsigned_long_fract_type_node
9423 : long_fract_type_node;
9424 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9425 return unsignedp ? unsigned_long_long_fract_type_node
9426 : long_long_fract_type_node;
9429 return make_fract_type (size, unsignedp, satp);
9432 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9434 static tree
9435 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9437 if (satp)
9439 if (size == SHORT_ACCUM_TYPE_SIZE)
9440 return unsignedp ? sat_unsigned_short_accum_type_node
9441 : sat_short_accum_type_node;
9442 if (size == ACCUM_TYPE_SIZE)
9443 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9444 if (size == LONG_ACCUM_TYPE_SIZE)
9445 return unsignedp ? sat_unsigned_long_accum_type_node
9446 : sat_long_accum_type_node;
9447 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9448 return unsignedp ? sat_unsigned_long_long_accum_type_node
9449 : sat_long_long_accum_type_node;
9451 else
9453 if (size == SHORT_ACCUM_TYPE_SIZE)
9454 return unsignedp ? unsigned_short_accum_type_node
9455 : short_accum_type_node;
9456 if (size == ACCUM_TYPE_SIZE)
9457 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9458 if (size == LONG_ACCUM_TYPE_SIZE)
9459 return unsignedp ? unsigned_long_accum_type_node
9460 : long_accum_type_node;
9461 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9462 return unsignedp ? unsigned_long_long_accum_type_node
9463 : long_long_accum_type_node;
9466 return make_accum_type (size, unsignedp, satp);
9470 /* Create an atomic variant node for TYPE. This routine is called
9471 during initialization of data types to create the 5 basic atomic
9472 types. The generic build_variant_type function requires these to
9473 already be set up in order to function properly, so cannot be
9474 called from there. If ALIGN is non-zero, then ensure alignment is
9475 overridden to this value. */
9477 static tree
9478 build_atomic_base (tree type, unsigned int align)
9480 tree t;
9482 /* Make sure it's not already registered. */
9483 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9484 return t;
9486 t = build_variant_type_copy (type);
9487 set_type_quals (t, TYPE_QUAL_ATOMIC);
9489 if (align)
9490 SET_TYPE_ALIGN (t, align);
9492 return t;
9495 /* Information about the _FloatN and _FloatNx types. This must be in
9496 the same order as the corresponding TI_* enum values. */
9497 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9499 { 16, false },
9500 { 32, false },
9501 { 64, false },
9502 { 128, false },
9503 { 32, true },
9504 { 64, true },
9505 { 128, true },
9509 /* Create nodes for all integer types (and error_mark_node) using the sizes
9510 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9512 void
9513 build_common_tree_nodes (bool signed_char)
9515 int i;
9517 error_mark_node = make_node (ERROR_MARK);
9518 TREE_TYPE (error_mark_node) = error_mark_node;
9520 initialize_sizetypes ();
9522 /* Define both `signed char' and `unsigned char'. */
9523 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9524 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9525 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9526 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9528 /* Define `char', which is like either `signed char' or `unsigned char'
9529 but not the same as either. */
9530 char_type_node
9531 = (signed_char
9532 ? make_signed_type (CHAR_TYPE_SIZE)
9533 : make_unsigned_type (CHAR_TYPE_SIZE));
9534 TYPE_STRING_FLAG (char_type_node) = 1;
9536 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9537 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9538 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9539 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9540 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9541 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9542 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9543 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9545 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9547 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9548 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9549 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9550 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9552 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9553 && int_n_enabled_p[i])
9555 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9556 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9560 /* Define a boolean type. This type only represents boolean values but
9561 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9562 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9563 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9564 TYPE_PRECISION (boolean_type_node) = 1;
9565 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9567 /* Define what type to use for size_t. */
9568 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9569 size_type_node = unsigned_type_node;
9570 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9571 size_type_node = long_unsigned_type_node;
9572 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9573 size_type_node = long_long_unsigned_type_node;
9574 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9575 size_type_node = short_unsigned_type_node;
9576 else
9578 int i;
9580 size_type_node = NULL_TREE;
9581 for (i = 0; i < NUM_INT_N_ENTS; i++)
9582 if (int_n_enabled_p[i])
9584 char name[50];
9585 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9587 if (strcmp (name, SIZE_TYPE) == 0)
9589 size_type_node = int_n_trees[i].unsigned_type;
9592 if (size_type_node == NULL_TREE)
9593 gcc_unreachable ();
9596 /* Define what type to use for ptrdiff_t. */
9597 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9598 ptrdiff_type_node = integer_type_node;
9599 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9600 ptrdiff_type_node = long_integer_type_node;
9601 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9602 ptrdiff_type_node = long_long_integer_type_node;
9603 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9604 ptrdiff_type_node = short_integer_type_node;
9605 else
9607 ptrdiff_type_node = NULL_TREE;
9608 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9609 if (int_n_enabled_p[i])
9611 char name[50];
9612 sprintf (name, "__int%d", int_n_data[i].bitsize);
9613 if (strcmp (name, PTRDIFF_TYPE) == 0)
9614 ptrdiff_type_node = int_n_trees[i].signed_type;
9616 if (ptrdiff_type_node == NULL_TREE)
9617 gcc_unreachable ();
9620 /* Fill in the rest of the sized types. Reuse existing type nodes
9621 when possible. */
9622 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9623 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9624 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9625 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9626 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9628 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9629 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9630 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9631 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9632 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9634 /* Don't call build_qualified_type for atomics. That routine does
9635 special processing for atomics, and until they are initialized
9636 it's better not to make that call.
9638 Check to see if there is a target override for atomic types. */
9640 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9641 targetm.atomic_align_for_mode (QImode));
9642 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9643 targetm.atomic_align_for_mode (HImode));
9644 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9645 targetm.atomic_align_for_mode (SImode));
9646 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9647 targetm.atomic_align_for_mode (DImode));
9648 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9649 targetm.atomic_align_for_mode (TImode));
9651 access_public_node = get_identifier ("public");
9652 access_protected_node = get_identifier ("protected");
9653 access_private_node = get_identifier ("private");
9655 /* Define these next since types below may use them. */
9656 integer_zero_node = build_int_cst (integer_type_node, 0);
9657 integer_one_node = build_int_cst (integer_type_node, 1);
9658 integer_three_node = build_int_cst (integer_type_node, 3);
9659 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9661 size_zero_node = size_int (0);
9662 size_one_node = size_int (1);
9663 bitsize_zero_node = bitsize_int (0);
9664 bitsize_one_node = bitsize_int (1);
9665 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9667 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9668 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9670 void_type_node = make_node (VOID_TYPE);
9671 layout_type (void_type_node);
9673 pointer_bounds_type_node = targetm.chkp_bound_type ();
9675 /* We are not going to have real types in C with less than byte alignment,
9676 so we might as well not have any types that claim to have it. */
9677 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9678 TYPE_USER_ALIGN (void_type_node) = 0;
9680 void_node = make_node (VOID_CST);
9681 TREE_TYPE (void_node) = void_type_node;
9683 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9684 layout_type (TREE_TYPE (null_pointer_node));
9686 ptr_type_node = build_pointer_type (void_type_node);
9687 const_ptr_type_node
9688 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9689 for (unsigned i = 0;
9690 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9691 ++i)
9692 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9694 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9696 float_type_node = make_node (REAL_TYPE);
9697 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9698 layout_type (float_type_node);
9700 double_type_node = make_node (REAL_TYPE);
9701 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9702 layout_type (double_type_node);
9704 long_double_type_node = make_node (REAL_TYPE);
9705 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9706 layout_type (long_double_type_node);
9708 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9710 int n = floatn_nx_types[i].n;
9711 bool extended = floatn_nx_types[i].extended;
9712 scalar_float_mode mode;
9713 if (!targetm.floatn_mode (n, extended).exists (&mode))
9714 continue;
9715 int precision = GET_MODE_PRECISION (mode);
9716 /* Work around the rs6000 KFmode having precision 113 not
9717 128. */
9718 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9719 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9720 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9721 if (!extended)
9722 gcc_assert (min_precision == n);
9723 if (precision < min_precision)
9724 precision = min_precision;
9725 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9726 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9727 layout_type (FLOATN_NX_TYPE_NODE (i));
9728 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9731 float_ptr_type_node = build_pointer_type (float_type_node);
9732 double_ptr_type_node = build_pointer_type (double_type_node);
9733 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9734 integer_ptr_type_node = build_pointer_type (integer_type_node);
9736 /* Fixed size integer types. */
9737 uint16_type_node = make_or_reuse_type (16, 1);
9738 uint32_type_node = make_or_reuse_type (32, 1);
9739 uint64_type_node = make_or_reuse_type (64, 1);
9741 /* Decimal float types. */
9742 dfloat32_type_node = make_node (REAL_TYPE);
9743 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9744 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9745 layout_type (dfloat32_type_node);
9746 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9748 dfloat64_type_node = make_node (REAL_TYPE);
9749 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9750 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9751 layout_type (dfloat64_type_node);
9752 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9754 dfloat128_type_node = make_node (REAL_TYPE);
9755 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9756 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9757 layout_type (dfloat128_type_node);
9758 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9760 complex_integer_type_node = build_complex_type (integer_type_node, true);
9761 complex_float_type_node = build_complex_type (float_type_node, true);
9762 complex_double_type_node = build_complex_type (double_type_node, true);
9763 complex_long_double_type_node = build_complex_type (long_double_type_node,
9764 true);
9766 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9768 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9769 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9770 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9773 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9774 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9775 sat_ ## KIND ## _type_node = \
9776 make_sat_signed_ ## KIND ## _type (SIZE); \
9777 sat_unsigned_ ## KIND ## _type_node = \
9778 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9779 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9780 unsigned_ ## KIND ## _type_node = \
9781 make_unsigned_ ## KIND ## _type (SIZE);
9783 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9784 sat_ ## WIDTH ## KIND ## _type_node = \
9785 make_sat_signed_ ## KIND ## _type (SIZE); \
9786 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9787 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9788 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9789 unsigned_ ## WIDTH ## KIND ## _type_node = \
9790 make_unsigned_ ## KIND ## _type (SIZE);
9792 /* Make fixed-point type nodes based on four different widths. */
9793 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9794 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9795 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9796 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9797 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9799 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9800 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9801 NAME ## _type_node = \
9802 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9803 u ## NAME ## _type_node = \
9804 make_or_reuse_unsigned_ ## KIND ## _type \
9805 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9806 sat_ ## NAME ## _type_node = \
9807 make_or_reuse_sat_signed_ ## KIND ## _type \
9808 (GET_MODE_BITSIZE (MODE ## mode)); \
9809 sat_u ## NAME ## _type_node = \
9810 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9811 (GET_MODE_BITSIZE (U ## MODE ## mode));
9813 /* Fixed-point type and mode nodes. */
9814 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9815 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9816 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9817 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9818 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9819 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9820 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9821 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9822 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9823 MAKE_FIXED_MODE_NODE (accum, da, DA)
9824 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9827 tree t = targetm.build_builtin_va_list ();
9829 /* Many back-ends define record types without setting TYPE_NAME.
9830 If we copied the record type here, we'd keep the original
9831 record type without a name. This breaks name mangling. So,
9832 don't copy record types and let c_common_nodes_and_builtins()
9833 declare the type to be __builtin_va_list. */
9834 if (TREE_CODE (t) != RECORD_TYPE)
9835 t = build_variant_type_copy (t);
9837 va_list_type_node = t;
9841 /* Modify DECL for given flags.
9842 TM_PURE attribute is set only on types, so the function will modify
9843 DECL's type when ECF_TM_PURE is used. */
9845 void
9846 set_call_expr_flags (tree decl, int flags)
9848 if (flags & ECF_NOTHROW)
9849 TREE_NOTHROW (decl) = 1;
9850 if (flags & ECF_CONST)
9851 TREE_READONLY (decl) = 1;
9852 if (flags & ECF_PURE)
9853 DECL_PURE_P (decl) = 1;
9854 if (flags & ECF_LOOPING_CONST_OR_PURE)
9855 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9856 if (flags & ECF_NOVOPS)
9857 DECL_IS_NOVOPS (decl) = 1;
9858 if (flags & ECF_NORETURN)
9859 TREE_THIS_VOLATILE (decl) = 1;
9860 if (flags & ECF_MALLOC)
9861 DECL_IS_MALLOC (decl) = 1;
9862 if (flags & ECF_RETURNS_TWICE)
9863 DECL_IS_RETURNS_TWICE (decl) = 1;
9864 if (flags & ECF_LEAF)
9865 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9866 NULL, DECL_ATTRIBUTES (decl));
9867 if (flags & ECF_COLD)
9868 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9869 NULL, DECL_ATTRIBUTES (decl));
9870 if (flags & ECF_RET1)
9871 DECL_ATTRIBUTES (decl)
9872 = tree_cons (get_identifier ("fn spec"),
9873 build_tree_list (NULL_TREE, build_string (1, "1")),
9874 DECL_ATTRIBUTES (decl));
9875 if ((flags & ECF_TM_PURE) && flag_tm)
9876 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9877 /* Looping const or pure is implied by noreturn.
9878 There is currently no way to declare looping const or looping pure alone. */
9879 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9880 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9884 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9886 static void
9887 local_define_builtin (const char *name, tree type, enum built_in_function code,
9888 const char *library_name, int ecf_flags)
9890 tree decl;
9892 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9893 library_name, NULL_TREE);
9894 set_call_expr_flags (decl, ecf_flags);
9896 set_builtin_decl (code, decl, true);
9899 /* Call this function after instantiating all builtins that the language
9900 front end cares about. This will build the rest of the builtins
9901 and internal functions that are relied upon by the tree optimizers and
9902 the middle-end. */
9904 void
9905 build_common_builtin_nodes (void)
9907 tree tmp, ftype;
9908 int ecf_flags;
9910 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9911 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9913 ftype = build_function_type (void_type_node, void_list_node);
9914 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9915 local_define_builtin ("__builtin_unreachable", ftype,
9916 BUILT_IN_UNREACHABLE,
9917 "__builtin_unreachable",
9918 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9919 | ECF_CONST | ECF_COLD);
9920 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9921 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9922 "abort",
9923 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9926 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9927 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9929 ftype = build_function_type_list (ptr_type_node,
9930 ptr_type_node, const_ptr_type_node,
9931 size_type_node, NULL_TREE);
9933 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9934 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9935 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9936 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9937 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9938 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9941 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9943 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9944 const_ptr_type_node, size_type_node,
9945 NULL_TREE);
9946 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9947 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9950 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9952 ftype = build_function_type_list (ptr_type_node,
9953 ptr_type_node, integer_type_node,
9954 size_type_node, NULL_TREE);
9955 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9956 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9959 /* If we're checking the stack, `alloca' can throw. */
9960 const int alloca_flags
9961 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9963 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9965 ftype = build_function_type_list (ptr_type_node,
9966 size_type_node, NULL_TREE);
9967 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9968 "alloca", alloca_flags);
9971 ftype = build_function_type_list (ptr_type_node, size_type_node,
9972 size_type_node, NULL_TREE);
9973 local_define_builtin ("__builtin_alloca_with_align", ftype,
9974 BUILT_IN_ALLOCA_WITH_ALIGN,
9975 "__builtin_alloca_with_align",
9976 alloca_flags);
9978 ftype = build_function_type_list (ptr_type_node, size_type_node,
9979 size_type_node, size_type_node, NULL_TREE);
9980 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9981 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9982 "__builtin_alloca_with_align_and_max",
9983 alloca_flags);
9985 ftype = build_function_type_list (void_type_node,
9986 ptr_type_node, ptr_type_node,
9987 ptr_type_node, NULL_TREE);
9988 local_define_builtin ("__builtin_init_trampoline", ftype,
9989 BUILT_IN_INIT_TRAMPOLINE,
9990 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9991 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9992 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9993 "__builtin_init_heap_trampoline",
9994 ECF_NOTHROW | ECF_LEAF);
9995 local_define_builtin ("__builtin_init_descriptor", ftype,
9996 BUILT_IN_INIT_DESCRIPTOR,
9997 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9999 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10000 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10001 BUILT_IN_ADJUST_TRAMPOLINE,
10002 "__builtin_adjust_trampoline",
10003 ECF_CONST | ECF_NOTHROW);
10004 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10005 BUILT_IN_ADJUST_DESCRIPTOR,
10006 "__builtin_adjust_descriptor",
10007 ECF_CONST | ECF_NOTHROW);
10009 ftype = build_function_type_list (void_type_node,
10010 ptr_type_node, ptr_type_node, NULL_TREE);
10011 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10012 BUILT_IN_NONLOCAL_GOTO,
10013 "__builtin_nonlocal_goto",
10014 ECF_NORETURN | ECF_NOTHROW);
10016 ftype = build_function_type_list (void_type_node,
10017 ptr_type_node, ptr_type_node, NULL_TREE);
10018 local_define_builtin ("__builtin_setjmp_setup", ftype,
10019 BUILT_IN_SETJMP_SETUP,
10020 "__builtin_setjmp_setup", ECF_NOTHROW);
10022 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10023 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10024 BUILT_IN_SETJMP_RECEIVER,
10025 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10027 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10028 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10029 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10031 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10032 local_define_builtin ("__builtin_stack_restore", ftype,
10033 BUILT_IN_STACK_RESTORE,
10034 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10036 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10037 const_ptr_type_node, size_type_node,
10038 NULL_TREE);
10039 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10040 "__builtin_memcmp_eq",
10041 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10043 /* If there's a possibility that we might use the ARM EABI, build the
10044 alternate __cxa_end_cleanup node used to resume from C++. */
10045 if (targetm.arm_eabi_unwinder)
10047 ftype = build_function_type_list (void_type_node, NULL_TREE);
10048 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10049 BUILT_IN_CXA_END_CLEANUP,
10050 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10053 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10054 local_define_builtin ("__builtin_unwind_resume", ftype,
10055 BUILT_IN_UNWIND_RESUME,
10056 ((targetm_common.except_unwind_info (&global_options)
10057 == UI_SJLJ)
10058 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10059 ECF_NORETURN);
10061 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10063 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10064 NULL_TREE);
10065 local_define_builtin ("__builtin_return_address", ftype,
10066 BUILT_IN_RETURN_ADDRESS,
10067 "__builtin_return_address",
10068 ECF_NOTHROW);
10071 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10072 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10074 ftype = build_function_type_list (void_type_node, ptr_type_node,
10075 ptr_type_node, NULL_TREE);
10076 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10077 local_define_builtin ("__cyg_profile_func_enter", ftype,
10078 BUILT_IN_PROFILE_FUNC_ENTER,
10079 "__cyg_profile_func_enter", 0);
10080 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10081 local_define_builtin ("__cyg_profile_func_exit", ftype,
10082 BUILT_IN_PROFILE_FUNC_EXIT,
10083 "__cyg_profile_func_exit", 0);
10086 /* The exception object and filter values from the runtime. The argument
10087 must be zero before exception lowering, i.e. from the front end. After
10088 exception lowering, it will be the region number for the exception
10089 landing pad. These functions are PURE instead of CONST to prevent
10090 them from being hoisted past the exception edge that will initialize
10091 their values in the landing pad. */
10092 ftype = build_function_type_list (ptr_type_node,
10093 integer_type_node, NULL_TREE);
10094 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10095 /* Only use TM_PURE if we have TM language support. */
10096 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10097 ecf_flags |= ECF_TM_PURE;
10098 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10099 "__builtin_eh_pointer", ecf_flags);
10101 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10102 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10103 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10104 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10106 ftype = build_function_type_list (void_type_node,
10107 integer_type_node, integer_type_node,
10108 NULL_TREE);
10109 local_define_builtin ("__builtin_eh_copy_values", ftype,
10110 BUILT_IN_EH_COPY_VALUES,
10111 "__builtin_eh_copy_values", ECF_NOTHROW);
10113 /* Complex multiplication and division. These are handled as builtins
10114 rather than optabs because emit_library_call_value doesn't support
10115 complex. Further, we can do slightly better with folding these
10116 beasties if the real and imaginary parts of the arguments are separate. */
10118 int mode;
10120 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10122 char mode_name_buf[4], *q;
10123 const char *p;
10124 enum built_in_function mcode, dcode;
10125 tree type, inner_type;
10126 const char *prefix = "__";
10128 if (targetm.libfunc_gnu_prefix)
10129 prefix = "__gnu_";
10131 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10132 if (type == NULL)
10133 continue;
10134 inner_type = TREE_TYPE (type);
10136 ftype = build_function_type_list (type, inner_type, inner_type,
10137 inner_type, inner_type, NULL_TREE);
10139 mcode = ((enum built_in_function)
10140 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10141 dcode = ((enum built_in_function)
10142 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10144 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10145 *q = TOLOWER (*p);
10146 *q = '\0';
10148 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10149 NULL);
10150 local_define_builtin (built_in_names[mcode], ftype, mcode,
10151 built_in_names[mcode],
10152 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10154 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10155 NULL);
10156 local_define_builtin (built_in_names[dcode], ftype, dcode,
10157 built_in_names[dcode],
10158 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10162 init_internal_fns ();
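/* As a concrete instance of the loop above: for SCmode the generated names
   are "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" / "__gnu_divsc3" when
   targetm.libfunc_gnu_prefix is set), with a type along the lines of

     complex float __mulsc3 (float a, float b, float c, float d);

   so the real and imaginary parts of both operands are passed as separate
   scalar arguments.  */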
10165 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10166 better way.
10168 If we requested a pointer to a vector, build up the pointers that
10169 we stripped off while looking for the inner type. Similarly for
10170 return values from functions.
10172 The argument TYPE is the top of the chain, and BOTTOM is the
10173 new type which we will point to. */
10175 tree
10176 reconstruct_complex_type (tree type, tree bottom)
10178 tree inner, outer;
10180 if (TREE_CODE (type) == POINTER_TYPE)
10182 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10183 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10184 TYPE_REF_CAN_ALIAS_ALL (type));
10186 else if (TREE_CODE (type) == REFERENCE_TYPE)
10188 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10189 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10190 TYPE_REF_CAN_ALIAS_ALL (type));
10192 else if (TREE_CODE (type) == ARRAY_TYPE)
10194 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10195 outer = build_array_type (inner, TYPE_DOMAIN (type));
10197 else if (TREE_CODE (type) == FUNCTION_TYPE)
10199 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10200 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10202 else if (TREE_CODE (type) == METHOD_TYPE)
10204 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10205 /* The build_method_type_directly() routine prepends 'this' to the argument
10206 list, so we must compensate by getting rid of it. */
10207 outer
10208 = build_method_type_directly
10209 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10210 inner,
10211 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10213 else if (TREE_CODE (type) == OFFSET_TYPE)
10215 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10216 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10218 else
10219 return bottom;
10221 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10222 TYPE_QUALS (type));
10225 /* Returns a vector tree node given a mode (integer or vector mode) and
10226 the inner type. */
10227 tree
10228 build_vector_type_for_mode (tree innertype, machine_mode mode)
10230 int nunits;
10231 unsigned int bitsize;
10233 switch (GET_MODE_CLASS (mode))
10235 case MODE_VECTOR_INT:
10236 case MODE_VECTOR_FLOAT:
10237 case MODE_VECTOR_FRACT:
10238 case MODE_VECTOR_UFRACT:
10239 case MODE_VECTOR_ACCUM:
10240 case MODE_VECTOR_UACCUM:
10241 nunits = GET_MODE_NUNITS (mode);
10242 break;
10244 case MODE_INT:
10245 /* Check that there are no leftover bits. */
10246 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10247 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10248 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10249 break;
10251 default:
10252 gcc_unreachable ();
10255 return make_vector_type (innertype, nunits, mode);
10258 /* Similarly, but takes the inner type and number of units, which must be
10259 a power of two. */
10261 tree
10262 build_vector_type (tree innertype, int nunits)
10264 return make_vector_type (innertype, nunits, VOIDmode);
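/* For example, build_vector_type (float_type_node, 4) yields a vector type
   of four floats, letting make_vector_type pick the machine mode.  With an
   integer mode, build_vector_type_for_mode instead derives the unit count
   from the mode: a 64-bit integer mode with a 16-bit inner type gives
   nunits = 64 / 16 = 4.  */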
10267 /* Build a truth vector type with NUNITS units for a data vector of VECTOR_SIZE bytes. */
10269 tree
10270 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10272 machine_mode mask_mode
10273 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10275 unsigned HOST_WIDE_INT vsize;
10276 if (mask_mode == BLKmode)
10277 vsize = vector_size * BITS_PER_UNIT;
10278 else
10279 vsize = GET_MODE_BITSIZE (mask_mode);
10281 unsigned HOST_WIDE_INT esize = vsize / nunits;
10282 gcc_assert (esize * nunits == vsize);
10284 tree bool_type = build_nonstandard_boolean_type (esize);
10286 return make_vector_type (bool_type, nunits, mask_mode);
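/* Worked example of the computation above: with nunits == 4 and
   vector_size == 16 (bytes) on a target without a dedicated mask mode,
   vsize is 16 * BITS_PER_UNIT == 128, esize is 128 / 4 == 32, and the
   result is a vector of four 32-bit boolean elements.  */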
10289 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10291 tree
10292 build_same_sized_truth_vector_type (tree vectype)
10294 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10295 return vectype;
10297 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10299 if (!size)
10300 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10302 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10305 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10307 tree
10308 build_opaque_vector_type (tree innertype, int nunits)
10310 tree t = make_vector_type (innertype, nunits, VOIDmode);
10311 tree cand;
10312 /* We always build the non-opaque variant before the opaque one,
10313 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10314 cand = TYPE_NEXT_VARIANT (t);
10315 if (cand
10316 && TYPE_VECTOR_OPAQUE (cand)
10317 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10318 return cand;
10319 /* Otherwise build a variant type and make sure to queue it after
10320 the non-opaque type. */
10321 cand = build_distinct_type_copy (t);
10322 TYPE_VECTOR_OPAQUE (cand) = true;
10323 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10324 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10325 TYPE_NEXT_VARIANT (t) = cand;
10326 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10327 return cand;
10331 /* Given an initializer INIT, return TRUE if INIT is zero or some
10332 aggregate of zeros. Otherwise return FALSE. */
10333 bool
10334 initializer_zerop (const_tree init)
10336 tree elt;
10338 STRIP_NOPS (init);
10340 switch (TREE_CODE (init))
10342 case INTEGER_CST:
10343 return integer_zerop (init);
10345 case REAL_CST:
10346 /* ??? Note that this is not correct for C4X float formats. There,
10347 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10348 negative exponent. */
10349 return real_zerop (init)
10350 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10352 case FIXED_CST:
10353 return fixed_zerop (init);
10355 case COMPLEX_CST:
10356 return integer_zerop (init)
10357 || (real_zerop (init)
10358 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10359 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10361 case VECTOR_CST:
10363 unsigned i;
10364 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10365 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10366 return false;
10367 return true;
10370 case CONSTRUCTOR:
10372 unsigned HOST_WIDE_INT idx;
10374 if (TREE_CLOBBER_P (init))
10375 return false;
10376 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10377 if (!initializer_zerop (elt))
10378 return false;
10379 return true;
10382 case STRING_CST:
10384 int i;
10386 /* We need to loop through all elements to handle cases like
10387 "\0" and "\0foobar". */
10388 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10389 if (TREE_STRING_POINTER (init)[i] != '\0')
10390 return false;
10392 return true;
10395 default:
10396 return false;
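/* By way of example, integer 0, fixed-point 0, +0.0 (but not -0.0), the
   string "\0\0", and any VECTOR_CST or non-clobber CONSTRUCTOR whose
   elements all pass the same test are treated as zero initializers;
   anything else, including clobbers, is not.  */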
10400 /* Check if vector VEC consists entirely of equal elements and
10401 that the number of elements corresponds to the type of VEC.
10402 The function returns the first element of the vector
10403 or NULL_TREE if the vector is not uniform. */
10404 tree
10405 uniform_vector_p (const_tree vec)
10407 tree first, t;
10408 unsigned i;
10410 if (vec == NULL_TREE)
10411 return NULL_TREE;
10413 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10415 if (TREE_CODE (vec) == VECTOR_CST)
10417 first = VECTOR_CST_ELT (vec, 0);
10418 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10419 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10420 return NULL_TREE;
10422 return first;
10425 else if (TREE_CODE (vec) == CONSTRUCTOR)
10427 first = error_mark_node;
10429 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10431 if (i == 0)
10433 first = t;
10434 continue;
10436 if (!operand_equal_p (first, t, 0))
10437 return NULL_TREE;
10439 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10440 return NULL_TREE;
10442 return first;
10445 return NULL_TREE;
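/* For instance, a VECTOR_CST such as { 3, 3, 3, 3 } returns its first
   element (the INTEGER_CST 3), while { 1, 2, 3, 4 } returns NULL_TREE.
   For a CONSTRUCTOR the element count must also match
   TYPE_VECTOR_SUBPARTS, so a partially initialized constructor is never
   considered uniform.  */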
10448 /* Build an empty statement at location LOC. */
10450 tree
10451 build_empty_stmt (location_t loc)
10453 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10454 SET_EXPR_LOCATION (t, loc);
10455 return t;
10459 /* Build an OpenMP clause with code CODE. LOC is the location of the
10460 clause. */
10462 tree
10463 build_omp_clause (location_t loc, enum omp_clause_code code)
10465 tree t;
10466 int size, length;
10468 length = omp_clause_num_ops[code];
10469 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10471 record_node_allocation_statistics (OMP_CLAUSE, size);
10473 t = (tree) ggc_internal_alloc (size);
10474 memset (t, 0, size);
10475 TREE_SET_CODE (t, OMP_CLAUSE);
10476 OMP_CLAUSE_SET_CODE (t, code);
10477 OMP_CLAUSE_LOCATION (t) = loc;
10479 return t;
10482 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10483 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10484 Except for the CODE and operand count field, other storage for the
10485 object is initialized to zeros. */
10487 tree
10488 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10490 tree t;
10491 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10493 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10494 gcc_assert (len >= 1);
10496 record_node_allocation_statistics (code, length);
10498 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10500 TREE_SET_CODE (t, code);
10502 /* Can't use TREE_OPERAND to store the length because if checking is
10503 enabled, it will try to check the length before we store it. :-P */
10504 t->exp.operands[0] = build_int_cst (sizetype, len);
10506 return t;
10509 /* Helper function for build_call_* functions; build a CALL_EXPR with
10510 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10511 the argument slots. */
10513 static tree
10514 build_call_1 (tree return_type, tree fn, int nargs)
10516 tree t;
10518 t = build_vl_exp (CALL_EXPR, nargs + 3);
10519 TREE_TYPE (t) = return_type;
10520 CALL_EXPR_FN (t) = fn;
10521 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10523 return t;
10526 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10527 FN and a null static chain slot. NARGS is the number of call arguments
10528 which are specified as "..." arguments. */
10530 tree
10531 build_call_nary (tree return_type, tree fn, int nargs, ...)
10533 tree ret;
10534 va_list args;
10535 va_start (args, nargs);
10536 ret = build_call_valist (return_type, fn, nargs, args);
10537 va_end (args);
10538 return ret;
10541 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10542 FN and a null static chain slot. NARGS is the number of call arguments
10543 which are specified as a va_list ARGS. */
10545 tree
10546 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10548 tree t;
10549 int i;
10551 t = build_call_1 (return_type, fn, nargs);
10552 for (i = 0; i < nargs; i++)
10553 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10554 process_call_operands (t);
10555 return t;
10558 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10559 FN and a null static chain slot. NARGS is the number of call arguments
10560 which are specified as a tree array ARGS. */
10562 tree
10563 build_call_array_loc (location_t loc, tree return_type, tree fn,
10564 int nargs, const tree *args)
10566 tree t;
10567 int i;
10569 t = build_call_1 (return_type, fn, nargs);
10570 for (i = 0; i < nargs; i++)
10571 CALL_EXPR_ARG (t, i) = args[i];
10572 process_call_operands (t);
10573 SET_EXPR_LOCATION (t, loc);
10574 return t;
10577 /* Like build_call_array, but takes a vec. */
10579 tree
10580 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10582 tree ret, t;
10583 unsigned int ix;
10585 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10586 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10587 CALL_EXPR_ARG (ret, ix) = t;
10588 process_call_operands (ret);
10589 return ret;
10592 /* Conveniently construct a function call expression. FNDECL names the
10593 function to be called and N arguments are passed in the array
10594 ARGARRAY. */
10596 tree
10597 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10599 tree fntype = TREE_TYPE (fndecl);
10600 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10602 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10605 /* Conveniently construct a function call expression. FNDECL names the
10606 function to be called and the arguments are passed in the vector
10607 VEC. */
10609 tree
10610 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10612 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10613 vec_safe_address (vec));
10617 /* Conveniently construct a function call expression. FNDECL names the
10618 function to be called, N is the number of arguments, and the "..."
10619 parameters are the argument expressions. */
10621 tree
10622 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10624 va_list ap;
10625 tree *argarray = XALLOCAVEC (tree, n);
10626 int i;
10628 va_start (ap, n);
10629 for (i = 0; i < n; i++)
10630 argarray[i] = va_arg (ap, tree);
10631 va_end (ap);
10632 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10635 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10636 varargs macros aren't supported by all bootstrap compilers. */
10638 tree
10639 build_call_expr (tree fndecl, int n, ...)
10641 va_list ap;
10642 tree *argarray = XALLOCAVEC (tree, n);
10643 int i;
10645 va_start (ap, n);
10646 for (i = 0; i < n; i++)
10647 argarray[i] = va_arg (ap, tree);
10648 va_end (ap);
10649 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
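/* Typical use, e.g. to synthesize a call to the memcpy builtin, assuming the
   builtin is available (DST, SRC and LEN stand for whatever argument trees
   the caller already has):

     tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fndecl, 3, dst, src, len);

   The location-taking variants above behave the same but also set
   EXPR_LOCATION on the result.  */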
10652 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10653 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10654 It will get gimplified later into an ordinary internal function. */
10656 tree
10657 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10658 tree type, int n, const tree *args)
10660 tree t = build_call_1 (type, NULL_TREE, n);
10661 for (int i = 0; i < n; ++i)
10662 CALL_EXPR_ARG (t, i) = args[i];
10663 SET_EXPR_LOCATION (t, loc);
10664 CALL_EXPR_IFN (t) = ifn;
10665 return t;
10668 /* Build internal call expression. This is just like CALL_EXPR, except
10669 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10670 internal function. */
10672 tree
10673 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10674 tree type, int n, ...)
10676 va_list ap;
10677 tree *argarray = XALLOCAVEC (tree, n);
10678 int i;
10680 va_start (ap, n);
10681 for (i = 0; i < n; i++)
10682 argarray[i] = va_arg (ap, tree);
10683 va_end (ap);
10684 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10687 /* Return a function call to FN, if the target is guaranteed to support it,
10688 or null otherwise.
10690 N is the number of arguments, passed in the "...", and TYPE is the
10691 type of the return value. */
10693 tree
10694 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10695 int n, ...)
10697 va_list ap;
10698 tree *argarray = XALLOCAVEC (tree, n);
10699 int i;
10701 va_start (ap, n);
10702 for (i = 0; i < n; i++)
10703 argarray[i] = va_arg (ap, tree);
10704 va_end (ap);
10705 if (internal_fn_p (fn))
10707 internal_fn ifn = as_internal_fn (fn);
10708 if (direct_internal_fn_p (ifn))
10710 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10711 if (!direct_internal_fn_supported_p (ifn, types,
10712 OPTIMIZE_FOR_BOTH))
10713 return NULL_TREE;
10715 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10717 else
10719 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10720 if (!fndecl)
10721 return NULL_TREE;
10722 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10726 /* Return a function call to the appropriate builtin alloca variant.
10728 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10729 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10730 bound for SIZE in case it is not a fixed value. */
10732 tree
10733 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10735 if (max_size >= 0)
10737 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10738 return
10739 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10741 else if (align > 0)
10743 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10744 return build_call_expr (t, 2, size, size_int (align));
10746 else
10748 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10749 return build_call_expr (t, 1, size);
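/* Dispatch summary: build_alloca_call_expr (size, 0, -1) yields a plain
   __builtin_alloca call, a non-zero ALIGN selects
   __builtin_alloca_with_align, and a non-negative MAX_SIZE selects
   __builtin_alloca_with_align_and_max.  For instance (SIZE and ALIGN being
   illustrative values):

     tree call = build_alloca_call_expr (size, align, -1);

   builds the two-argument aligned variant whenever ALIGN is non-zero.  */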
10753 /* Create a new constant string literal and return a char* pointer to it.
10754 The STRING_CST value is the LEN characters at STR. */
10755 tree
10756 build_string_literal (int len, const char *str)
10758 tree t, elem, index, type;
10760 t = build_string (len, str);
10761 elem = build_type_variant (char_type_node, 1, 0);
10762 index = build_index_type (size_int (len - 1));
10763 type = build_array_type (elem, index);
10764 TREE_TYPE (t) = type;
10765 TREE_CONSTANT (t) = 1;
10766 TREE_READONLY (t) = 1;
10767 TREE_STATIC (t) = 1;
10769 type = build_pointer_type (elem);
10770 t = build1 (ADDR_EXPR, type,
10771 build4 (ARRAY_REF, elem,
10772 t, integer_zero_node, NULL_TREE, NULL_TREE));
10773 return t;
10778 /* Return true if T (assumed to be a DECL) must be assigned a memory
10779 location. */
10781 bool
10782 needs_to_live_in_memory (const_tree t)
10784 return (TREE_ADDRESSABLE (t)
10785 || is_global_var (t)
10786 || (TREE_CODE (t) == RESULT_DECL
10787 && !DECL_BY_REFERENCE (t)
10788 && aggregate_value_p (t, current_function_decl)));
10791 /* Return the value of the integer constant X, sign-extended to a HOST_WIDE_INT. */
10793 HOST_WIDE_INT
10794 int_cst_value (const_tree x)
10796 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10797 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10799 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10800 gcc_assert (cst_and_fits_in_hwi (x));
10802 if (bits < HOST_BITS_PER_WIDE_INT)
10804 bool negative = ((val >> (bits - 1)) & 1) != 0;
10805 if (negative)
10806 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10807 else
10808 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10811 return val;
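/* Worked example of the sign extension above: for a constant of an 8-bit
   signed type whose low bits are 0xff, bits == 8 and the sign bit is set,
   so val is filled with ones above bit 7 and the function returns -1; with
   low bits 0x7f the high bits are masked off and 127 is returned.  */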
10814 /* If TYPE is an integral or pointer type, return an integer type with
10815 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10816 if TYPE is already an integer type of signedness UNSIGNEDP. */
10818 tree
10819 signed_or_unsigned_type_for (int unsignedp, tree type)
10821 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10822 return type;
10824 if (TREE_CODE (type) == VECTOR_TYPE)
10826 tree inner = TREE_TYPE (type);
10827 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10828 if (!inner2)
10829 return NULL_TREE;
10830 if (inner == inner2)
10831 return type;
10832 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10835 if (!INTEGRAL_TYPE_P (type)
10836 && !POINTER_TYPE_P (type)
10837 && TREE_CODE (type) != OFFSET_TYPE)
10838 return NULL_TREE;
10840 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10843 /* If TYPE is an integral or pointer type, return an integer type with
10844 the same precision which is unsigned, or itself if TYPE is already an
10845 unsigned integer type. */
10847 tree
10848 unsigned_type_for (tree type)
10850 return signed_or_unsigned_type_for (1, type);
10853 /* If TYPE is an integral or pointer type, return an integer type with
10854 the same precision which is signed, or itself if TYPE is already a
10855 signed integer type. */
10857 tree
10858 signed_type_for (tree type)
10860 return signed_or_unsigned_type_for (0, type);
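/* For example, unsigned_type_for (integer_type_node) returns an unsigned
   integer type with the precision of int, and applying it to a pointer
   type returns an unsigned integer type of pointer precision (both via
   build_nonstandard_integer_type).  Vector types are handled element-wise,
   and other non-integral, non-pointer types yield NULL_TREE.  */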
10863 /* If TYPE is a vector type, return a signed integer vector type with the
10864 same width and number of subparts. Otherwise return boolean_type_node. */
10866 tree
10867 truth_type_for (tree type)
10869 if (TREE_CODE (type) == VECTOR_TYPE)
10871 if (VECTOR_BOOLEAN_TYPE_P (type))
10872 return type;
10873 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
10874 GET_MODE_SIZE (TYPE_MODE (type)));
10876 else
10877 return boolean_type_node;
10880 /* Returns the largest value obtainable by casting something in INNER type to
10881 OUTER type. */
10883 tree
10884 upper_bound_in_type (tree outer, tree inner)
10886 unsigned int det = 0;
10887 unsigned oprec = TYPE_PRECISION (outer);
10888 unsigned iprec = TYPE_PRECISION (inner);
10889 unsigned prec;
10891 /* Compute a unique number for every combination. */
10892 det |= (oprec > iprec) ? 4 : 0;
10893 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10894 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10896 /* Determine the exponent to use. */
10897 switch (det)
10899 case 0:
10900 case 1:
10901 /* oprec <= iprec, outer: signed, inner: don't care. */
10902 prec = oprec - 1;
10903 break;
10904 case 2:
10905 case 3:
10906 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10907 prec = oprec;
10908 break;
10909 case 4:
10910 /* oprec > iprec, outer: signed, inner: signed. */
10911 prec = iprec - 1;
10912 break;
10913 case 5:
10914 /* oprec > iprec, outer: signed, inner: unsigned. */
10915 prec = iprec;
10916 break;
10917 case 6:
10918 /* oprec > iprec, outer: unsigned, inner: signed. */
10919 prec = oprec;
10920 break;
10921 case 7:
10922 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10923 prec = iprec;
10924 break;
10925 default:
10926 gcc_unreachable ();
10929 return wide_int_to_tree (outer,
10930 wi::mask (prec, false, TYPE_PRECISION (outer)));
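/* Two worked instances of the switch above: for a signed 8-bit OUTER and an
   unsigned 16-bit INNER, det == 1, prec == 7 and the result is 127; for an
   unsigned 32-bit OUTER and a signed 16-bit INNER, det == 6, prec == 32 and
   the result is 0xffffffff.  */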
10933 /* Returns the smallest value obtainable by casting something in INNER type to
10934 OUTER type. */
10936 tree
10937 lower_bound_in_type (tree outer, tree inner)
10939 unsigned oprec = TYPE_PRECISION (outer);
10940 unsigned iprec = TYPE_PRECISION (inner);
10942 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10943 and obtain 0. */
10944 if (TYPE_UNSIGNED (outer)
10945 /* If we are widening something of an unsigned type, OUTER type
10946 contains all values of INNER type. In particular, both INNER
10947 and OUTER types have zero in common. */
10948 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10949 return build_int_cst (outer, 0);
10950 else
10952 /* If we are widening a signed type to another signed type, we
10953 want to obtain -2^^(iprec-1). If we are keeping the
10954 precision or narrowing to a signed type, we want to obtain
10955 -2^(oprec-1). */
10956 unsigned prec = oprec > iprec ? iprec : oprec;
10957 return wide_int_to_tree (outer,
10958 wi::mask (prec - 1, true,
10959 TYPE_PRECISION (outer)));
10963 /* Return nonzero if two operands that are suitable for PHI nodes are
10964 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10965 SSA_NAME or invariant. Note that this is strictly an optimization.
10966 That is, callers of this function can directly call operand_equal_p
10967 and get the same result, only slower. */
10969 int
10970 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10972 if (arg0 == arg1)
10973 return 1;
10974 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10975 return 0;
10976 return operand_equal_p (arg0, arg1, 0);
10979 /* Returns the number of zeros at the end of the binary representation of X. */
10981 tree
10982 num_ending_zeros (const_tree x)
10984 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
10988 #define WALK_SUBTREE(NODE) \
10989 do \
10991 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10992 if (result) \
10993 return result; \
10995 while (0)
10997 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
10998 be walked whenever a type is seen in the tree. The rest of the operands and
10999 the return value are as for walk_tree. */
11001 static tree
11002 walk_type_fields (tree type, walk_tree_fn func, void *data,
11003 hash_set<tree> *pset, walk_tree_lh lh)
11005 tree result = NULL_TREE;
11007 switch (TREE_CODE (type))
11009 case POINTER_TYPE:
11010 case REFERENCE_TYPE:
11011 case VECTOR_TYPE:
11012 /* We have to worry about mutually recursive pointers. These can't
11013 be written in C. They can in Ada. It's pathological, but
11014 there's an ACATS test (c38102a) that checks it. Deal with this
11015 by checking if we're pointing to another pointer, that one
11016 points to another pointer, that one does too, and we have no htab.
11017 If so, get a hash table. We check three levels deep to avoid
11018 the cost of the hash table if we don't need one. */
11019 if (POINTER_TYPE_P (TREE_TYPE (type))
11020 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11021 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11022 && !pset)
11024 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11025 func, data);
11026 if (result)
11027 return result;
11029 break;
11032 /* fall through */
11034 case COMPLEX_TYPE:
11035 WALK_SUBTREE (TREE_TYPE (type));
11036 break;
11038 case METHOD_TYPE:
11039 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11041 /* Fall through. */
11043 case FUNCTION_TYPE:
11044 WALK_SUBTREE (TREE_TYPE (type));
11046 tree arg;
11048 /* We never want to walk into default arguments. */
11049 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11050 WALK_SUBTREE (TREE_VALUE (arg));
11052 break;
11054 case ARRAY_TYPE:
11055 /* Don't follow this node's type if it is a pointer, for fear that
11056 we'll have infinite recursion. If we have a PSET, then we
11057 need not fear. */
11058 if (pset
11059 || (!POINTER_TYPE_P (TREE_TYPE (type))
11060 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11061 WALK_SUBTREE (TREE_TYPE (type));
11062 WALK_SUBTREE (TYPE_DOMAIN (type));
11063 break;
11065 case OFFSET_TYPE:
11066 WALK_SUBTREE (TREE_TYPE (type));
11067 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11068 break;
11070 default:
11071 break;
11074 return NULL_TREE;
11077 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11078 called with the DATA and the address of each sub-tree. If FUNC returns a
11079 non-NULL value, the traversal is stopped, and the value returned by FUNC
11080 is returned. If PSET is non-NULL it is used to record the nodes visited,
11081 and to avoid visiting a node more than once. */
11083 tree
11084 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11085 hash_set<tree> *pset, walk_tree_lh lh)
11087 enum tree_code code;
11088 int walk_subtrees;
11089 tree result;
11091 #define WALK_SUBTREE_TAIL(NODE) \
11092 do \
11094 tp = & (NODE); \
11095 goto tail_recurse; \
11097 while (0)
11099 tail_recurse:
11100 /* Skip empty subtrees. */
11101 if (!*tp)
11102 return NULL_TREE;
11104 /* Don't walk the same tree twice, if the user has requested
11105 that we avoid doing so. */
11106 if (pset && pset->add (*tp))
11107 return NULL_TREE;
11109 /* Call the function. */
11110 walk_subtrees = 1;
11111 result = (*func) (tp, &walk_subtrees, data);
11113 /* If we found something, return it. */
11114 if (result)
11115 return result;
11117 code = TREE_CODE (*tp);
11119 /* Even if we didn't, FUNC may have decided that there was nothing
11120 interesting below this point in the tree. */
11121 if (!walk_subtrees)
11123 /* But we still need to check our siblings. */
11124 if (code == TREE_LIST)
11125 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11126 else if (code == OMP_CLAUSE)
11127 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11128 else
11129 return NULL_TREE;
11132 if (lh)
11134 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11135 if (result || !walk_subtrees)
11136 return result;
11139 switch (code)
11141 case ERROR_MARK:
11142 case IDENTIFIER_NODE:
11143 case INTEGER_CST:
11144 case REAL_CST:
11145 case FIXED_CST:
11146 case VECTOR_CST:
11147 case STRING_CST:
11148 case BLOCK:
11149 case PLACEHOLDER_EXPR:
11150 case SSA_NAME:
11151 case FIELD_DECL:
11152 case RESULT_DECL:
11153 /* None of these have subtrees other than those already walked
11154 above. */
11155 break;
11157 case TREE_LIST:
11158 WALK_SUBTREE (TREE_VALUE (*tp));
11159 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11160 break;
11162 case TREE_VEC:
11164 int len = TREE_VEC_LENGTH (*tp);
11166 if (len == 0)
11167 break;
11169 /* Walk all elements but the first. */
11170 while (--len)
11171 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11173 /* Now walk the first one as a tail call. */
11174 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11177 case COMPLEX_CST:
11178 WALK_SUBTREE (TREE_REALPART (*tp));
11179 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11181 case CONSTRUCTOR:
11183 unsigned HOST_WIDE_INT idx;
11184 constructor_elt *ce;
11186 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11187 idx++)
11188 WALK_SUBTREE (ce->value);
11190 break;
11192 case SAVE_EXPR:
11193 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11195 case BIND_EXPR:
11197 tree decl;
11198 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11200 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11201 into declarations that are just mentioned, rather than
11202 declared; they don't really belong to this part of the tree.
11203 And, we can see cycles: the initializer for a declaration
11204 can refer to the declaration itself. */
11205 WALK_SUBTREE (DECL_INITIAL (decl));
11206 WALK_SUBTREE (DECL_SIZE (decl));
11207 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11209 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11212 case STATEMENT_LIST:
11214 tree_stmt_iterator i;
11215 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11216 WALK_SUBTREE (*tsi_stmt_ptr (i));
11218 break;
11220 case OMP_CLAUSE:
11221 switch (OMP_CLAUSE_CODE (*tp))
11223 case OMP_CLAUSE_GANG:
11224 case OMP_CLAUSE__GRIDDIM_:
11225 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11226 /* FALLTHRU */
11228 case OMP_CLAUSE_ASYNC:
11229 case OMP_CLAUSE_WAIT:
11230 case OMP_CLAUSE_WORKER:
11231 case OMP_CLAUSE_VECTOR:
11232 case OMP_CLAUSE_NUM_GANGS:
11233 case OMP_CLAUSE_NUM_WORKERS:
11234 case OMP_CLAUSE_VECTOR_LENGTH:
11235 case OMP_CLAUSE_PRIVATE:
11236 case OMP_CLAUSE_SHARED:
11237 case OMP_CLAUSE_FIRSTPRIVATE:
11238 case OMP_CLAUSE_COPYIN:
11239 case OMP_CLAUSE_COPYPRIVATE:
11240 case OMP_CLAUSE_FINAL:
11241 case OMP_CLAUSE_IF:
11242 case OMP_CLAUSE_NUM_THREADS:
11243 case OMP_CLAUSE_SCHEDULE:
11244 case OMP_CLAUSE_UNIFORM:
11245 case OMP_CLAUSE_DEPEND:
11246 case OMP_CLAUSE_NUM_TEAMS:
11247 case OMP_CLAUSE_THREAD_LIMIT:
11248 case OMP_CLAUSE_DEVICE:
11249 case OMP_CLAUSE_DIST_SCHEDULE:
11250 case OMP_CLAUSE_SAFELEN:
11251 case OMP_CLAUSE_SIMDLEN:
11252 case OMP_CLAUSE_ORDERED:
11253 case OMP_CLAUSE_PRIORITY:
11254 case OMP_CLAUSE_GRAINSIZE:
11255 case OMP_CLAUSE_NUM_TASKS:
11256 case OMP_CLAUSE_HINT:
11257 case OMP_CLAUSE_TO_DECLARE:
11258 case OMP_CLAUSE_LINK:
11259 case OMP_CLAUSE_USE_DEVICE_PTR:
11260 case OMP_CLAUSE_IS_DEVICE_PTR:
11261 case OMP_CLAUSE__LOOPTEMP_:
11262 case OMP_CLAUSE__SIMDUID_:
11263 case OMP_CLAUSE__CILK_FOR_COUNT_:
11264 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11265 /* FALLTHRU */
11267 case OMP_CLAUSE_INDEPENDENT:
11268 case OMP_CLAUSE_NOWAIT:
11269 case OMP_CLAUSE_DEFAULT:
11270 case OMP_CLAUSE_UNTIED:
11271 case OMP_CLAUSE_MERGEABLE:
11272 case OMP_CLAUSE_PROC_BIND:
11273 case OMP_CLAUSE_INBRANCH:
11274 case OMP_CLAUSE_NOTINBRANCH:
11275 case OMP_CLAUSE_FOR:
11276 case OMP_CLAUSE_PARALLEL:
11277 case OMP_CLAUSE_SECTIONS:
11278 case OMP_CLAUSE_TASKGROUP:
11279 case OMP_CLAUSE_NOGROUP:
11280 case OMP_CLAUSE_THREADS:
11281 case OMP_CLAUSE_SIMD:
11282 case OMP_CLAUSE_DEFAULTMAP:
11283 case OMP_CLAUSE_AUTO:
11284 case OMP_CLAUSE_SEQ:
11285 case OMP_CLAUSE_TILE:
11286 case OMP_CLAUSE__SIMT_:
11287 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11289 case OMP_CLAUSE_LASTPRIVATE:
11290 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11291 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11292 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11294 case OMP_CLAUSE_COLLAPSE:
11296 int i;
11297 for (i = 0; i < 3; i++)
11298 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11299 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11302 case OMP_CLAUSE_LINEAR:
11303 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11304 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11305 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11306 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11308 case OMP_CLAUSE_ALIGNED:
11309 case OMP_CLAUSE_FROM:
11310 case OMP_CLAUSE_TO:
11311 case OMP_CLAUSE_MAP:
11312 case OMP_CLAUSE__CACHE_:
11313 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11314 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11315 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11317 case OMP_CLAUSE_REDUCTION:
11319 int i;
11320 for (i = 0; i < 5; i++)
11321 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11322 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11325 default:
11326 gcc_unreachable ();
11328 break;
11330 case TARGET_EXPR:
11332 int i, len;
11334 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11335 But, we only want to walk once. */
11336 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11337 for (i = 0; i < len; ++i)
11338 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11339 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11342 case DECL_EXPR:
11343 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11344 defining. We only want to walk into these fields of a type in this
11345 case and not in the general case of a mere reference to the type.
11347 The criterion is as follows: if the field can be an expression, it
11348 must be walked only here. This should be in keeping with the fields
11349 that are directly gimplified in gimplify_type_sizes in order for the
11350 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11351 variable-sized types.
11353 Note that DECLs get walked as part of processing the BIND_EXPR. */
11354 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11356 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11357 if (TREE_CODE (*type_p) == ERROR_MARK)
11358 return NULL_TREE;
11360 /* Call the function for the type. See if it returns anything or
11361 doesn't want us to continue. If we are to continue, walk both
11362 the normal fields and those for the declaration case. */
11363 result = (*func) (type_p, &walk_subtrees, data);
11364 if (result || !walk_subtrees)
11365 return result;
11367 /* But do not walk a pointed-to type since it may itself need to
11368 be walked in the declaration case if it isn't anonymous. */
11369 if (!POINTER_TYPE_P (*type_p))
11371 result = walk_type_fields (*type_p, func, data, pset, lh);
11372 if (result)
11373 return result;
11376 /* If this is a record type, also walk the fields. */
11377 if (RECORD_OR_UNION_TYPE_P (*type_p))
11379 tree field;
11381 for (field = TYPE_FIELDS (*type_p); field;
11382 field = DECL_CHAIN (field))
11384 /* We'd like to look at the type of the field, but we can
11385 easily get infinite recursion. So assume it's pointed
11386 to elsewhere in the tree. Also, ignore things that
11387 aren't fields. */
11388 if (TREE_CODE (field) != FIELD_DECL)
11389 continue;
11391 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11392 WALK_SUBTREE (DECL_SIZE (field));
11393 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11394 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11395 WALK_SUBTREE (DECL_QUALIFIER (field));
11399 /* Same for scalar types. */
11400 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11401 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11402 || TREE_CODE (*type_p) == INTEGER_TYPE
11403 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11404 || TREE_CODE (*type_p) == REAL_TYPE)
11406 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11407 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11410 WALK_SUBTREE (TYPE_SIZE (*type_p));
11411 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11413 /* FALLTHRU */
11415 default:
11416 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11418 int i, len;
11420 /* Walk over all the sub-trees of this operand. */
11421 len = TREE_OPERAND_LENGTH (*tp);
11423 /* Go through the subtrees. We need to do this in forward order so
11424 that the scope of a FOR_EXPR is handled properly. */
11425 if (len)
11427 for (i = 0; i < len - 1; ++i)
11428 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11429 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11432 /* If this is a type, walk the needed fields in the type. */
11433 else if (TYPE_P (*tp))
11434 return walk_type_fields (*tp, func, data, pset, lh);
11435 break;
11438 /* We didn't find what we were looking for. */
11439 return NULL_TREE;
11441 #undef WALK_SUBTREE_TAIL
11443 #undef WALK_SUBTREE
11445 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11447 tree
11448 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11449 walk_tree_lh lh)
11451 tree result;
11453 hash_set<tree> pset;
11454 result = walk_tree_1 (tp, func, data, &pset, lh);
11455 return result;
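/* A minimal usage sketch (count_nodes_r, expr and n are illustrative names,
   not definitions from this file): a callback that counts the visited
   nodes and never cuts the walk short.

     static tree
     count_nodes_r (tree *tp ATTRIBUTE_UNUSED,
                    int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates (&expr, count_nodes_r, &n);

   Returning a non-NULL tree from the callback stops the walk and becomes
   the return value of walk_tree_1.  */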
11459 tree
11460 tree_block (tree t)
11462 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11464 if (IS_EXPR_CODE_CLASS (c))
11465 return LOCATION_BLOCK (t->exp.locus);
11466 gcc_unreachable ();
11467 return NULL;
11470 void
11471 tree_set_block (tree t, tree b)
11473 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11475 if (IS_EXPR_CODE_CLASS (c))
11477 t->exp.locus = set_block (t->exp.locus, b);
11479 else
11480 gcc_unreachable ();
11483 /* Create a nameless artificial label and put it in the current
11484 function context. The label has a location of LOC. Returns the
11485 newly created label. */
11487 tree
11488 create_artificial_label (location_t loc)
11490 tree lab = build_decl (loc,
11491 LABEL_DECL, NULL_TREE, void_type_node);
11493 DECL_ARTIFICIAL (lab) = 1;
11494 DECL_IGNORED_P (lab) = 1;
11495 DECL_CONTEXT (lab) = current_function_decl;
11496 return lab;
11499 /* Given a tree, try to return a useful variable name that we can use
11500 to prefix a temporary that is being assigned the value of the tree.
11501 I.e. given <temp> = &A, return A. */
11503 const char *
11504 get_name (tree t)
11506 tree stripped_decl;
11508 stripped_decl = t;
11509 STRIP_NOPS (stripped_decl);
11510 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11511 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11512 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11514 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11515 if (!name)
11516 return NULL;
11517 return IDENTIFIER_POINTER (name);
11519 else
11521 switch (TREE_CODE (stripped_decl))
11523 case ADDR_EXPR:
11524 return get_name (TREE_OPERAND (stripped_decl, 0));
11525 default:
11526 return NULL;
11531 /* Return true if FNTYPE has a variable argument list. */
11533 bool
11534 stdarg_p (const_tree fntype)
11536 function_args_iterator args_iter;
11537 tree n = NULL_TREE, t;
11539 if (!fntype)
11540 return false;
11542 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11544 n = t;
11547 return n != NULL_TREE && n != void_type_node;
11550 /* Return true if FNTYPE has a prototype. */
11552 bool
11553 prototype_p (const_tree fntype)
11555 tree t;
11557 gcc_assert (fntype != NULL_TREE);
11559 t = TYPE_ARG_TYPES (fntype);
11560 return (t != NULL_TREE);
11563 /* If BLOCK is inlined from an __attribute__((__artificial__))
11564 routine, return a pointer to the location from where it was
11565 called. */
11566 location_t *
11567 block_nonartificial_location (tree block)
11569 location_t *ret = NULL;
11571 while (block && TREE_CODE (block) == BLOCK
11572 && BLOCK_ABSTRACT_ORIGIN (block))
11574 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11576 while (TREE_CODE (ao) == BLOCK
11577 && BLOCK_ABSTRACT_ORIGIN (ao)
11578 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11579 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11581 if (TREE_CODE (ao) == FUNCTION_DECL)
11583 /* If AO is an artificial inline, point RET to the
11584 call site locus at which it has been inlined and continue
11585 the loop, in case AO's caller is also an artificial
11586 inline. */
11587 if (DECL_DECLARED_INLINE_P (ao)
11588 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11589 ret = &BLOCK_SOURCE_LOCATION (block);
11590 else
11591 break;
11593 else if (TREE_CODE (ao) != BLOCK)
11594 break;
11596 block = BLOCK_SUPERCONTEXT (block);
11598 return ret;
11602 /* If EXP is inlined from an __attribute__((__artificial__))
11603 function, return the location of the original call expression. */
11605 location_t
11606 tree_nonartificial_location (tree exp)
11608 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11610 if (loc)
11611 return *loc;
11612 else
11613 return EXPR_LOCATION (exp);
11617 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11618 nodes. */
11620 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11622 hashval_t
11623 cl_option_hasher::hash (tree x)
11625 const_tree const t = x;
11626 const char *p;
11627 size_t i;
11628 size_t len = 0;
11629 hashval_t hash = 0;
11631 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11633 p = (const char *)TREE_OPTIMIZATION (t);
11634 len = sizeof (struct cl_optimization);
11637 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11638 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11640 else
11641 gcc_unreachable ();
11643 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11644 something else. */
11645 for (i = 0; i < len; i++)
11646 if (p[i])
11647 hash = (hash << 4) ^ ((i << 2) | p[i]);
11649 return hash;
11652 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11653 TARGET_OPTION_NODE) is the same as that given by *Y, another such
11654 node. */
11656 bool
11657 cl_option_hasher::equal (tree x, tree y)
11659 const_tree const xt = x;
11660 const_tree const yt = y;
11661 const char *xp;
11662 const char *yp;
11663 size_t len;
11665 if (TREE_CODE (xt) != TREE_CODE (yt))
11666 return 0;
11668 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11670 xp = (const char *)TREE_OPTIMIZATION (xt);
11671 yp = (const char *)TREE_OPTIMIZATION (yt);
11672 len = sizeof (struct cl_optimization);
11675 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11677 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11678 TREE_TARGET_OPTION (yt));
11681 else
11682 gcc_unreachable ();
11684 return (memcmp (xp, yp, len) == 0);
11687 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11689 tree
11690 build_optimization_node (struct gcc_options *opts)
11692 tree t;
11694 /* Use the cache of optimization nodes. */
11696 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11697 opts);
11699 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11700 t = *slot;
11701 if (!t)
11703 /* Insert this one into the hash table. */
11704 t = cl_optimization_node;
11705 *slot = t;
11707 /* Make a new node for next time round. */
11708 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11711 return t;
11714 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11716 tree
11717 build_target_option_node (struct gcc_options *opts)
11719 tree t;
11721 /* Use the cache of target option nodes. */
11723 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11724 opts);
11726 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11727 t = *slot;
11728 if (!t)
11730 /* Insert this one into the hash table. */
11731 t = cl_target_option_node;
11732 *slot = t;
11734 /* Make a new node for next time round. */
11735 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11738 return t;
11741 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11742 so that they aren't saved during PCH writing. */
11744 void
11745 prepare_target_option_nodes_for_pch (void)
11747 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11748 for (; iter != cl_option_hash_table->end (); ++iter)
11749 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11750 TREE_TARGET_GLOBALS (*iter) = NULL;
11753 /* Determine the "ultimate origin" of a block. The block may be an inlined
11754 instance of an inlined instance of a block which is local to an inline
11755 function, so we have to trace all of the way back through the origin chain
11756 to find out what sort of node actually served as the original seed for the
11757 given block. */
11759 tree
11760 block_ultimate_origin (const_tree block)
11762 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11764 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11765 we're trying to output the abstract instance of this function. */
11766 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11767 return NULL_TREE;
11769 if (immediate_origin == NULL_TREE)
11770 return NULL_TREE;
11771 else
11773 tree ret_val;
11774 tree lookahead = immediate_origin;
11778 ret_val = lookahead;
11779 lookahead = (TREE_CODE (ret_val) == BLOCK
11780 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11782 while (lookahead != NULL && lookahead != ret_val);
11784 /* The block's abstract origin chain may not be the *ultimate* origin of
11785 the block. It could lead to a DECL that has an abstract origin set.
11786 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11787 will give us if it has one). Note that DECL's abstract origins are
11788 supposed to be the most distant ancestor (or so decl_ultimate_origin
11789 claims), so we don't need to loop following the DECL origins. */
11790 if (DECL_P (ret_val))
11791 return DECL_ORIGIN (ret_val);
11793 return ret_val;
11797 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11798 no instruction. */
11800 bool
11801 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11803 /* Do not strip casts into or out of differing address spaces. */
11804 if (POINTER_TYPE_P (outer_type)
11805 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11807 if (!POINTER_TYPE_P (inner_type)
11808 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11809 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11810 return false;
11812 else if (POINTER_TYPE_P (inner_type)
11813 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11815 /* We already know that outer_type is not a pointer with
11816 a non-generic address space. */
11817 return false;
11820 /* Use precision rather than machine mode when we can, which gives
11821 the correct answer even for submode (bit-field) types. */
11822 if ((INTEGRAL_TYPE_P (outer_type)
11823 || POINTER_TYPE_P (outer_type)
11824 || TREE_CODE (outer_type) == OFFSET_TYPE)
11825 && (INTEGRAL_TYPE_P (inner_type)
11826 || POINTER_TYPE_P (inner_type)
11827 || TREE_CODE (inner_type) == OFFSET_TYPE))
11828 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11830 /* Otherwise fall back on comparing machine modes (e.g. for
11831 aggregate types, floats). */
11832 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
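/* Illustrative sketch, not a checked testcase: with the standard C
   type nodes one would expect

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)

   to be true, since the precisions match and only the signedness
   differs, while on an LP64 target

     tree_nop_conversion_p (long_integer_type_node, integer_type_node)

   is false because the precisions (64 vs. 32 bits) differ.  */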
11835 /* Return true iff conversion in EXP generates no instruction. Mark
11836 it inline so that we fully inline into the stripping functions even
11837 though we have two uses of this function. */
11839 static inline bool
11840 tree_nop_conversion (const_tree exp)
11842 tree outer_type, inner_type;
11844 if (!CONVERT_EXPR_P (exp)
11845 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11846 return false;
11847 if (TREE_OPERAND (exp, 0) == error_mark_node)
11848 return false;
11850 outer_type = TREE_TYPE (exp);
11851 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11853 if (!inner_type)
11854 return false;
11856 return tree_nop_conversion_p (outer_type, inner_type);
11859 /* Return true iff conversion in EXP generates no instruction. Don't
11860 consider conversions changing the signedness. */
11862 static bool
11863 tree_sign_nop_conversion (const_tree exp)
11865 tree outer_type, inner_type;
11867 if (!tree_nop_conversion (exp))
11868 return false;
11870 outer_type = TREE_TYPE (exp);
11871 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11873 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11874 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11877 /* Strip conversions from EXP according to tree_nop_conversion and
11878 return the resulting expression. */
11880 tree
11881 tree_strip_nop_conversions (tree exp)
11883 while (tree_nop_conversion (exp))
11884 exp = TREE_OPERAND (exp, 0);
11885 return exp;
11888 /* Strip conversions from EXP according to tree_sign_nop_conversion
11889 and return the resulting expression. */
11891 tree
11892 tree_strip_sign_nop_conversions (tree exp)
11894 while (tree_sign_nop_conversion (exp))
11895 exp = TREE_OPERAND (exp, 0);
11896 return exp;
11899 /* Avoid any floating point extensions from EXP. */
11900 tree
11901 strip_float_extensions (tree exp)
11903 tree sub, expt, subt;
11905 /* For a floating point constant, look up the narrowest type that can hold
11906 it properly and handle it like (type)(narrowest_type)constant.
11907 This way we can optimize for instance a=a*2.0 where "a" is float
11908 but 2.0 is a double constant. */
11909 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11911 REAL_VALUE_TYPE orig;
11912 tree type = NULL;
11914 orig = TREE_REAL_CST (exp);
11915 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11916 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11917 type = float_type_node;
11918 else if (TYPE_PRECISION (TREE_TYPE (exp))
11919 > TYPE_PRECISION (double_type_node)
11920 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11921 type = double_type_node;
11922 if (type)
11923 return build_real_truncate (type, orig);
11926 if (!CONVERT_EXPR_P (exp))
11927 return exp;
11929 sub = TREE_OPERAND (exp, 0);
11930 subt = TREE_TYPE (sub);
11931 expt = TREE_TYPE (exp);
11933 if (!FLOAT_TYPE_P (subt))
11934 return exp;
11936 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11937 return exp;
11939 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11940 return exp;
11942 return strip_float_extensions (sub);
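/* Sketch of the effect on hypothetical trees: for the C expression
   (double) f where F is a float VAR_DECL, the conversion is peeled
   and F itself is returned; a REAL_CST 2.0 of type double comes back
   as a float REAL_CST because the value truncates exactly, which is
   what allows a = a * 2.0 to be evaluated in single precision.  */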
11945 /* Strip out all handled components that produce invariant
11946 offsets. */
11948 const_tree
11949 strip_invariant_refs (const_tree op)
11951 while (handled_component_p (op))
11953 switch (TREE_CODE (op))
11955 case ARRAY_REF:
11956 case ARRAY_RANGE_REF:
11957 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11958 || TREE_OPERAND (op, 2) != NULL_TREE
11959 || TREE_OPERAND (op, 3) != NULL_TREE)
11960 return NULL;
11961 break;
11963 case COMPONENT_REF:
11964 if (TREE_OPERAND (op, 2) != NULL_TREE)
11965 return NULL;
11966 break;
11968 default:;
11970 op = TREE_OPERAND (op, 0);
11973 return op;
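/* Rough example, for illustration only: for the reference S.a[2].f
   with a literal index, the COMPONENT_REFs and the ARRAY_REF all
   produce invariant offsets and the base S is returned, whereas
   S.a[i] with a variable index makes the walk give up and return
   NULL.  */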
11976 static GTY(()) tree gcc_eh_personality_decl;
11978 /* Return the GCC personality function decl. */
11980 tree
11981 lhd_gcc_personality (void)
11983 if (!gcc_eh_personality_decl)
11984 gcc_eh_personality_decl = build_personality_function ("gcc");
11985 return gcc_eh_personality_decl;
11988 /* TARGET is the call target of a GIMPLE call statement
11989 (obtained by gimple_call_fn). Return true if it is an
11990 OBJ_TYPE_REF representing a virtual call of a C++ method.
11991 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11992 through a cast, where the middle-end devirtualization machinery
11993 can't apply.) */
11995 bool
11996 virtual_method_call_p (const_tree target)
11998 if (TREE_CODE (target) != OBJ_TYPE_REF)
11999 return false;
12000 tree t = TREE_TYPE (target);
12001 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12002 t = TREE_TYPE (t);
12003 if (TREE_CODE (t) == FUNCTION_TYPE)
12004 return false;
12005 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12006 /* If we do not have BINFO associated, it means that type was built
12007 without devirtualization enabled. Do not consider this a virtual
12008 call. */
12009 if (!TYPE_BINFO (obj_type_ref_class (target)))
12010 return false;
12011 return true;
12014 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12016 tree
12017 obj_type_ref_class (const_tree ref)
12019 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12020 ref = TREE_TYPE (ref);
12021 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12022 ref = TREE_TYPE (ref);
12023 /* We look for the type THIS points to. ObjC also builds
12024 OBJ_TYPE_REF with non-method calls; their first parameter
12025 ID however also corresponds to the class type. */
12026 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12027 || TREE_CODE (ref) == FUNCTION_TYPE);
12028 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12029 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12030 return TREE_TYPE (ref);
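/* Sketch: for a C++ virtual call p->f () the call target is an
   OBJ_TYPE_REF whose type is a pointer to a METHOD_TYPE; the first
   argument type of that METHOD_TYPE is the "this" pointer type, so
   obj_type_ref_class hands back the RECORD_TYPE it points to, and
   virtual_method_call_p is true as long as that class has a
   TYPE_BINFO attached.  */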
12033 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12035 static tree
12036 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12038 unsigned int i;
12039 tree base_binfo, b;
12041 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12042 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12043 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12044 return base_binfo;
12045 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12046 return b;
12047 return NULL;
12050 /* Try to find a base info of BINFO that would have its field decl at offset
12051 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12052 found, return it; otherwise return NULL_TREE. */
12054 tree
12055 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12057 tree type = BINFO_TYPE (binfo);
12059 while (true)
12061 HOST_WIDE_INT pos, size;
12062 tree fld;
12063 int i;
12065 if (types_same_for_odr (type, expected_type))
12066 return binfo;
12067 if (offset < 0)
12068 return NULL_TREE;
12070 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12072 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12073 continue;
12075 pos = int_bit_position (fld);
12076 size = tree_to_uhwi (DECL_SIZE (fld));
12077 if (pos <= offset && (pos + size) > offset)
12078 break;
12080 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12081 return NULL_TREE;
12083 /* Offset 0 indicates the primary base, whose vtable contents are
12084 represented in the binfo for the derived class. */
12085 else if (offset != 0)
12087 tree found_binfo = NULL, base_binfo;
12088 /* Offsets in BINFO are in bytes relative to the whole structure
12089 while POS is in bits relative to the containing field. */
12090 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12091 / BITS_PER_UNIT);
12093 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12094 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12095 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12097 found_binfo = base_binfo;
12098 break;
12100 if (found_binfo)
12101 binfo = found_binfo;
12102 else
12103 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12104 binfo_offset);
12107 type = TREE_TYPE (fld);
12108 offset -= pos;
12112 /* Returns true if X is a typedef decl. */
12114 bool
12115 is_typedef_decl (const_tree x)
12117 return (x && TREE_CODE (x) == TYPE_DECL
12118 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12121 /* Returns true iff TYPE is a type variant created for a typedef. */
12123 bool
12124 typedef_variant_p (const_tree type)
12126 return is_typedef_decl (TYPE_NAME (type));
12129 /* Warn about a use of an identifier which was marked deprecated. */
12130 void
12131 warn_deprecated_use (tree node, tree attr)
12133 const char *msg;
12135 if (node == 0 || !warn_deprecated_decl)
12136 return;
12138 if (!attr)
12140 if (DECL_P (node))
12141 attr = DECL_ATTRIBUTES (node);
12142 else if (TYPE_P (node))
12144 tree decl = TYPE_STUB_DECL (node);
12145 if (decl)
12146 attr = lookup_attribute ("deprecated",
12147 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12151 if (attr)
12152 attr = lookup_attribute ("deprecated", attr);
12154 if (attr)
12155 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12156 else
12157 msg = NULL;
12159 bool w;
12160 if (DECL_P (node))
12162 if (msg)
12163 w = warning (OPT_Wdeprecated_declarations,
12164 "%qD is deprecated: %s", node, msg);
12165 else
12166 w = warning (OPT_Wdeprecated_declarations,
12167 "%qD is deprecated", node);
12168 if (w)
12169 inform (DECL_SOURCE_LOCATION (node), "declared here");
12171 else if (TYPE_P (node))
12173 tree what = NULL_TREE;
12174 tree decl = TYPE_STUB_DECL (node);
12176 if (TYPE_NAME (node))
12178 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12179 what = TYPE_NAME (node);
12180 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12181 && DECL_NAME (TYPE_NAME (node)))
12182 what = DECL_NAME (TYPE_NAME (node));
12185 if (decl)
12187 if (what)
12189 if (msg)
12190 w = warning (OPT_Wdeprecated_declarations,
12191 "%qE is deprecated: %s", what, msg);
12192 else
12193 w = warning (OPT_Wdeprecated_declarations,
12194 "%qE is deprecated", what);
12196 else
12198 if (msg)
12199 w = warning (OPT_Wdeprecated_declarations,
12200 "type is deprecated: %s", msg);
12201 else
12202 w = warning (OPT_Wdeprecated_declarations,
12203 "type is deprecated");
12205 if (w)
12206 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12208 else
12210 if (what)
12212 if (msg)
12213 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12214 what, msg);
12215 else
12216 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12218 else
12220 if (msg)
12221 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12222 msg);
12223 else
12224 warning (OPT_Wdeprecated_declarations, "type is deprecated");
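/* For illustration (hypothetical declaration): given

     int old_api (void) __attribute__ ((deprecated ("use new_api")));

   the "deprecated" attribute carries the STRING_CST in
   TREE_VALUE (TREE_VALUE (attr)), so a use of old_api is diagnosed
   along the lines of "'old_api' is deprecated: use new_api",
   followed by the "declared here" note at its declaration.  */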
12230 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12231 somewhere in it. */
12233 bool
12234 contains_bitfld_component_ref_p (const_tree ref)
12236 while (handled_component_p (ref))
12238 if (TREE_CODE (ref) == COMPONENT_REF
12239 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12240 return true;
12241 ref = TREE_OPERAND (ref, 0);
12244 return false;
12247 /* Try to determine whether a TRY_CATCH expression can fall through.
12248 This is a subroutine of block_may_fallthru. */
12250 static bool
12251 try_catch_may_fallthru (const_tree stmt)
12253 tree_stmt_iterator i;
12255 /* If the TRY block can fall through, the whole TRY_CATCH can
12256 fall through. */
12257 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12258 return true;
12260 i = tsi_start (TREE_OPERAND (stmt, 1));
12261 switch (TREE_CODE (tsi_stmt (i)))
12263 case CATCH_EXPR:
12264 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12265 catch expression and a body. The whole TRY_CATCH may fall
12266 through iff any of the catch bodies falls through. */
12267 for (; !tsi_end_p (i); tsi_next (&i))
12269 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12270 return true;
12272 return false;
12274 case EH_FILTER_EXPR:
12275 /* The exception filter expression only matters if there is an
12276 exception. If the exception does not match EH_FILTER_TYPES,
12277 we will execute EH_FILTER_FAILURE, and we will fall through
12278 if that falls through. If the exception does match
12279 EH_FILTER_TYPES, the stack unwinder will continue up the
12280 stack, so we will not fall through. We don't know whether we
12281 will throw an exception which matches EH_FILTER_TYPES or not,
12282 so we just ignore EH_FILTER_TYPES and assume that we might
12283 throw an exception which doesn't match. */
12284 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12286 default:
12287 /* This case represents statements to be executed when an
12288 exception occurs. Those statements are implicitly followed
12289 by a RESX statement to resume execution after the exception.
12290 So in this case the TRY_CATCH never falls through. */
12291 return false;
12295 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12296 need not be 100% accurate; simply be conservative and return true if we
12297 don't know. This is used only to avoid stupidly generating extra code.
12298 If we're wrong, we'll just delete the extra code later. */
12300 bool
12301 block_may_fallthru (const_tree block)
12303 /* This CONST_CAST is okay because expr_last returns its argument
12304 unmodified and we assign it to a const_tree. */
12305 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12307 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12309 case GOTO_EXPR:
12310 case RETURN_EXPR:
12311 /* Easy cases. If the last statement of the block implies
12312 control transfer, then we can't fall through. */
12313 return false;
12315 case SWITCH_EXPR:
12316 /* If SWITCH_LABELS is set, this is lowered, and represents a
12317 branch to a selected label and hence cannot fall through.
12318 Otherwise SWITCH_BODY is set, and the switch can fall
12319 through. */
12320 return SWITCH_LABELS (stmt) == NULL_TREE;
12322 case COND_EXPR:
12323 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12324 return true;
12325 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12327 case BIND_EXPR:
12328 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12330 case TRY_CATCH_EXPR:
12331 return try_catch_may_fallthru (stmt);
12333 case TRY_FINALLY_EXPR:
12334 /* The finally clause is always executed after the try clause,
12335 so if it does not fall through, then the try-finally will not
12336 fall through. Otherwise, if the try clause does not fall
12337 through, then when the finally clause falls through it will
12338 resume execution wherever the try clause was going. So the
12339 whole try-finally will only fall through if both the try
12340 clause and the finally clause fall through. */
12341 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12342 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12344 case MODIFY_EXPR:
12345 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12346 stmt = TREE_OPERAND (stmt, 1);
12347 else
12348 return true;
12349 /* FALLTHRU */
12351 case CALL_EXPR:
12352 /* Functions that do not return do not fall through. */
12353 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12355 case CLEANUP_POINT_EXPR:
12356 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12358 case TARGET_EXPR:
12359 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12361 case ERROR_MARK:
12362 return true;
12364 default:
12365 return lang_hooks.block_may_fallthru (stmt);
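/* A few illustrative cases (hypothetical statement lists): a
   BIND_EXPR whose body ends in a RETURN_EXPR cannot fall through; a
   COND_EXPR may fall through if either arm may; and a call to a
   noreturn function such as abort does not fall through because
   ECF_NORETURN is set on it.  */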
12369 /* True if we are using EH to handle cleanups. */
12370 static bool using_eh_for_cleanups_flag = false;
12372 /* This routine is called from front ends to indicate that EH should be used for
12373 cleanups. */
12374 void
12375 using_eh_for_cleanups (void)
12377 using_eh_for_cleanups_flag = true;
12380 /* Query whether EH is used for cleanups. */
12381 bool
12382 using_eh_for_cleanups_p (void)
12384 return using_eh_for_cleanups_flag;
12387 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12388 const char *
12389 get_tree_code_name (enum tree_code code)
12391 const char *invalid = "<invalid tree code>";
12393 if (code >= MAX_TREE_CODES)
12394 return invalid;
12396 return tree_code_name[code];
12399 /* Drops the TREE_OVERFLOW flag from T. */
12401 tree
12402 drop_tree_overflow (tree t)
12404 gcc_checking_assert (TREE_OVERFLOW (t));
12406 /* For tree codes with a sharing machinery re-build the result. */
12407 if (TREE_CODE (t) == INTEGER_CST)
12408 return wide_int_to_tree (TREE_TYPE (t), wi::to_wide (t));
12410 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12411 and drop the flag. */
12412 t = copy_node (t);
12413 TREE_OVERFLOW (t) = 0;
12415 /* For constants that contain nested constants, drop the flag
12416 from those as well. */
12417 if (TREE_CODE (t) == COMPLEX_CST)
12419 if (TREE_OVERFLOW (TREE_REALPART (t)))
12420 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12421 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12422 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12424 if (TREE_CODE (t) == VECTOR_CST)
12426 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
12428 tree& elt = VECTOR_CST_ELT (t, i);
12429 if (TREE_OVERFLOW (elt))
12430 elt = drop_tree_overflow (elt);
12433 return t;
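/* Example of the intended behavior (a sketch): an INTEGER_CST that
   carries TREE_OVERFLOW, say from a folded overflowing addition, is
   rebuilt through wide_int_to_tree and the shared constant of the
   same value without the flag is returned; a COMPLEX_CST is copied
   and the flag is cleared recursively on its real and imaginary
   parts.  */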
12436 /* Given a memory reference expression T, return its base address.
12437 The base address of a memory reference expression is the main
12438 object being referenced. For instance, the base address for
12439 'array[i].fld[j]' is 'array'. You can think of this as stripping
12440 away the offset part from a memory address.
12442 This function calls handled_component_p to strip away all the inner
12443 parts of the memory reference until it reaches the base object. */
12445 tree
12446 get_base_address (tree t)
12448 while (handled_component_p (t))
12449 t = TREE_OPERAND (t, 0);
12451 if ((TREE_CODE (t) == MEM_REF
12452 || TREE_CODE (t) == TARGET_MEM_REF)
12453 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12454 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12456 /* ??? Either the alias oracle or all callers need to properly deal
12457 with WITH_SIZE_EXPRs before we can look through those. */
12458 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12459 return NULL_TREE;
12461 return t;
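/* Sketch: for array[i].fld[j] the ARRAY_REFs and the COMPONENT_REF
   are peeled down to the VAR_DECL array; for a MEM_REF whose address
   operand is &array the ADDR_EXPR is looked through as well, so the
   declaration itself is returned.  */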
12464 /* Return a tree of sizetype representing the size, in bytes, of the element
12465 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12467 tree
12468 array_ref_element_size (tree exp)
12470 tree aligned_size = TREE_OPERAND (exp, 3);
12471 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12472 location_t loc = EXPR_LOCATION (exp);
12474 /* If a size was specified in the ARRAY_REF, it's the size measured
12475 in alignment units of the element type. So multiply by that value. */
12476 if (aligned_size)
12478 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12479 sizetype from another type of the same width and signedness. */
12480 if (TREE_TYPE (aligned_size) != sizetype)
12481 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12482 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12483 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12486 /* Otherwise, take the size from that of the element type. Substitute
12487 any PLACEHOLDER_EXPR that we have. */
12488 else
12489 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12492 /* Return a tree representing the lower bound of the array mentioned in
12493 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12495 tree
12496 array_ref_low_bound (tree exp)
12498 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12500 /* If a lower bound is specified in EXP, use it. */
12501 if (TREE_OPERAND (exp, 2))
12502 return TREE_OPERAND (exp, 2);
12504 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12505 substituting for a PLACEHOLDER_EXPR as needed. */
12506 if (domain_type && TYPE_MIN_VALUE (domain_type))
12507 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12509 /* Otherwise, return a zero of the appropriate type. */
12510 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12513 /* Return a tree representing the upper bound of the array mentioned in
12514 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12516 tree
12517 array_ref_up_bound (tree exp)
12519 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12521 /* If there is a domain type and it has an upper bound, use it, substituting
12522 for a PLACEHOLDER_EXPR as needed. */
12523 if (domain_type && TYPE_MAX_VALUE (domain_type))
12524 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12526 /* Otherwise fail. */
12527 return NULL_TREE;
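/* Illustrative sketch: for the C declaration int a[5][7] and the
   reference a[i], the ARRAY_REF carries no explicit operands 2 and 3,
   so array_ref_element_size yields a sizetype constant of 28
   (assuming a 4-byte int), array_ref_low_bound yields 0 and
   array_ref_up_bound yields 4, all taken from the element type and
   the domain of the array type.  */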
12530 /* Returns true if REF is an array reference or a component reference
12531 to an array at the end of a structure.
12532 If this is the case, the array may be allocated larger
12533 than its upper bound implies. */
12535 bool
12536 array_at_struct_end_p (tree ref)
12538 tree atype;
12540 if (TREE_CODE (ref) == ARRAY_REF
12541 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12543 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12544 ref = TREE_OPERAND (ref, 0);
12546 else if (TREE_CODE (ref) == COMPONENT_REF
12547 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12548 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12549 else
12550 return false;
12552 while (handled_component_p (ref))
12554 /* If the reference chain contains a component reference to a
12555 non-union type and there follows another field the reference
12556 is not at the end of a structure. */
12557 if (TREE_CODE (ref) == COMPONENT_REF)
12559 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12561 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12562 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12563 nextf = DECL_CHAIN (nextf);
12564 if (nextf)
12565 return false;
12568 /* If we have a multi-dimensional array, we do not consider
12569 a non-innermost dimension as a flexible array if the whole
12570 multi-dimensional array is at the end of the struct.
12571 The same holds for an array of aggregates with a trailing array
12572 member. */
12573 else if (TREE_CODE (ref) == ARRAY_REF)
12574 return false;
12575 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12577 /* If we view the underlying object as something else, then what we
12578 gathered up to now is what we have to rely on. */
12579 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12580 break;
12581 else
12582 gcc_unreachable ();
12584 ref = TREE_OPERAND (ref, 0);
12587 /* The array is now at the end of the struct. Treat flexible arrays as
12588 always subject to extension, even if only into padding constrained by
12589 an underlying decl. */
12590 if (! TYPE_SIZE (atype))
12591 return true;
12593 tree size = NULL;
12595 if (TREE_CODE (ref) == MEM_REF
12596 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
12598 size = TYPE_SIZE (TREE_TYPE (ref));
12599 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
12602 /* If the reference is based on a declared entity, the size of the array
12603 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12604 if (DECL_P (ref)
12605 /* Be sure the size of the MEM_REF target matches. For example:
12607 char buf[10];
12608 struct foo *str = (struct foo *)&buf;
12610 str->trailing_array[2] = 1;
12612 is valid because BUF allocates enough space. */
12614 && (!size || (DECL_SIZE (ref) != NULL
12615 && operand_equal_p (DECL_SIZE (ref), size, 0)))
12616 && !(flag_unconstrained_commons
12617 && VAR_P (ref) && DECL_COMMON (ref)))
12618 return false;
12620 return true;
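/* Sketch of the intended distinction (hypothetical declarations):
   for

     struct s { int n; int tail[1]; } *p;

   the reference p->tail[i] may extend past the declared bound and
   true is returned, while for

     struct s2 { int a[4]; int n; } v;

   v.a[i] is followed by another field, so the reference is not at
   the end of the structure and false is returned.  */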
12623 /* Return a tree representing the offset, in bytes, of the field referenced
12624 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12626 tree
12627 component_ref_field_offset (tree exp)
12629 tree aligned_offset = TREE_OPERAND (exp, 2);
12630 tree field = TREE_OPERAND (exp, 1);
12631 location_t loc = EXPR_LOCATION (exp);
12633 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12634 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12635 value. */
12636 if (aligned_offset)
12638 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12639 sizetype from another type of the same width and signedness. */
12640 if (TREE_TYPE (aligned_offset) != sizetype)
12641 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12642 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12643 size_int (DECL_OFFSET_ALIGN (field)
12644 / BITS_PER_UNIT));
12647 /* Otherwise, take the offset from that of the field. Substitute
12648 any PLACEHOLDER_EXPR that we have. */
12649 else
12650 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12653 /* Return the machine mode of T. For vectors, returns the mode of the
12654 inner type. The main use case is to feed the result to HONOR_NANS,
12655 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12657 machine_mode
12658 element_mode (const_tree t)
12660 if (!TYPE_P (t))
12661 t = TREE_TYPE (t);
12662 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12663 t = TREE_TYPE (t);
12664 return TYPE_MODE (t);
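/* For instance (a sketch): element_mode (complex_double_type_node)
   is the scalar DFmode rather than the complex mode, and for a V4SF
   vector type it is SFmode, which is what HONOR_NANS wants to look
   at.  */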
12667 /* Vector types need to re-check the target flags each time we report
12668 the machine mode. We need to do this because attribute target can
12669 change the result of vector_mode_supported_p and have_regs_of_mode
12670 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12671 change on a per-function basis. */
12672 /* ??? Possibly a better solution is to run through all the types
12673 referenced by a function and re-compute the TYPE_MODE once, rather
12674 than make the TYPE_MODE macro call a function. */
12676 machine_mode
12677 vector_type_mode (const_tree t)
12679 machine_mode mode;
12681 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12683 mode = t->type_common.mode;
12684 if (VECTOR_MODE_P (mode)
12685 && (!targetm.vector_mode_supported_p (mode)
12686 || !have_regs_of_mode[mode]))
12688 scalar_int_mode innermode;
12690 /* For integers, try mapping it to a same-sized scalar mode. */
12691 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12693 unsigned int size = (TYPE_VECTOR_SUBPARTS (t)
12694 * GET_MODE_BITSIZE (innermode));
12695 scalar_int_mode mode;
12696 if (int_mode_for_size (size, 0).exists (&mode)
12697 && have_regs_of_mode[mode])
12698 return mode;
12701 return BLKmode;
12704 return mode;
12707 /* Verify that basic properties of T match TV and thus T can be a variant of
12708 TV. TV should be the more specified variant (i.e. the main variant). */
12710 static bool
12711 verify_type_variant (const_tree t, tree tv)
12713 /* A type variant can differ by:
12715 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12716 ENCODE_QUAL_ADDR_SPACE.
12717 - the main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
12718 in this case some values may not be set in the variant types
12719 (see TYPE_COMPLETE_P checks).
12720 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12721 - by TYPE_NAME and attributes (i.e. when the variant originates via a typedef)
12722 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12723 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12724 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
12725 this is necessary to make it possible to merge types from different TUs
12726 - arrays, pointers and references may have TREE_TYPE that is a variant
12727 of the TREE_TYPE of their main variants.
12728 - aggregates may have a new TYPE_FIELDS list that lists variants of
12729 the main variant's TYPE_FIELDS.
12730 - vector types may differ by TYPE_VECTOR_OPAQUE
12733 /* Convenience macro for matching individual fields. */
12734 #define verify_variant_match(flag) \
12735 do { \
12736 if (flag (tv) != flag (t)) \
12738 error ("type variant differs by " #flag "."); \
12739 debug_tree (tv); \
12740 return false; \
12742 } while (false)
12744 /* tree_base checks. */
12746 verify_variant_match (TREE_CODE);
12747 /* FIXME: Ada builds non-artificial variants of artificial types. */
12748 if (TYPE_ARTIFICIAL (tv) && 0)
12749 verify_variant_match (TYPE_ARTIFICIAL);
12750 if (POINTER_TYPE_P (tv))
12751 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12752 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12753 verify_variant_match (TYPE_UNSIGNED);
12754 verify_variant_match (TYPE_PACKED);
12755 if (TREE_CODE (t) == REFERENCE_TYPE)
12756 verify_variant_match (TYPE_REF_IS_RVALUE);
12757 if (AGGREGATE_TYPE_P (t))
12758 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
12759 else
12760 verify_variant_match (TYPE_SATURATING);
12761 /* FIXME: This check triggers during the libstdc++ build. */
12762 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12763 verify_variant_match (TYPE_FINAL_P);
12765 /* tree_type_common checks. */
12767 if (COMPLETE_TYPE_P (t))
12769 verify_variant_match (TYPE_MODE);
12770 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
12771 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
12772 verify_variant_match (TYPE_SIZE);
12773 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
12774 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
12775 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
12777 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
12778 TYPE_SIZE_UNIT (tv), 0));
12779 error ("type variant has different TYPE_SIZE_UNIT");
12780 debug_tree (tv);
12781 error ("type variant's TYPE_SIZE_UNIT");
12782 debug_tree (TYPE_SIZE_UNIT (tv));
12783 error ("type's TYPE_SIZE_UNIT");
12784 debug_tree (TYPE_SIZE_UNIT (t));
12785 return false;
12788 verify_variant_match (TYPE_PRECISION);
12789 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12790 if (RECORD_OR_UNION_TYPE_P (t))
12791 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12792 else if (TREE_CODE (t) == ARRAY_TYPE)
12793 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12794 /* During LTO we merge variant lists from different translation units
12795 that may differ by TYPE_CONTEXT, which in turn may point
12796 to TRANSLATION_UNIT_DECL.
12797 Ada also builds variants of types with different TYPE_CONTEXT. */
12798 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12799 verify_variant_match (TYPE_CONTEXT);
12800 verify_variant_match (TYPE_STRING_FLAG);
12801 if (TYPE_ALIAS_SET_KNOWN_P (t))
12803 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
12804 debug_tree (tv);
12805 return false;
12808 /* tree_type_non_common checks. */
12810 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12811 and dangles the pointer from time to time. */
12812 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12813 && (in_lto_p || !TYPE_VFIELD (tv)
12814 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12816 error ("type variant has different TYPE_VFIELD");
12817 debug_tree (tv);
12818 return false;
12820 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12821 || TREE_CODE (t) == INTEGER_TYPE
12822 || TREE_CODE (t) == BOOLEAN_TYPE
12823 || TREE_CODE (t) == REAL_TYPE
12824 || TREE_CODE (t) == FIXED_POINT_TYPE)
12826 verify_variant_match (TYPE_MAX_VALUE);
12827 verify_variant_match (TYPE_MIN_VALUE);
12829 if (TREE_CODE (t) == METHOD_TYPE)
12830 verify_variant_match (TYPE_METHOD_BASETYPE);
12831 if (TREE_CODE (t) == OFFSET_TYPE)
12832 verify_variant_match (TYPE_OFFSET_BASETYPE);
12833 if (TREE_CODE (t) == ARRAY_TYPE)
12834 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12835 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12836 or even in the type's main variant. This is needed to make bootstrap pass,
12837 and the bug seems new in GCC 5.
12838 The C++ FE should be updated to make this consistent, and we should check
12839 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise there
12840 is a match with the main variant.
12842 Also disable the check for Java for now because of a parser hack that builds
12843 first a dummy BINFO and then sometimes replaces it with the real BINFO in some
12844 of the copies. */
12845 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12846 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12847 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12848 Since there is no cheap way to tell a C++ type from a Java type without LTO,
12849 do the checking at LTO time only. */
12850 && (in_lto_p && odr_type_p (t)))
12852 error ("type variant has different TYPE_BINFO");
12853 debug_tree (tv);
12854 error ("type variant's TYPE_BINFO");
12855 debug_tree (TYPE_BINFO (tv));
12856 error ("type's TYPE_BINFO");
12857 debug_tree (TYPE_BINFO (t));
12858 return false;
12861 /* Check various uses of TYPE_VALUES_RAW. */
12862 if (TREE_CODE (t) == ENUMERAL_TYPE)
12863 verify_variant_match (TYPE_VALUES);
12864 else if (TREE_CODE (t) == ARRAY_TYPE)
12865 verify_variant_match (TYPE_DOMAIN);
12866 /* Permit incomplete variants of a complete type. While FEs may complete
12867 all variants, this does not happen for C++ templates in all cases. */
12868 else if (RECORD_OR_UNION_TYPE_P (t)
12869 && COMPLETE_TYPE_P (t)
12870 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12872 tree f1, f2;
12874 /* Fortran builds qualified variants as new records with items of
12875 qualified type. Verify that they look the same. */
12876 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12877 f1 && f2;
12878 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12879 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12880 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12881 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12882 /* FIXME: gfc_nonrestricted_type builds all types as variants
12883 with the exception of pointer types. It deeply copies the type,
12884 which means that we may end up with a variant type
12885 referring to a non-variant pointer. We may change it to
12886 produce types as variants, too, like
12887 objc_get_protocol_qualified_type does. */
12888 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12889 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12890 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12891 break;
12892 if (f1 || f2)
12894 error ("type variant has different TYPE_FIELDS");
12895 debug_tree (tv);
12896 error ("first mismatch is field");
12897 debug_tree (f1);
12898 error ("and field");
12899 debug_tree (f2);
12900 return false;
12903 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12904 verify_variant_match (TYPE_ARG_TYPES);
12905 /* For C++ the qualified variant of an array type is really an array type
12906 of the qualified TREE_TYPE.
12907 ObjC builds variants of pointers where the pointed-to type is a variant, too,
12908 in objc_get_protocol_qualified_type. */
12909 if (TREE_TYPE (t) != TREE_TYPE (tv)
12910 && ((TREE_CODE (t) != ARRAY_TYPE
12911 && !POINTER_TYPE_P (t))
12912 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12913 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12915 error ("type variant has different TREE_TYPE");
12916 debug_tree (tv);
12917 error ("type variant's TREE_TYPE");
12918 debug_tree (TREE_TYPE (tv));
12919 error ("type's TREE_TYPE");
12920 debug_tree (TREE_TYPE (t));
12921 return false;
12923 if (type_with_alias_set_p (t)
12924 && !gimple_canonical_types_compatible_p (t, tv, false))
12926 error ("type is not compatible with its variant");
12927 debug_tree (tv);
12928 error ("type variant's TREE_TYPE");
12929 debug_tree (TREE_TYPE (tv));
12930 error ("type's TREE_TYPE");
12931 debug_tree (TREE_TYPE (t));
12932 return false;
12934 return true;
12935 #undef verify_variant_match
12939 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12940 the middle-end types_compatible_p function. It needs to avoid
12941 claiming types are different for types that should be treated
12942 the same with respect to TBAA. Canonical types are also used
12943 for IL consistency checks via the useless_type_conversion_p
12944 predicate which does not handle all type kinds itself but falls
12945 back to pointer-comparison of TYPE_CANONICAL for aggregates
12946 for example. */
12948 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
12949 type calculation because we need to allow inter-operability between signed
12950 and unsigned variants. */
12952 bool
12953 type_with_interoperable_signedness (const_tree type)
12955 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
12956 signed char and unsigned char. Similarly the Fortran FE builds
12957 C_SIZE_T as a signed type, while C defines it as unsigned. */
12959 return tree_code_for_canonical_type_merging (TREE_CODE (type))
12960 == INTEGER_TYPE
12961 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
12962 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
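/* Sketch: "signed char" and "unsigned char" share the precision of
   signed_char_type_node, so the function is true for both and the
   canonical-type machinery may treat Fortran's C_SIGNED_CHAR as
   compatible with either; a 32-bit "int" on a target with a 64-bit
   size_t matches neither precision and the function is false.  */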
12965 /* Return true iff T1 and T2 are structurally identical as far as
12966 TBAA is concerned.
12967 This function is used both by lto.c canonical type merging and by the
12968 verifier. If TRUST_TYPE_CANONICAL, we do not look into the structure of
12969 types that have TYPE_CANONICAL defined and assume them equivalent. This is
12970 useful only for LTO because only in that case does TYPE_CANONICAL
12971 equivalence correspond to the one defined by gimple_canonical_types_compatible_p. */
12973 bool
12974 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
12975 bool trust_type_canonical)
12977 /* Type variants should be the same as the main variant. When not doing sanity
12978 checking to verify this fact, go to main variants and save some work. */
12979 if (trust_type_canonical)
12981 t1 = TYPE_MAIN_VARIANT (t1);
12982 t2 = TYPE_MAIN_VARIANT (t2);
12985 /* Check first for the obvious case of pointer identity. */
12986 if (t1 == t2)
12987 return true;
12989 /* Check that we have two types to compare. */
12990 if (t1 == NULL_TREE || t2 == NULL_TREE)
12991 return false;
12993 /* We consider complete types always compatible with incomplete types.
12994 This does not make sense for canonical type calculation and thus we
12995 need to ensure that we are never called on them.
12997 FIXME: For more correctness the function probably should have three modes
12998 1) a mode assuming that the types are complete, matching their structure
12999 2) a mode allowing incomplete types but producing equivalence classes
13000 and thus ignoring all info from complete types
13001 3) a mode allowing incomplete types to match complete ones but checking
13002 compatibility between complete types.
13004 1 and 2 can be used for canonical type calculation. 3 is the real
13005 definition of type compatibility that can be used e.g. for warnings during
13006 declaration merging. */
13008 gcc_assert (!trust_type_canonical
13009 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13010 /* If the types have been previously registered and found equal
13011 they still are. */
13013 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13014 && trust_type_canonical)
13016 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13017 they are always NULL, but they are set to non-NULL for types
13018 constructed by build_pointer_type and variants. In this case the
13019 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13020 all pointers are considered equal). Be sure not to return false
13021 negatives. */
13022 gcc_checking_assert (canonical_type_used_p (t1)
13023 && canonical_type_used_p (t2));
13024 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13027 /* Can't be the same type if the types don't have the same code. */
13028 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13029 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13030 return false;
13032 /* Qualifiers do not matter for canonical type comparison purposes. */
13034 /* Void types and nullptr types are always the same. */
13035 if (TREE_CODE (t1) == VOID_TYPE
13036 || TREE_CODE (t1) == NULLPTR_TYPE)
13037 return true;
13039 /* Can't be the same type if they have different mode. */
13040 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13041 return false;
13043 /* Non-aggregate types can be handled cheaply. */
13044 if (INTEGRAL_TYPE_P (t1)
13045 || SCALAR_FLOAT_TYPE_P (t1)
13046 || FIXED_POINT_TYPE_P (t1)
13047 || TREE_CODE (t1) == VECTOR_TYPE
13048 || TREE_CODE (t1) == COMPLEX_TYPE
13049 || TREE_CODE (t1) == OFFSET_TYPE
13050 || POINTER_TYPE_P (t1))
13052 /* Can't be the same type if they have different precision. */
13053 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13054 return false;
13056 /* In some cases the signed and unsigned types are required to be
13057 inter-operable. */
13058 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13059 && !type_with_interoperable_signedness (t1))
13060 return false;
13062 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13063 interoperable with "signed char". Unless all frontends are revisited
13064 to agree on these types, we must ignore the flag completely. */
13066 /* The Fortran standard defines a C_PTR type that is compatible with every
13067 C pointer. For this reason we need to glob all pointers into one.
13068 Still, pointers in different address spaces are not compatible. */
13069 if (POINTER_TYPE_P (t1))
13071 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13072 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13073 return false;
13076 /* Tail-recurse to components. */
13077 if (TREE_CODE (t1) == VECTOR_TYPE
13078 || TREE_CODE (t1) == COMPLEX_TYPE)
13079 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13080 TREE_TYPE (t2),
13081 trust_type_canonical);
13083 return true;
13086 /* Do type-specific comparisons. */
13087 switch (TREE_CODE (t1))
13089 case ARRAY_TYPE:
13090 /* Array types are the same if the element types are the same and
13091 the number of elements is the same. */
13092 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13093 trust_type_canonical)
13094 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13095 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13096 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13097 return false;
13098 else
13100 tree i1 = TYPE_DOMAIN (t1);
13101 tree i2 = TYPE_DOMAIN (t2);
13103 /* For an incomplete external array, the type domain can be
13104 NULL_TREE. Check this condition also. */
13105 if (i1 == NULL_TREE && i2 == NULL_TREE)
13106 return true;
13107 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13108 return false;
13109 else
13111 tree min1 = TYPE_MIN_VALUE (i1);
13112 tree min2 = TYPE_MIN_VALUE (i2);
13113 tree max1 = TYPE_MAX_VALUE (i1);
13114 tree max2 = TYPE_MAX_VALUE (i2);
13116 /* The minimum/maximum values have to be the same. */
13117 if ((min1 == min2
13118 || (min1 && min2
13119 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13120 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13121 || operand_equal_p (min1, min2, 0))))
13122 && (max1 == max2
13123 || (max1 && max2
13124 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13125 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13126 || operand_equal_p (max1, max2, 0)))))
13127 return true;
13128 else
13129 return false;
13133 case METHOD_TYPE:
13134 case FUNCTION_TYPE:
13135 /* Function types are the same if the return type and argument types
13136 are the same. */
13137 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13138 trust_type_canonical))
13139 return false;
13141 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13142 return true;
13143 else
13145 tree parms1, parms2;
13147 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13148 parms1 && parms2;
13149 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13151 if (!gimple_canonical_types_compatible_p
13152 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13153 trust_type_canonical))
13154 return false;
13157 if (parms1 || parms2)
13158 return false;
13160 return true;
13163 case RECORD_TYPE:
13164 case UNION_TYPE:
13165 case QUAL_UNION_TYPE:
13167 tree f1, f2;
13169 /* Don't try to compare variants of an incomplete type, before
13170 TYPE_FIELDS has been copied around. */
13171 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13172 return true;
13175 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13176 return false;
13178 /* For aggregate types, all the fields must be the same. */
13179 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13180 f1 || f2;
13181 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13183 /* Skip non-fields and zero-sized fields. */
13184 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13185 || (DECL_SIZE (f1)
13186 && integer_zerop (DECL_SIZE (f1)))))
13187 f1 = TREE_CHAIN (f1);
13188 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13189 || (DECL_SIZE (f2)
13190 && integer_zerop (DECL_SIZE (f2)))))
13191 f2 = TREE_CHAIN (f2);
13192 if (!f1 || !f2)
13193 break;
13194 /* The fields must have the same name, offset and type. */
13195 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13196 || !gimple_compare_field_offset (f1, f2)
13197 || !gimple_canonical_types_compatible_p
13198 (TREE_TYPE (f1), TREE_TYPE (f2),
13199 trust_type_canonical))
13200 return false;
13203 /* If one aggregate has more fields than the other, they
13204 are not the same. */
13205 if (f1 || f2)
13206 return false;
13208 return true;
13211 default:
13212 /* Consider all types with language specific trees in them mutually
13213 compatible. This is executed only from verify_type and false
13214 positives can be tolerated. */
13215 gcc_assert (!in_lto_p);
13216 return true;
13220 /* Verify type T. */
13222 void
13223 verify_type (const_tree t)
13225 bool error_found = false;
13226 tree mv = TYPE_MAIN_VARIANT (t);
13227 if (!mv)
13229 error ("Main variant is not defined");
13230 error_found = true;
13232 else if (mv != TYPE_MAIN_VARIANT (mv))
13234 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13235 debug_tree (mv);
13236 error_found = true;
13238 else if (t != mv && !verify_type_variant (t, mv))
13239 error_found = true;
13241 tree ct = TYPE_CANONICAL (t);
13242 if (!ct)
13244 else if (TYPE_CANONICAL (t) != ct)
13246 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13247 debug_tree (ct);
13248 error_found = true;
13250 /* Method and function types cannot be used to address memory and thus
13251 TYPE_CANONICAL really matters only for determining useless conversions.
13253 FIXME: The C++ FE produces declarations of builtin functions that are not
13254 compatible with main variants. */
13255 else if (TREE_CODE (t) == FUNCTION_TYPE)
13257 else if (t != ct
13258 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13259 with variably sized arrays because their sizes are possibly
13260 gimplified to different variables. */
13261 && !variably_modified_type_p (ct, NULL)
13262 && !gimple_canonical_types_compatible_p (t, ct, false))
13264 error ("TYPE_CANONICAL is not compatible");
13265 debug_tree (ct);
13266 error_found = true;
13269 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13270 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13272 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13273 debug_tree (ct);
13274 error_found = true;
13276 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13278 error ("TYPE_CANONICAL of main variant is not main variant");
13279 debug_tree (ct);
13280 debug_tree (TYPE_MAIN_VARIANT (ct));
13281 error_found = true;
13285 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13286 if (RECORD_OR_UNION_TYPE_P (t))
13288 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13289 and dangles the pointer from time to time. */
13290 if (TYPE_VFIELD (t)
13291 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13292 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13294 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13295 debug_tree (TYPE_VFIELD (t));
13296 error_found = true;
13299 else if (TREE_CODE (t) == POINTER_TYPE)
13301 if (TYPE_NEXT_PTR_TO (t)
13302 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13304 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13305 debug_tree (TYPE_NEXT_PTR_TO (t));
13306 error_found = true;
13309 else if (TREE_CODE (t) == REFERENCE_TYPE)
13311 if (TYPE_NEXT_REF_TO (t)
13312 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13314 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13315 debug_tree (TYPE_NEXT_REF_TO (t));
13316 error_found = true;
13319 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13320 || TREE_CODE (t) == FIXED_POINT_TYPE)
13322 /* FIXME: The following check should pass:
13323 useless_type_conversion_p (const_cast <tree> (t),
13324 TREE_TYPE (TYPE_MIN_VALUE (t)))
13325 but does not for C sizetypes in LTO. */
13328 /* Check various uses of TYPE_MAXVAL_RAW. */
13329 if (RECORD_OR_UNION_TYPE_P (t))
13331 if (!TYPE_BINFO (t))
13333 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13335 error ("TYPE_BINFO is not TREE_BINFO");
13336 debug_tree (TYPE_BINFO (t));
13337 error_found = true;
13339 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13341 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13342 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13343 error_found = true;
13346 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13348 if (TYPE_METHOD_BASETYPE (t)
13349 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13350 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13352 error ("TYPE_METHOD_BASETYPE is not record nor union");
13353 debug_tree (TYPE_METHOD_BASETYPE (t));
13354 error_found = true;
13357 else if (TREE_CODE (t) == OFFSET_TYPE)
13359 if (TYPE_OFFSET_BASETYPE (t)
13360 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13361 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13363 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13364 debug_tree (TYPE_OFFSET_BASETYPE (t));
13365 error_found = true;
13368 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13369 || TREE_CODE (t) == FIXED_POINT_TYPE)
13371 /* FIXME: The following check should pass:
13372 useless_type_conversion_p (const_cast <tree> (t),
13373 TREE_TYPE (TYPE_MAX_VALUE (t)))
13374 but does not for C sizetypes in LTO. */
13376 else if (TREE_CODE (t) == ARRAY_TYPE)
13378 if (TYPE_ARRAY_MAX_SIZE (t)
13379 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13381 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13382 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13383 error_found = true;
13386 else if (TYPE_MAX_VALUE_RAW (t))
13388 error ("TYPE_MAX_VALUE_RAW non-NULL");
13389 debug_tree (TYPE_MAX_VALUE_RAW (t));
13390 error_found = true;
13393 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13395 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13396 debug_tree (TYPE_LANG_SLOT_1 (t));
13397 error_found = true;
13400 /* Check various uses of TYPE_VALUES_RAW. */
13401 if (TREE_CODE (t) == ENUMERAL_TYPE)
13402 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13404 tree value = TREE_VALUE (l);
13405 tree name = TREE_PURPOSE (l);
13407 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
13408 CONST_DECLs of ENUMERAL_TYPE. */
13409 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13411 error ("Enum value is not CONST_DECL or INTEGER_CST");
13412 debug_tree (value);
13413 debug_tree (name);
13414 error_found = true;
13416 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13417 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13419 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13420 debug_tree (value);
13421 debug_tree (name);
13422 error_found = true;
13424 if (TREE_CODE (name) != IDENTIFIER_NODE)
13426 error ("Enum value name is not IDENTIFIER_NODE");
13427 debug_tree (value);
13428 debug_tree (name);
13429 error_found = true;
13432 else if (TREE_CODE (t) == ARRAY_TYPE)
13434 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13436 error ("Array TYPE_DOMAIN is not integer type");
13437 debug_tree (TYPE_DOMAIN (t));
13438 error_found = true;
13441 else if (RECORD_OR_UNION_TYPE_P (t))
13443 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13445 error ("TYPE_FIELDS defined in incomplete type");
13446 error_found = true;
13448 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13450 /* TODO: verify properties of decls. */
13451 if (TREE_CODE (fld) == FIELD_DECL)
13453 else if (TREE_CODE (fld) == TYPE_DECL)
13455 else if (TREE_CODE (fld) == CONST_DECL)
13457 else if (VAR_P (fld))
13459 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13461 else if (TREE_CODE (fld) == USING_DECL)
13463 else if (TREE_CODE (fld) == FUNCTION_DECL)
13465 else
13467 error ("Wrong tree in TYPE_FIELDS list");
13468 debug_tree (fld);
13469 error_found = true;
13473 else if (TREE_CODE (t) == INTEGER_TYPE
13474 || TREE_CODE (t) == BOOLEAN_TYPE
13475 || TREE_CODE (t) == OFFSET_TYPE
13476 || TREE_CODE (t) == REFERENCE_TYPE
13477 || TREE_CODE (t) == NULLPTR_TYPE
13478 || TREE_CODE (t) == POINTER_TYPE)
13480 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13482 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13483 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13484 error_found = true;
13486 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13488 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13489 debug_tree (TYPE_CACHED_VALUES (t));
13490 error_found = true;
13492 /* Verify just enough of the cache to ensure that no one copied it to a new
13493 type. All copying should go through copy_node, which should clear it. */
13494 else if (TYPE_CACHED_VALUES_P (t))
13496 int i;
13497 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13498 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13499 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13501 error ("wrong TYPE_CACHED_VALUES entry");
13502 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13503 error_found = true;
13504 break;
13508 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13509 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13511 /* C++ FE uses TREE_PURPOSE to store initial values. */
13512 if (TREE_PURPOSE (l) && in_lto_p)
13514 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13515 debug_tree (l);
13516 error_found = true;
13518 if (!TYPE_P (TREE_VALUE (l)))
13520 error ("Wrong entry in TYPE_ARG_TYPES list");
13521 debug_tree (l);
13522 error_found = true;
13525 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13527 error ("TYPE_VALUES_RAW field is non-NULL");
13528 debug_tree (TYPE_VALUES_RAW (t));
13529 error_found = true;
13531 if (TREE_CODE (t) != INTEGER_TYPE
13532 && TREE_CODE (t) != BOOLEAN_TYPE
13533 && TREE_CODE (t) != OFFSET_TYPE
13534 && TREE_CODE (t) != REFERENCE_TYPE
13535 && TREE_CODE (t) != NULLPTR_TYPE
13536 && TREE_CODE (t) != POINTER_TYPE
13537 && TYPE_CACHED_VALUES_P (t))
13539 error ("TYPE_CACHED_VALUES_P is set while it should not");
13540 error_found = true;
13542 if (TYPE_STRING_FLAG (t)
13543 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13545 error ("TYPE_STRING_FLAG is set on wrong type code");
13546 error_found = true;
13549 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13550 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13551 of a type. */
13552 if (TREE_CODE (t) == METHOD_TYPE
13553 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13555 error ("TYPE_METHOD_BASETYPE is not main variant");
13556 error_found = true;
13559 if (error_found)
13561 debug_tree (const_cast <tree> (t));
13562 internal_error ("verify_type failed");
13567 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
13568 always positive, 2 if ARG is known to be always negative, or 3 if
13569 ARG may be positive or negative. */
13572 get_range_pos_neg (tree arg)
13574 if (arg == error_mark_node)
13575 return 3;
13577 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13578 int cnt = 0;
13579 if (TREE_CODE (arg) == INTEGER_CST)
13581 wide_int w = wi::sext (wi::to_wide (arg), prec);
13582 if (wi::neg_p (w))
13583 return 2;
13584 else
13585 return 1;
13587 while (CONVERT_EXPR_P (arg)
13588 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13589 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13591 arg = TREE_OPERAND (arg, 0);
13592 /* A narrower value zero-extended into a wider type
13593 will always result in a positive value. */
13594 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13595 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13596 return 1;
13597 prec = TYPE_PRECISION (TREE_TYPE (arg));
13598 if (++cnt > 30)
13599 return 3;
13602 if (TREE_CODE (arg) != SSA_NAME)
13603 return 3;
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
        {
          tree t = gimple_assign_rhs1 (g);
          if (INTEGRAL_TYPE_P (TREE_TYPE (t))
              && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
            {
              if (TYPE_UNSIGNED (TREE_TYPE (t))
                  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
                return 1;
              prec = TYPE_PRECISION (TREE_TYPE (t));
              arg = t;
              if (++cnt > 30)
                return 3;
              continue;
            }
        }
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
         below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
        return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
        return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
        return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
        return 2;
    }
  return 3;
}

/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
              && (POINTER_TYPE_P (TREE_TYPE (arg))
                  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non-null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* The THIS argument of a method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
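  /* Otherwise, scan the "nonnull" attributes on the current function's
     type and check whether ARG's position is listed in one of them.  */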
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
        return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
        return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
           t;
           t = DECL_CHAIN (t), arg_num++)
        {
          if (t == arg)
            break;
        }

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
        {
          if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
            return true;
        }
    }

  return false;
}

/* Combine LOC and BLOCK into a combined ad-hoc location, retaining any
   range information.  */

location_t
set_block (location_t loc, tree block)
{
  location_t pure_loc = get_pure_location (loc);
  source_range src_range = get_range_from_loc (line_table, loc);
  return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
}
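
/* Set the source range of EXPR to the range running from START to FINISH,
   by delegating to the source_range overload below.  */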
location_t
set_source_range (tree expr, location_t start, location_t finish)
{
  source_range src_range;
  src_range.m_start = start;
  src_range.m_finish = finish;
  return set_source_range (expr, src_range);
}
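
/* Set the source range of EXPR to SRC_RANGE.  Return the resulting ad-hoc
   location, or UNKNOWN_LOCATION if EXPR is not an expression node.  */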
location_t
set_source_range (tree expr, source_range src_range)
{
  if (!EXPR_P (expr))
    return UNKNOWN_LOCATION;

  location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
  location_t adhoc = COMBINE_LOCATION_DATA (line_table,
                                            pure_loc,
                                            src_range,
                                            NULL);
  SET_EXPR_LOCATION (expr, adhoc);
  return adhoc;
}

/* Return the name of combined function FN, for debugging purposes.  */

const char *
combined_fn_name (combined_fn fn)
{
  if (builtin_fn_p (fn))
    {
      tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
      return IDENTIFIER_POINTER (DECL_NAME (fndecl));
    }
  else
    return internal_fn_name (as_internal_fn (fn));
}

/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's arguments are nonnull.  The caller
   must free the bitmap.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return NULL;

  bitmap argmap = NULL;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
        break;
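
      /* Allocate the bitmap lazily, on the first nonnull attribute seen.  */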
      if (!argmap)
        argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
        {
          /* Clear the bitmap in case a previous attribute nonnull
             set it and this one overrides it for all arguments.  */
          bitmap_clear (argmap);
          return argmap;
        }

      /* Iterate over the indices of the arguments declared nonnull
         and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
        {
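          /* Attribute argument positions are 1-based; the bitmap is
             0-based.  */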
          unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
          bitmap_set_bit (argmap, val);
        }
    }

  return argmap;
}

/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Keep the size up to date in tree.h!  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};

#if CHECKING_P

namespace selftest {

/* Selftests for tree.  */

/* Verify that integer constants are sane.  */

static void
test_integer_constants ()
{
  ASSERT_TRUE (integer_type_node != NULL);
  ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);

  tree type = integer_type_node;

  tree zero = build_zero_cst (type);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
  ASSERT_EQ (type, TREE_TYPE (zero));

  tree one = build_int_cst (type, 1);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
  ASSERT_EQ (type, TREE_TYPE (one));
}

/* Verify identifiers.  */

static void
test_identifiers ()
{
  tree identifier = get_identifier ("foo");
  ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
  ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
}

/* Verify LABEL_DECL.  */

static void
test_labels ()
{
  tree identifier = get_identifier ("err");
  tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
                                identifier, void_type_node);
  ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
  ASSERT_FALSE (FORCED_LABEL (label_decl));
}

/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
}

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"