1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
71 /* Tree code classes. */
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
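/* Editorial note, not in the original source: the three tables above are an
   instance of the "X macro" technique.  Every entry in all-tree.def has the
   shape DEFTREECODE (SYM, NAME, TYPE, LENGTH); redefining DEFTREECODE (and
   END_OF_BASE_TREE_CODES) before each #include selects which column is
   emitted.  For example, the tree.def entry

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes tcc_binary to tree_code_type[], 2 to tree_code_length[] and
   "plus_expr" to tree_code_name[], so all three arrays stay indexed by the
   same enum tree_code value.  */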
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
112 const char *const tree_code_class_strings[] =
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 /* Statistics-gathering stuff. */
132 static uint64_t tree_code_counts[MAX_TREE_CODES];
133 uint64_t tree_node_counts[(int) all_kinds];
134 uint64_t tree_node_sizes[(int) all_kinds];
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
167 struct GTY((for_user)) type_hash {
168 unsigned long hash;
169 tree type;
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
175 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 static hashval_t hash (type_hash *t) { return t->hash; }
178 static bool equal (type_hash *a, type_hash *b);
180 static int
181 keep_cache_entry (type_hash *&t)
183 return ggc_marked_p (t->type);
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
194 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
199 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 static hashval_t hash (tree t);
202 static bool equal (tree x, tree y);
205 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 /* Class and variable for making sure that there is a single POLY_INT_CST
208 for a given value. */
209 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
211 typedef std::pair<tree, const poly_wide_int *> compare_type;
212 static hashval_t hash (tree t);
213 static bool equal (tree x, const compare_type &y);
216 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218 /* Hash table for optimization flags and target option flags. Use the same
219 hash table for both sets of options. Nodes for building the current
220 optimization and target option nodes. The assumption is most of the time
221 the options created will already be in the hash table, so we avoid
222 allocating and freeing up a node repeatedly. */
223 static GTY (()) tree cl_optimization_node;
224 static GTY (()) tree cl_target_option_node;
226 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
228 static hashval_t hash (tree t);
229 static bool equal (tree x, tree y);
232 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234 /* General tree->tree mapping structure for use in hash tables. */
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
240 static GTY ((cache))
241 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
245 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
247 static bool
248 equal (tree_vec_map *a, tree_vec_map *b)
250 return a->base.from == b->base.from;
253 static int
254 keep_cache_entry (tree_vec_map *&m)
256 return ggc_marked_p (m->base.from);
260 static GTY ((cache))
261 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263 static void set_type_quals (tree, int);
264 static void print_type_hash_statistics (void);
265 static void print_debug_expr_statistics (void);
266 static void print_value_expr_statistics (void);
268 tree global_trees[TI_MAX];
269 tree integer_types[itk_none];
271 bool int_n_enabled_p[NUM_INT_N_ENTS];
272 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
274 bool tree_contains_struct[MAX_TREE_CODES][64];
276 /* Number of operands for each OpenMP clause. */
277 unsigned const char omp_clause_num_ops[] =
279 0, /* OMP_CLAUSE_ERROR */
280 1, /* OMP_CLAUSE_PRIVATE */
281 1, /* OMP_CLAUSE_SHARED */
282 1, /* OMP_CLAUSE_FIRSTPRIVATE */
283 2, /* OMP_CLAUSE_LASTPRIVATE */
284 5, /* OMP_CLAUSE_REDUCTION */
285 5, /* OMP_CLAUSE_TASK_REDUCTION */
286 5, /* OMP_CLAUSE_IN_REDUCTION */
287 1, /* OMP_CLAUSE_COPYIN */
288 1, /* OMP_CLAUSE_COPYPRIVATE */
289 3, /* OMP_CLAUSE_LINEAR */
290 2, /* OMP_CLAUSE_ALIGNED */
291 1, /* OMP_CLAUSE_DEPEND */
292 1, /* OMP_CLAUSE_NONTEMPORAL */
293 1, /* OMP_CLAUSE_UNIFORM */
294 1, /* OMP_CLAUSE_TO_DECLARE */
295 1, /* OMP_CLAUSE_LINK */
296 2, /* OMP_CLAUSE_FROM */
297 2, /* OMP_CLAUSE_TO */
298 2, /* OMP_CLAUSE_MAP */
299 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
300 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
301 2, /* OMP_CLAUSE__CACHE_ */
302 2, /* OMP_CLAUSE_GANG */
303 1, /* OMP_CLAUSE_ASYNC */
304 1, /* OMP_CLAUSE_WAIT */
305 0, /* OMP_CLAUSE_AUTO */
306 0, /* OMP_CLAUSE_SEQ */
307 1, /* OMP_CLAUSE__LOOPTEMP_ */
308 1, /* OMP_CLAUSE__REDUCTEMP_ */
309 1, /* OMP_CLAUSE_IF */
310 1, /* OMP_CLAUSE_NUM_THREADS */
311 1, /* OMP_CLAUSE_SCHEDULE */
312 0, /* OMP_CLAUSE_NOWAIT */
313 1, /* OMP_CLAUSE_ORDERED */
314 0, /* OMP_CLAUSE_DEFAULT */
315 3, /* OMP_CLAUSE_COLLAPSE */
316 0, /* OMP_CLAUSE_UNTIED */
317 1, /* OMP_CLAUSE_FINAL */
318 0, /* OMP_CLAUSE_MERGEABLE */
319 1, /* OMP_CLAUSE_DEVICE */
320 1, /* OMP_CLAUSE_DIST_SCHEDULE */
321 0, /* OMP_CLAUSE_INBRANCH */
322 0, /* OMP_CLAUSE_NOTINBRANCH */
323 1, /* OMP_CLAUSE_NUM_TEAMS */
324 1, /* OMP_CLAUSE_THREAD_LIMIT */
325 0, /* OMP_CLAUSE_PROC_BIND */
326 1, /* OMP_CLAUSE_SAFELEN */
327 1, /* OMP_CLAUSE_SIMDLEN */
328 0, /* OMP_CLAUSE_FOR */
329 0, /* OMP_CLAUSE_PARALLEL */
330 0, /* OMP_CLAUSE_SECTIONS */
331 0, /* OMP_CLAUSE_TASKGROUP */
332 1, /* OMP_CLAUSE_PRIORITY */
333 1, /* OMP_CLAUSE_GRAINSIZE */
334 1, /* OMP_CLAUSE_NUM_TASKS */
335 0, /* OMP_CLAUSE_NOGROUP */
336 0, /* OMP_CLAUSE_THREADS */
337 0, /* OMP_CLAUSE_SIMD */
338 1, /* OMP_CLAUSE_HINT */
339 0, /* OMP_CLAUSE_DEFAULTMAP */
340 1, /* OMP_CLAUSE__SIMDUID_ */
341 0, /* OMP_CLAUSE__SIMT_ */
342 0, /* OMP_CLAUSE_INDEPENDENT */
343 1, /* OMP_CLAUSE_WORKER */
344 1, /* OMP_CLAUSE_VECTOR */
345 1, /* OMP_CLAUSE_NUM_GANGS */
346 1, /* OMP_CLAUSE_NUM_WORKERS */
347 1, /* OMP_CLAUSE_VECTOR_LENGTH */
348 3, /* OMP_CLAUSE_TILE */
349 2, /* OMP_CLAUSE__GRIDDIM_ */
350 0, /* OMP_CLAUSE_IF_PRESENT */
351 0, /* OMP_CLAUSE_FINALIZE */
354 const char * const omp_clause_code_name[] =
356 "error_clause",
357 "private",
358 "shared",
359 "firstprivate",
360 "lastprivate",
361 "reduction",
362 "task_reduction",
363 "in_reduction",
364 "copyin",
365 "copyprivate",
366 "linear",
367 "aligned",
368 "depend",
369 "nontemporal",
370 "uniform",
371 "to",
372 "link",
373 "from",
374 "to",
375 "map",
376 "use_device_ptr",
377 "is_device_ptr",
378 "_cache_",
379 "gang",
380 "async",
381 "wait",
382 "auto",
383 "seq",
384 "_looptemp_",
385 "_reductemp_",
386 "if",
387 "num_threads",
388 "schedule",
389 "nowait",
390 "ordered",
391 "default",
392 "collapse",
393 "untied",
394 "final",
395 "mergeable",
396 "device",
397 "dist_schedule",
398 "inbranch",
399 "notinbranch",
400 "num_teams",
401 "thread_limit",
402 "proc_bind",
403 "safelen",
404 "simdlen",
405 "for",
406 "parallel",
407 "sections",
408 "taskgroup",
409 "priority",
410 "grainsize",
411 "num_tasks",
412 "nogroup",
413 "threads",
414 "simd",
415 "hint",
416 "defaultmap",
417 "_simduid_",
418 "_simt_",
419 "independent",
420 "worker",
421 "vector",
422 "num_gangs",
423 "num_workers",
424 "vector_length",
425 "tile",
426 "_griddim_",
427 "if_present",
428 "finalize",
432 /* Return the tree node structure used by tree code CODE. */
434 static inline enum tree_node_structure_enum
435 tree_node_structure_for_code (enum tree_code code)
437 switch (TREE_CODE_CLASS (code))
439 case tcc_declaration:
441 switch (code)
443 case FIELD_DECL:
444 return TS_FIELD_DECL;
445 case PARM_DECL:
446 return TS_PARM_DECL;
447 case VAR_DECL:
448 return TS_VAR_DECL;
449 case LABEL_DECL:
450 return TS_LABEL_DECL;
451 case RESULT_DECL:
452 return TS_RESULT_DECL;
453 case DEBUG_EXPR_DECL:
454 return TS_DECL_WRTL;
455 case CONST_DECL:
456 return TS_CONST_DECL;
457 case TYPE_DECL:
458 return TS_TYPE_DECL;
459 case FUNCTION_DECL:
460 return TS_FUNCTION_DECL;
461 case TRANSLATION_UNIT_DECL:
462 return TS_TRANSLATION_UNIT_DECL;
463 default:
464 return TS_DECL_NON_COMMON;
467 case tcc_type:
468 return TS_TYPE_NON_COMMON;
469 case tcc_reference:
470 case tcc_comparison:
471 case tcc_unary:
472 case tcc_binary:
473 case tcc_expression:
474 case tcc_statement:
475 case tcc_vl_exp:
476 return TS_EXP;
477 default: /* tcc_constant and tcc_exceptional */
478 break;
480 switch (code)
482 /* tcc_constant cases. */
483 case VOID_CST: return TS_TYPED;
484 case INTEGER_CST: return TS_INT_CST;
485 case POLY_INT_CST: return TS_POLY_INT_CST;
486 case REAL_CST: return TS_REAL_CST;
487 case FIXED_CST: return TS_FIXED_CST;
488 case COMPLEX_CST: return TS_COMPLEX;
489 case VECTOR_CST: return TS_VECTOR;
490 case STRING_CST: return TS_STRING;
491 /* tcc_exceptional cases. */
492 case ERROR_MARK: return TS_COMMON;
493 case IDENTIFIER_NODE: return TS_IDENTIFIER;
494 case TREE_LIST: return TS_LIST;
495 case TREE_VEC: return TS_VEC;
496 case SSA_NAME: return TS_SSA_NAME;
497 case PLACEHOLDER_EXPR: return TS_COMMON;
498 case STATEMENT_LIST: return TS_STATEMENT_LIST;
499 case BLOCK: return TS_BLOCK;
500 case CONSTRUCTOR: return TS_CONSTRUCTOR;
501 case TREE_BINFO: return TS_BINFO;
502 case OMP_CLAUSE: return TS_OMP_CLAUSE;
503 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
504 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
506 default:
507 gcc_unreachable ();
512 /* Initialize tree_contains_struct to describe the hierarchy of tree
513 nodes. */
515 static void
516 initialize_tree_contains_struct (void)
518 unsigned i;
520 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
522 enum tree_code code;
523 enum tree_node_structure_enum ts_code;
525 code = (enum tree_code) i;
526 ts_code = tree_node_structure_for_code (code);
528 /* Mark the TS structure itself. */
529 tree_contains_struct[code][ts_code] = 1;
531 /* Mark all the structures that TS is derived from. */
532 switch (ts_code)
534 case TS_TYPED:
535 case TS_BLOCK:
536 case TS_OPTIMIZATION:
537 case TS_TARGET_OPTION:
538 MARK_TS_BASE (code);
539 break;
541 case TS_COMMON:
542 case TS_INT_CST:
543 case TS_POLY_INT_CST:
544 case TS_REAL_CST:
545 case TS_FIXED_CST:
546 case TS_VECTOR:
547 case TS_STRING:
548 case TS_COMPLEX:
549 case TS_SSA_NAME:
550 case TS_CONSTRUCTOR:
551 case TS_EXP:
552 case TS_STATEMENT_LIST:
553 MARK_TS_TYPED (code);
554 break;
556 case TS_IDENTIFIER:
557 case TS_DECL_MINIMAL:
558 case TS_TYPE_COMMON:
559 case TS_LIST:
560 case TS_VEC:
561 case TS_BINFO:
562 case TS_OMP_CLAUSE:
563 MARK_TS_COMMON (code);
564 break;
566 case TS_TYPE_WITH_LANG_SPECIFIC:
567 MARK_TS_TYPE_COMMON (code);
568 break;
570 case TS_TYPE_NON_COMMON:
571 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
572 break;
574 case TS_DECL_COMMON:
575 MARK_TS_DECL_MINIMAL (code);
576 break;
578 case TS_DECL_WRTL:
579 case TS_CONST_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
583 case TS_DECL_NON_COMMON:
584 MARK_TS_DECL_WITH_VIS (code);
585 break;
587 case TS_DECL_WITH_VIS:
588 case TS_PARM_DECL:
589 case TS_LABEL_DECL:
590 case TS_RESULT_DECL:
591 MARK_TS_DECL_WRTL (code);
592 break;
594 case TS_FIELD_DECL:
595 MARK_TS_DECL_COMMON (code);
596 break;
598 case TS_VAR_DECL:
599 MARK_TS_DECL_WITH_VIS (code);
600 break;
602 case TS_TYPE_DECL:
603 case TS_FUNCTION_DECL:
604 MARK_TS_DECL_NON_COMMON (code);
605 break;
607 case TS_TRANSLATION_UNIT_DECL:
608 MARK_TS_DECL_COMMON (code);
609 break;
611 default:
612 gcc_unreachable ();
616 /* Basic consistency checks for attributes used in fold. */
617 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
618 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
619 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
620 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
621 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
622 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
623 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
624 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
625 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
626 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
629 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
630 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
631 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
632 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
633 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
634 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
635 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
636 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
637 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
643 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
644 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
645 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
646 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
649 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
650 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
651 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
652 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
653 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
655 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
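/* Editorial note, not in the original source: tree_contains_struct is the
   table behind the CODE_CONTAINS_STRUCT checks used throughout GCC (and in
   make_node below).  A sketch of the typical query, for some tree T:

     if (CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_DECL_COMMON))
       align = DECL_ALIGN (t);

   i.e. accessors that live in tree_decl_common are only valid for codes
   whose structure hierarchy, as recorded above, includes TS_DECL_COMMON.  */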
660 /* Init tree.c. */
662 void
663 init_ttree (void)
665 /* Initialize the hash table of types. */
666 type_hash_table
667 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
669 debug_expr_for_decl
670 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
672 value_expr_for_decl
673 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
675 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
677 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
679 int_cst_node = make_int_cst (1, 1);
681 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
683 cl_optimization_node = make_node (OPTIMIZATION_NODE);
684 cl_target_option_node = make_node (TARGET_OPTION_NODE);
686 /* Initialize the tree_contains_struct array. */
687 initialize_tree_contains_struct ();
688 lang_hooks.init_ts ();
692 /* The name of the object as the assembler will see it (but before any
693 translations made by ASM_OUTPUT_LABELREF). Often this is the same
694 as DECL_NAME. It is an IDENTIFIER_NODE. */
695 tree
696 decl_assembler_name (tree decl)
698 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
699 lang_hooks.set_decl_assembler_name (decl);
700 return DECL_ASSEMBLER_NAME_RAW (decl);
703 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
704 (either of which may be NULL). Inform the FE if this changes the
705 name. */
707 void
708 overwrite_decl_assembler_name (tree decl, tree name)
710 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
711 lang_hooks.overwrite_decl_assembler_name (decl, name);
714 /* When the target supports COMDAT groups, this indicates which group the
715 DECL is associated with. This can be either an IDENTIFIER_NODE or a
716 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
717 tree
718 decl_comdat_group (const_tree node)
720 struct symtab_node *snode = symtab_node::get (node);
721 if (!snode)
722 return NULL;
723 return snode->get_comdat_group ();
726 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
727 tree
728 decl_comdat_group_id (const_tree node)
730 struct symtab_node *snode = symtab_node::get (node);
731 if (!snode)
732 return NULL;
733 return snode->get_comdat_group_id ();
736 /* When the target supports named sections, return the name of the section
737 NODE is placed in (a string), or NULL if it is in no section. */
738 const char *
739 decl_section_name (const_tree node)
741 struct symtab_node *snode = symtab_node::get (node);
742 if (!snode)
743 return NULL;
744 return snode->get_section ();
747 /* Set the section name of NODE to VALUE (a string with the section name),
748 or clear it when VALUE is NULL. */
749 void
750 set_decl_section_name (tree node, const char *value)
752 struct symtab_node *snode;
754 if (value == NULL)
756 snode = symtab_node::get (node);
757 if (!snode)
758 return;
760 else if (VAR_P (node))
761 snode = varpool_node::get_create (node);
762 else
763 snode = cgraph_node::get_create (node);
764 snode->set_section (value);
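/* Editorial note, not in the original source: a sketch of how the two
   accessors above are typically used when placing a variable DECL into a
   named section (the section name here is made up):

     set_decl_section_name (decl, ".mydata");
     const char *sect = decl_section_name (decl);   -- now ".mydata"

   Note the asymmetry coded above: passing NULL only clears the section when
   DECL already has a symtab node; no node is created merely to record "no
   section".  */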
767 /* Return TLS model of a variable NODE. */
768 enum tls_model
769 decl_tls_model (const_tree node)
771 struct varpool_node *snode = varpool_node::get (node);
772 if (!snode)
773 return TLS_MODEL_NONE;
774 return snode->tls_model;
777 /* Set TLS model of variable NODE to MODEL. */
778 void
779 set_decl_tls_model (tree node, enum tls_model model)
781 struct varpool_node *vnode;
783 if (model == TLS_MODEL_NONE)
785 vnode = varpool_node::get (node);
786 if (!vnode)
787 return;
789 else
790 vnode = varpool_node::get_create (node);
791 vnode->tls_model = model;
794 /* Compute the number of bytes occupied by a tree with code CODE.
795 This function cannot be used for nodes that have variable sizes,
796 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
797 size_t
798 tree_code_size (enum tree_code code)
800 switch (TREE_CODE_CLASS (code))
802 case tcc_declaration: /* A decl node */
803 switch (code)
805 case FIELD_DECL: return sizeof (tree_field_decl);
806 case PARM_DECL: return sizeof (tree_parm_decl);
807 case VAR_DECL: return sizeof (tree_var_decl);
808 case LABEL_DECL: return sizeof (tree_label_decl);
809 case RESULT_DECL: return sizeof (tree_result_decl);
810 case CONST_DECL: return sizeof (tree_const_decl);
811 case TYPE_DECL: return sizeof (tree_type_decl);
812 case FUNCTION_DECL: return sizeof (tree_function_decl);
813 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
814 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
815 case NAMESPACE_DECL:
816 case IMPORTED_DECL:
817 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
818 default:
819 gcc_checking_assert (code >= NUM_TREE_CODES);
820 return lang_hooks.tree_size (code);
823 case tcc_type: /* a type node */
824 switch (code)
826 case OFFSET_TYPE:
827 case ENUMERAL_TYPE:
828 case BOOLEAN_TYPE:
829 case INTEGER_TYPE:
830 case REAL_TYPE:
831 case POINTER_TYPE:
832 case REFERENCE_TYPE:
833 case NULLPTR_TYPE:
834 case FIXED_POINT_TYPE:
835 case COMPLEX_TYPE:
836 case VECTOR_TYPE:
837 case ARRAY_TYPE:
838 case RECORD_TYPE:
839 case UNION_TYPE:
840 case QUAL_UNION_TYPE:
841 case VOID_TYPE:
842 case FUNCTION_TYPE:
843 case METHOD_TYPE:
844 case LANG_TYPE: return sizeof (tree_type_non_common);
845 default:
846 gcc_checking_assert (code >= NUM_TREE_CODES);
847 return lang_hooks.tree_size (code);
850 case tcc_reference: /* a reference */
851 case tcc_expression: /* an expression */
852 case tcc_statement: /* an expression with side effects */
853 case tcc_comparison: /* a comparison expression */
854 case tcc_unary: /* a unary arithmetic expression */
855 case tcc_binary: /* a binary arithmetic expression */
856 return (sizeof (struct tree_exp)
857 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
859 case tcc_constant: /* a constant */
860 switch (code)
862 case VOID_CST: return sizeof (tree_typed);
863 case INTEGER_CST: gcc_unreachable ();
864 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
865 case REAL_CST: return sizeof (tree_real_cst);
866 case FIXED_CST: return sizeof (tree_fixed_cst);
867 case COMPLEX_CST: return sizeof (tree_complex);
868 case VECTOR_CST: gcc_unreachable ();
869 case STRING_CST: gcc_unreachable ();
870 default:
871 gcc_checking_assert (code >= NUM_TREE_CODES);
872 return lang_hooks.tree_size (code);
875 case tcc_exceptional: /* something random, like an identifier. */
876 switch (code)
878 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
879 case TREE_LIST: return sizeof (tree_list);
881 case ERROR_MARK:
882 case PLACEHOLDER_EXPR: return sizeof (tree_common);
884 case TREE_VEC: gcc_unreachable ();
885 case OMP_CLAUSE: gcc_unreachable ();
887 case SSA_NAME: return sizeof (tree_ssa_name);
889 case STATEMENT_LIST: return sizeof (tree_statement_list);
890 case BLOCK: return sizeof (struct tree_block);
891 case CONSTRUCTOR: return sizeof (tree_constructor);
892 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
893 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
895 default:
896 gcc_checking_assert (code >= NUM_TREE_CODES);
897 return lang_hooks.tree_size (code);
900 default:
901 gcc_unreachable ();
905 /* Compute the number of bytes occupied by NODE. This routine only
906 looks at TREE_CODE, except for those nodes that have variable sizes. */
907 size_t
908 tree_size (const_tree node)
910 const enum tree_code code = TREE_CODE (node);
911 switch (code)
913 case INTEGER_CST:
914 return (sizeof (struct tree_int_cst)
915 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
917 case TREE_BINFO:
918 return (offsetof (struct tree_binfo, base_binfos)
919 + vec<tree, va_gc>
920 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
922 case TREE_VEC:
923 return (sizeof (struct tree_vec)
924 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
926 case VECTOR_CST:
927 return (sizeof (struct tree_vector)
928 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
930 case STRING_CST:
931 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
933 case OMP_CLAUSE:
934 return (sizeof (struct tree_omp_clause)
935 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
936 * sizeof (tree));
938 default:
939 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
940 return (sizeof (struct tree_exp)
941 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
942 else
943 return tree_code_size (code);
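/* Editorial note, not in the original source: for the variable-sized codes
   handled above this is the usual "struct with trailing array" computation.
   E.g. a TREE_VEC holding four elements occupies

     sizeof (struct tree_vec) + (4 - 1) * sizeof (tree)

   bytes, the "- 1" accounting for the one element's worth of storage the
   structure already provides inline.  */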
947 /* Return tree node kind based on tree CODE. */
949 static tree_node_kind
950 get_stats_node_kind (enum tree_code code)
952 enum tree_code_class type = TREE_CODE_CLASS (code);
954 switch (type)
956 case tcc_declaration: /* A decl node */
957 return d_kind;
958 case tcc_type: /* a type node */
959 return t_kind;
960 case tcc_statement: /* an expression with side effects */
961 return s_kind;
962 case tcc_reference: /* a reference */
963 return r_kind;
964 case tcc_expression: /* an expression */
965 case tcc_comparison: /* a comparison expression */
966 case tcc_unary: /* a unary arithmetic expression */
967 case tcc_binary: /* a binary arithmetic expression */
968 return e_kind;
969 case tcc_constant: /* a constant */
970 return c_kind;
971 case tcc_exceptional: /* something random, like an identifier. */
972 switch (code)
974 case IDENTIFIER_NODE:
975 return id_kind;
976 case TREE_VEC:
977 return vec_kind;
978 case TREE_BINFO:
979 return binfo_kind;
980 case SSA_NAME:
981 return ssa_name_kind;
982 case BLOCK:
983 return b_kind;
984 case CONSTRUCTOR:
985 return constr_kind;
986 case OMP_CLAUSE:
987 return omp_clause_kind;
988 default:
989 return x_kind;
991 break;
992 case tcc_vl_exp:
993 return e_kind;
994 default:
995 gcc_unreachable ();
999 /* Record interesting allocation statistics for a tree node with CODE
1000 and LENGTH. */
1002 static void
1003 record_node_allocation_statistics (enum tree_code code, size_t length)
1005 if (!GATHER_STATISTICS)
1006 return;
1008 tree_node_kind kind = get_stats_node_kind (code);
1010 tree_code_counts[(int) code]++;
1011 tree_node_counts[(int) kind]++;
1012 tree_node_sizes[(int) kind] += length;
1015 /* Allocate and return a new UID from the DECL_UID namespace. */
1017 int
1018 allocate_decl_uid (void)
1020 return next_decl_uid++;
1023 /* Return a newly allocated node of code CODE. For decl and type
1024 nodes, some other fields are initialized. The rest of the node is
1025 initialized to zero. This function cannot be used for TREE_VEC,
1026 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1027 tree_code_size.
1029 Achoo! I got a code in the node. */
1031 tree
1032 make_node (enum tree_code code MEM_STAT_DECL)
1034 tree t;
1035 enum tree_code_class type = TREE_CODE_CLASS (code);
1036 size_t length = tree_code_size (code);
1038 record_node_allocation_statistics (code, length);
1040 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1041 TREE_SET_CODE (t, code);
1043 switch (type)
1045 case tcc_statement:
1046 if (code != DEBUG_BEGIN_STMT)
1047 TREE_SIDE_EFFECTS (t) = 1;
1048 break;
1050 case tcc_declaration:
1051 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1053 if (code == FUNCTION_DECL)
1055 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1056 SET_DECL_MODE (t, FUNCTION_MODE);
1058 else
1059 SET_DECL_ALIGN (t, 1);
1061 DECL_SOURCE_LOCATION (t) = input_location;
1062 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1063 DECL_UID (t) = --next_debug_decl_uid;
1064 else
1066 DECL_UID (t) = allocate_decl_uid ();
1067 SET_DECL_PT_UID (t, -1);
1069 if (TREE_CODE (t) == LABEL_DECL)
1070 LABEL_DECL_UID (t) = -1;
1072 break;
1074 case tcc_type:
1075 TYPE_UID (t) = next_type_uid++;
1076 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1077 TYPE_USER_ALIGN (t) = 0;
1078 TYPE_MAIN_VARIANT (t) = t;
1079 TYPE_CANONICAL (t) = t;
1081 /* Default to no attributes for type, but let target change that. */
1082 TYPE_ATTRIBUTES (t) = NULL_TREE;
1083 targetm.set_default_type_attributes (t);
1085 /* We have not yet computed the alias set for this type. */
1086 TYPE_ALIAS_SET (t) = -1;
1087 break;
1089 case tcc_constant:
1090 TREE_CONSTANT (t) = 1;
1091 break;
1093 case tcc_expression:
1094 switch (code)
1096 case INIT_EXPR:
1097 case MODIFY_EXPR:
1098 case VA_ARG_EXPR:
1099 case PREDECREMENT_EXPR:
1100 case PREINCREMENT_EXPR:
1101 case POSTDECREMENT_EXPR:
1102 case POSTINCREMENT_EXPR:
1103 /* All of these have side-effects, no matter what their
1104 operands are. */
1105 TREE_SIDE_EFFECTS (t) = 1;
1106 break;
1108 default:
1109 break;
1111 break;
1113 case tcc_exceptional:
1114 switch (code)
1116 case TARGET_OPTION_NODE:
1117 TREE_TARGET_OPTION(t)
1118 = ggc_cleared_alloc<struct cl_target_option> ();
1119 break;
1121 case OPTIMIZATION_NODE:
1122 TREE_OPTIMIZATION (t)
1123 = ggc_cleared_alloc<struct cl_optimization> ();
1124 break;
1126 default:
1127 break;
1129 break;
1131 default:
1132 /* Other classes need no special treatment. */
1133 break;
1136 return t;
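/* Editorial note, not in the original source: make_node is the low-level
   allocator that the various build_* helpers sit on top of.  A front end
   could create a bare artificial variable roughly like this (context and
   layout setup omitted):

     tree var = make_node (VAR_DECL);
     DECL_NAME (var) = get_identifier ("tmp");
     TREE_TYPE (var) = integer_type_node;
     DECL_ARTIFICIAL (var) = 1;

   The tcc_declaration case above has already assigned a fresh DECL_UID, a
   default alignment and the current input_location.  */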
1139 /* Free tree node. */
1141 void
1142 free_node (tree node)
1144 enum tree_code code = TREE_CODE (node);
1145 if (GATHER_STATISTICS)
1147 enum tree_node_kind kind = get_stats_node_kind (code);
1149 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1150 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1151 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1153 tree_code_counts[(int) TREE_CODE (node)]--;
1154 tree_node_counts[(int) kind]--;
1155 tree_node_sizes[(int) kind] -= tree_size (node);
1157 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1158 vec_free (CONSTRUCTOR_ELTS (node));
1159 else if (code == BLOCK)
1160 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1161 else if (code == TREE_BINFO)
1162 vec_free (BINFO_BASE_ACCESSES (node));
1163 ggc_free (node);
1166 /* Return a new node with the same contents as NODE except that its
1167 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1169 tree
1170 copy_node (tree node MEM_STAT_DECL)
1172 tree t;
1173 enum tree_code code = TREE_CODE (node);
1174 size_t length;
1176 gcc_assert (code != STATEMENT_LIST);
1178 length = tree_size (node);
1179 record_node_allocation_statistics (code, length);
1180 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1181 memcpy (t, node, length);
1183 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1184 TREE_CHAIN (t) = 0;
1185 TREE_ASM_WRITTEN (t) = 0;
1186 TREE_VISITED (t) = 0;
1188 if (TREE_CODE_CLASS (code) == tcc_declaration)
1190 if (code == DEBUG_EXPR_DECL)
1191 DECL_UID (t) = --next_debug_decl_uid;
1192 else
1194 DECL_UID (t) = allocate_decl_uid ();
1195 if (DECL_PT_UID_SET_P (node))
1196 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1198 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1199 && DECL_HAS_VALUE_EXPR_P (node))
1201 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1202 DECL_HAS_VALUE_EXPR_P (t) = 1;
1204 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1205 if (VAR_P (node))
1207 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1208 t->decl_with_vis.symtab_node = NULL;
1210 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1212 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1213 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1215 if (TREE_CODE (node) == FUNCTION_DECL)
1217 DECL_STRUCT_FUNCTION (t) = NULL;
1218 t->decl_with_vis.symtab_node = NULL;
1221 else if (TREE_CODE_CLASS (code) == tcc_type)
1223 TYPE_UID (t) = next_type_uid++;
1224 /* The following is so that the debug code for
1225 the copy is different from the original type.
1226 The two statements usually duplicate each other
1227 (because they clear fields of the same union),
1228 but the optimizer should catch that. */
1229 TYPE_SYMTAB_ADDRESS (t) = 0;
1230 TYPE_SYMTAB_DIE (t) = 0;
1232 /* Do not copy the values cache. */
1233 if (TYPE_CACHED_VALUES_P (t))
1235 TYPE_CACHED_VALUES_P (t) = 0;
1236 TYPE_CACHED_VALUES (t) = NULL_TREE;
1239 else if (code == TARGET_OPTION_NODE)
1241 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1242 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1243 sizeof (struct cl_target_option));
1245 else if (code == OPTIMIZATION_NODE)
1247 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1248 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1249 sizeof (struct cl_optimization));
1252 return t;
1255 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1256 For example, this can copy a list made of TREE_LIST nodes. */
1258 tree
1259 copy_list (tree list)
1261 tree head;
1262 tree prev, next;
1264 if (list == 0)
1265 return 0;
1267 head = prev = copy_node (list);
1268 next = TREE_CHAIN (list);
1269 while (next)
1271 TREE_CHAIN (prev) = copy_node (next);
1272 prev = TREE_CHAIN (prev);
1273 next = TREE_CHAIN (next);
1275 return head;
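/* Editorial note, not in the original source: copy_list duplicates only the
   spine of TREE_LIST nodes; the TREE_PURPOSE and TREE_VALUE fields still
   point at the original trees.  A sketch:

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
                            tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
     tree dup = copy_list (orig);
     -- dup != orig, but TREE_VALUE (dup) == TREE_VALUE (orig)  */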
1279 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1280 INTEGER_CST with value CST and type TYPE. */
1282 static unsigned int
1283 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1285 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1286 /* We need extra HWIs if CST is an unsigned integer with its
1287 upper bit set. */
1288 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1289 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1290 return cst.get_len ();
1293 /* Return a new INTEGER_CST with value CST and type TYPE. */
1295 static tree
1296 build_new_int_cst (tree type, const wide_int &cst)
1298 unsigned int len = cst.get_len ();
1299 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1300 tree nt = make_int_cst (len, ext_len);
1302 if (len < ext_len)
1304 --ext_len;
1305 TREE_INT_CST_ELT (nt, ext_len)
1306 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1307 for (unsigned int i = len; i < ext_len; ++i)
1308 TREE_INT_CST_ELT (nt, i) = -1;
1310 else if (TYPE_UNSIGNED (type)
1311 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1313 len--;
1314 TREE_INT_CST_ELT (nt, len)
1315 = zext_hwi (cst.elt (len),
1316 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1319 for (unsigned int i = 0; i < len; i++)
1320 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1321 TREE_TYPE (nt) = type;
1322 return nt;
1325 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1327 static tree
1328 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1329 CXX_MEM_STAT_INFO)
1331 size_t length = sizeof (struct tree_poly_int_cst);
1332 record_node_allocation_statistics (POLY_INT_CST, length);
1334 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1336 TREE_SET_CODE (t, POLY_INT_CST);
1337 TREE_CONSTANT (t) = 1;
1338 TREE_TYPE (t) = type;
1339 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1340 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1341 return t;
1344 /* Create a constant tree that contains CST sign-extended to TYPE. */
1346 tree
1347 build_int_cst (tree type, poly_int64 cst)
1349 /* Support legacy code. */
1350 if (!type)
1351 type = integer_type_node;
1353 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1356 /* Create a constant tree that contains CST zero-extended to TYPE. */
1358 tree
1359 build_int_cstu (tree type, poly_uint64 cst)
1361 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1364 /* Create a constant tree that contains CST sign-extended to TYPE. */
1366 tree
1367 build_int_cst_type (tree type, poly_int64 cst)
1369 gcc_assert (type);
1370 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
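/* Editorial note, not in the original source: the two entry points above
   differ only in how CST is extended to the precision of TYPE.  For the
   8-bit unsigned_char_type_node:

     build_int_cst  (unsigned_char_type_node, -1);   -- the constant 255
     build_int_cstu (unsigned_char_type_node, 255);  -- the same value

   Both funnel into wide_int_to_tree, which guarantees that equal values of
   the same type share a single INTEGER_CST node.  */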
1373 /* Constructs a tree of type TYPE with the value given by CST. The signedness
1374 of CST is assumed to be the same as the signedness of TYPE. */
1376 tree
1377 double_int_to_tree (tree type, double_int cst)
1379 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1382 /* We force the wide_int CST to the range of the type TYPE by sign or
1383 zero extending it. OVERFLOWABLE indicates if we are interested in
1384 overflow of the value, when >0 we are only interested in signed
1385 overflow, for <0 we are interested in any overflow. OVERFLOWED
1386 indicates whether overflow has already occurred. We force
1387 the value to be within the range of TYPE (by setting to 0 or 1 all
1388 the bits outside the type's range). We set TREE_OVERFLOW if
1389 OVERFLOWED is nonzero,
1390 or OVERFLOWABLE is >0 and signed overflow occurs,
1391 or OVERFLOWABLE is <0 and any overflow occurs.
1393 We return a new tree node for the extended wide_int. The node
1394 is shared if no overflow flags are set. */
1397 tree
1398 force_fit_type (tree type, const poly_wide_int_ref &cst,
1399 int overflowable, bool overflowed)
1401 signop sign = TYPE_SIGN (type);
1403 /* If we need to set overflow flags, return a new unshared node. */
1404 if (overflowed || !wi::fits_to_tree_p (cst, type))
1406 if (overflowed
1407 || overflowable < 0
1408 || (overflowable > 0 && sign == SIGNED))
1410 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1411 sign);
1412 tree t;
1413 if (tmp.is_constant ())
1414 t = build_new_int_cst (type, tmp.coeffs[0]);
1415 else
1417 tree coeffs[NUM_POLY_INT_COEFFS];
1418 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1420 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1421 TREE_OVERFLOW (coeffs[i]) = 1;
1423 t = build_new_poly_int_cst (type, coeffs);
1425 TREE_OVERFLOW (t) = 1;
1426 return t;
1430 /* Else build a shared node. */
1431 return wide_int_to_tree (type, cst);
1434 /* These are the hash table functions for the hash table of INTEGER_CST
1435 nodes of a sizetype. */
1437 /* Return the hash code X, an INTEGER_CST. */
1439 hashval_t
1440 int_cst_hasher::hash (tree x)
1442 const_tree const t = x;
1443 hashval_t code = TYPE_UID (TREE_TYPE (t));
1444 int i;
1446 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1447 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1449 return code;
1452 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1453 is the same as the value represented by Y, another INTEGER_CST node. */
1455 bool
1456 int_cst_hasher::equal (tree x, tree y)
1458 const_tree const xt = x;
1459 const_tree const yt = y;
1461 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1462 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1463 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1464 return false;
1466 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1467 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1468 return false;
1470 return true;
1473 /* Create an INT_CST node of TYPE and value CST.
1474 The returned node is always shared. For small integers we use a
1475 per-type vector cache, for larger ones we use a single hash table.
1476 The value is extended from its precision according to the sign of
1477 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1478 the upper bits and ensures that hashing and value equality based
1479 upon the underlying HOST_WIDE_INTs works without masking. */
1481 static tree
1482 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1484 tree t;
1485 int ix = -1;
1486 int limit = 0;
1488 gcc_assert (type);
1489 unsigned int prec = TYPE_PRECISION (type);
1490 signop sgn = TYPE_SIGN (type);
1492 /* Verify that everything is canonical. */
1493 int l = pcst.get_len ();
1494 if (l > 1)
1496 if (pcst.elt (l - 1) == 0)
1497 gcc_checking_assert (pcst.elt (l - 2) < 0);
1498 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1499 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1502 wide_int cst = wide_int::from (pcst, prec, sgn);
1503 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1505 if (ext_len == 1)
1507 /* We just need to store a single HOST_WIDE_INT. */
1508 HOST_WIDE_INT hwi;
1509 if (TYPE_UNSIGNED (type))
1510 hwi = cst.to_uhwi ();
1511 else
1512 hwi = cst.to_shwi ();
1514 switch (TREE_CODE (type))
1516 case NULLPTR_TYPE:
1517 gcc_assert (hwi == 0);
1518 /* Fallthru. */
1520 case POINTER_TYPE:
1521 case REFERENCE_TYPE:
1522 /* Cache NULL pointer and zero bounds. */
1523 if (hwi == 0)
1525 limit = 1;
1526 ix = 0;
1528 break;
1530 case BOOLEAN_TYPE:
1531 /* Cache false or true. */
1532 limit = 2;
1533 if (IN_RANGE (hwi, 0, 1))
1534 ix = hwi;
1535 break;
1537 case INTEGER_TYPE:
1538 case OFFSET_TYPE:
1539 if (TYPE_SIGN (type) == UNSIGNED)
1541 /* Cache [0, N). */
1542 limit = INTEGER_SHARE_LIMIT;
1543 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1544 ix = hwi;
1546 else
1548 /* Cache [-1, N). */
1549 limit = INTEGER_SHARE_LIMIT + 1;
1550 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1551 ix = hwi + 1;
1553 break;
1555 case ENUMERAL_TYPE:
1556 break;
1558 default:
1559 gcc_unreachable ();
1562 if (ix >= 0)
1564 /* Look for it in the type's vector of small shared ints. */
1565 if (!TYPE_CACHED_VALUES_P (type))
1567 TYPE_CACHED_VALUES_P (type) = 1;
1568 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1571 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1572 if (t)
1573 /* Make sure no one is clobbering the shared constant. */
1574 gcc_checking_assert (TREE_TYPE (t) == type
1575 && TREE_INT_CST_NUNITS (t) == 1
1576 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1577 && TREE_INT_CST_EXT_NUNITS (t) == 1
1578 && TREE_INT_CST_ELT (t, 0) == hwi);
1579 else
1581 /* Create a new shared int. */
1582 t = build_new_int_cst (type, cst);
1583 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1586 else
1588 /* Use the cache of larger shared ints, using int_cst_node as
1589 a temporary. */
1591 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1592 TREE_TYPE (int_cst_node) = type;
1594 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1595 t = *slot;
1596 if (!t)
1598 /* Insert this one into the hash table. */
1599 t = int_cst_node;
1600 *slot = t;
1601 /* Make a new node for next time round. */
1602 int_cst_node = make_int_cst (1, 1);
1606 else
1608 /* The value either hashes properly or we drop it on the floor
1609 for the gc to take care of. There will not be enough of them
1610 to worry about. */
1612 tree nt = build_new_int_cst (type, cst);
1613 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1614 t = *slot;
1615 if (!t)
1617 /* Insert this one into the hash table. */
1618 t = nt;
1619 *slot = t;
1621 else
1622 ggc_free (nt);
1625 return t;
1628 hashval_t
1629 poly_int_cst_hasher::hash (tree t)
1631 inchash::hash hstate;
1633 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1634 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1635 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1637 return hstate.end ();
1640 bool
1641 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1643 if (TREE_TYPE (x) != y.first)
1644 return false;
1645 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1646 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1647 return false;
1648 return true;
1651 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1652 The elements must also have type TYPE. */
1654 tree
1655 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1657 unsigned int prec = TYPE_PRECISION (type);
1658 gcc_assert (prec <= values.coeffs[0].get_precision ());
1659 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1661 inchash::hash h;
1662 h.add_int (TYPE_UID (type));
1663 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1664 h.add_wide_int (c.coeffs[i]);
1665 poly_int_cst_hasher::compare_type comp (type, &c);
1666 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1667 INSERT);
1668 if (*slot == NULL_TREE)
1670 tree coeffs[NUM_POLY_INT_COEFFS];
1671 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1672 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1673 *slot = build_new_poly_int_cst (type, coeffs);
1675 return *slot;
1678 /* Create a constant tree with value VALUE in type TYPE. */
1680 tree
1681 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1683 if (value.is_constant ())
1684 return wide_int_to_tree_1 (type, value.coeffs[0]);
1685 return build_poly_int_cst (type, value);
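/* Editorial note, not in the original source: a consequence of the sharing
   scheme above is that INTEGER_CSTs of the same type can be compared by
   pointer.  A sketch:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);
     gcc_checking_assert (a == b);

   Small values (0 .. INTEGER_SHARE_LIMIT - 1, plus -1 for signed types) come
   from the per-type TYPE_CACHED_VALUES vector; larger ones go through
   int_cst_hash_table.  */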
1688 void
1689 cache_integer_cst (tree t)
1691 tree type = TREE_TYPE (t);
1692 int ix = -1;
1693 int limit = 0;
1694 int prec = TYPE_PRECISION (type);
1696 gcc_assert (!TREE_OVERFLOW (t));
1698 switch (TREE_CODE (type))
1700 case NULLPTR_TYPE:
1701 gcc_assert (integer_zerop (t));
1702 /* Fallthru. */
1704 case POINTER_TYPE:
1705 case REFERENCE_TYPE:
1706 /* Cache NULL pointer. */
1707 if (integer_zerop (t))
1709 limit = 1;
1710 ix = 0;
1712 break;
1714 case BOOLEAN_TYPE:
1715 /* Cache false or true. */
1716 limit = 2;
1717 if (wi::ltu_p (wi::to_wide (t), 2))
1718 ix = TREE_INT_CST_ELT (t, 0);
1719 break;
1721 case INTEGER_TYPE:
1722 case OFFSET_TYPE:
1723 if (TYPE_UNSIGNED (type))
1725 /* Cache 0..N */
1726 limit = INTEGER_SHARE_LIMIT;
1728 /* This is a little hokey, but if the prec is smaller than
1729 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1730 obvious test will not get the correct answer. */
1731 if (prec < HOST_BITS_PER_WIDE_INT)
1733 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1734 ix = tree_to_uhwi (t);
1736 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1737 ix = tree_to_uhwi (t);
1739 else
1741 /* Cache -1..N */
1742 limit = INTEGER_SHARE_LIMIT + 1;
1744 if (integer_minus_onep (t))
1745 ix = 0;
1746 else if (!wi::neg_p (wi::to_wide (t)))
1748 if (prec < HOST_BITS_PER_WIDE_INT)
1750 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1751 ix = tree_to_shwi (t) + 1;
1753 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1754 ix = tree_to_shwi (t) + 1;
1757 break;
1759 case ENUMERAL_TYPE:
1760 break;
1762 default:
1763 gcc_unreachable ();
1766 if (ix >= 0)
1768 /* Look for it in the type's vector of small shared ints. */
1769 if (!TYPE_CACHED_VALUES_P (type))
1771 TYPE_CACHED_VALUES_P (type) = 1;
1772 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1775 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1776 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1778 else
1780 /* Use the cache of larger shared ints. */
1781 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1782 /* If there is already an entry for the number verify it's the
1783 same. */
1784 if (*slot)
1785 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1786 else
1787 /* Otherwise insert this one into the hash table. */
1788 *slot = t;
1793 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1794 and the rest are zeros. */
1796 tree
1797 build_low_bits_mask (tree type, unsigned bits)
1799 gcc_assert (bits <= TYPE_PRECISION (type));
1801 return wide_int_to_tree (type, wi::mask (bits, false,
1802 TYPE_PRECISION (type)));
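/* Editorial note, not in the original source: e.g. for a 32-bit unsigned
   type, build_low_bits_mask (type, 4) yields the constant 0xf and
   build_low_bits_mask (type, 32) yields 0xffffffff (all bits set).  */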
1805 /* Checks that X is an integer constant that can be expressed in a (signed or
1806 unsigned) HOST_WIDE_INT without loss of precision. */
1808 bool
1809 cst_and_fits_in_hwi (const_tree x)
1811 return (TREE_CODE (x) == INTEGER_CST
1812 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1815 /* Build a newly constructed VECTOR_CST with the given values of
1816 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1818 tree
1819 make_vector (unsigned log2_npatterns,
1820 unsigned int nelts_per_pattern MEM_STAT_DECL)
1822 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1823 tree t;
1824 unsigned npatterns = 1 << log2_npatterns;
1825 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1826 unsigned length = (sizeof (struct tree_vector)
1827 + (encoded_nelts - 1) * sizeof (tree));
1829 record_node_allocation_statistics (VECTOR_CST, length);
1831 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1833 TREE_SET_CODE (t, VECTOR_CST);
1834 TREE_CONSTANT (t) = 1;
1835 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1836 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1838 return t;
1841 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1842 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1844 tree
1845 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1847 unsigned HOST_WIDE_INT idx, nelts;
1848 tree value;
1850 /* We can't construct a VECTOR_CST for a variable number of elements. */
1851 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1852 tree_vector_builder vec (type, nelts, 1);
1853 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1855 if (TREE_CODE (value) == VECTOR_CST)
1857 /* If NELTS is constant then this must be too. */
1858 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1859 for (unsigned i = 0; i < sub_nelts; ++i)
1860 vec.quick_push (VECTOR_CST_ELT (value, i));
1862 else
1863 vec.quick_push (value);
1865 while (vec.length () < nelts)
1866 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1868 return vec.build ();
1871 /* Build a vector of type VECTYPE in which every element is SC. */
1872 tree
1873 build_vector_from_val (tree vectype, tree sc)
1875 unsigned HOST_WIDE_INT i, nunits;
1877 if (sc == error_mark_node)
1878 return sc;
1880 /* Verify that the vector type is suitable for SC. Note that there
1881 is some inconsistency in the type-system with respect to restrict
1882 qualifications of pointers. Vector types always have a main-variant
1883 element type and the qualification is applied to the vector-type.
1884 So TREE_TYPE (vector-type) does not return a properly qualified
1885 vector element-type. */
1886 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1887 TREE_TYPE (vectype)));
1889 if (CONSTANT_CLASS_P (sc))
1891 tree_vector_builder v (vectype, 1, 1);
1892 v.quick_push (sc);
1893 return v.build ();
1895 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1896 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1897 else
1899 vec<constructor_elt, va_gc> *v;
1900 vec_alloc (v, nunits);
1901 for (i = 0; i < nunits; ++i)
1902 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1903 return build_constructor (vectype, v);
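/* Editorial note, not in the original source: assuming v4si_type is a vector
   type of four 32-bit ints (the name is made up),

     tree splat = build_vector_from_val (v4si_type, integer_zero_node);

   yields the VECTOR_CST { 0, 0, 0, 0 } via the CONSTANT_CLASS_P path above.
   A non-constant scalar instead produces a CONSTRUCTOR, or a
   VEC_DUPLICATE_EXPR when the number of vector elements is not constant.  */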
1907 /* Build a vector series of type TYPE in which element I has the value
1908 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1909 and a VEC_SERIES_EXPR otherwise. */
1911 tree
1912 build_vec_series (tree type, tree base, tree step)
1914 if (integer_zerop (step))
1915 return build_vector_from_val (type, base);
1916 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1918 tree_vector_builder builder (type, 1, 3);
1919 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1920 wi::to_wide (base) + wi::to_wide (step));
1921 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1922 wi::to_wide (elt1) + wi::to_wide (step));
1923 builder.quick_push (base);
1924 builder.quick_push (elt1);
1925 builder.quick_push (elt2);
1926 return builder.build ();
1928 return build2 (VEC_SERIES_EXPR, type, base, step);
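/* Editorial note, not in the original source: for constant BASE and STEP the
   builder above pushes only three elements because a VECTOR_CST encodes a
   stepped pattern as { BASE, BASE + STEP, BASE + 2 * STEP } and derives the
   remaining elements implicitly.  E.g. a series starting at 1 with step 2 in
   a four-element vector represents { 1, 3, 5, 7 } while storing only 1, 3
   and 5.  */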
1931 /* Return a vector with the same number of units and number of bits
1932 as VEC_TYPE, but in which the elements are a linear series of unsigned
1933 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1935 tree
1936 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1938 tree index_vec_type = vec_type;
1939 tree index_elt_type = TREE_TYPE (vec_type);
1940 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1941 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1943 index_elt_type = build_nonstandard_integer_type
1944 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1945 index_vec_type = build_vector_type (index_elt_type, nunits);
1948 tree_vector_builder v (index_vec_type, 1, 3);
1949 for (unsigned int i = 0; i < 3; ++i)
1950 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1951 return v.build ();
1954 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1955 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1957 void
1958 recompute_constructor_flags (tree c)
1960 unsigned int i;
1961 tree val;
1962 bool constant_p = true;
1963 bool side_effects_p = false;
1964 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1966 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1968 /* Mostly ctors will have elts that don't have side-effects, so
1969 the usual case is to scan all the elements. Hence a single
1970 loop for both const and side effects, rather than one loop
1971 each (with early outs). */
1972 if (!TREE_CONSTANT (val))
1973 constant_p = false;
1974 if (TREE_SIDE_EFFECTS (val))
1975 side_effects_p = true;
1978 TREE_SIDE_EFFECTS (c) = side_effects_p;
1979 TREE_CONSTANT (c) = constant_p;
1982 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1983 CONSTRUCTOR C. */
1985 void
1986 verify_constructor_flags (tree c)
1988 unsigned int i;
1989 tree val;
1990 bool constant_p = TREE_CONSTANT (c);
1991 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1992 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1994 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1996 if (constant_p && !TREE_CONSTANT (val))
1997 internal_error ("non-constant element in constant CONSTRUCTOR");
1998 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1999 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2003 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2004 are in the vec pointed to by VALS. */
2005 tree
2006 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
2008 tree c = make_node (CONSTRUCTOR);
2010 TREE_TYPE (c) = type;
2011 CONSTRUCTOR_ELTS (c) = vals;
2013 recompute_constructor_flags (c);
2015 return c;
2018 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2019 INDEX and VALUE. */
2020 tree
2021 build_constructor_single (tree type, tree index, tree value)
2023 vec<constructor_elt, va_gc> *v;
2024 constructor_elt elt = {index, value};
2026 vec_alloc (v, 1);
2027 v->quick_push (elt);
2029 return build_constructor (type, v);
2033 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2034 are in a list pointed to by VALS. */
2035 tree
2036 build_constructor_from_list (tree type, tree vals)
2038 tree t;
2039 vec<constructor_elt, va_gc> *v = NULL;
2041 if (vals)
2043 vec_alloc (v, list_length (vals));
2044 for (t = vals; t; t = TREE_CHAIN (t))
2045 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2048 return build_constructor (type, v);
2051 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2052 of elements, provided as index/value pairs. */
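/* For instance, with two hypothetical FIELD_DECLs F1 and F2 and values
V1 and V2, the call

  build_constructor_va (type, 2, f1, v1, f2, v2);

builds the same node as pushing the two index/value pairs by hand and
then calling build_constructor. */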
2054 tree
2055 build_constructor_va (tree type, int nelts, ...)
2057 vec<constructor_elt, va_gc> *v = NULL;
2058 va_list p;
2060 va_start (p, nelts);
2061 vec_alloc (v, nelts);
2062 while (nelts--)
2064 tree index = va_arg (p, tree);
2065 tree value = va_arg (p, tree);
2066 CONSTRUCTOR_APPEND_ELT (v, index, value);
2068 va_end (p);
2069 return build_constructor (type, v);
2072 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
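/* A clobber is a volatile empty CONSTRUCTOR; in GIMPLE it typically
appears on the right-hand side of an assignment such as
"var ={v} {CLOBBER};" to mark the end of the variable's lifetime. */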
2074 tree
2075 build_clobber (tree type)
2077 tree clobber = build_constructor (type, NULL);
2078 TREE_THIS_VOLATILE (clobber) = true;
2079 return clobber;
2082 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2084 tree
2085 build_fixed (tree type, FIXED_VALUE_TYPE f)
2087 tree v;
2088 FIXED_VALUE_TYPE *fp;
2090 v = make_node (FIXED_CST);
2091 fp = ggc_alloc<fixed_value> ();
2092 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2094 TREE_TYPE (v) = type;
2095 TREE_FIXED_CST_PTR (v) = fp;
2096 return v;
2099 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2101 tree
2102 build_real (tree type, REAL_VALUE_TYPE d)
2104 tree v;
2105 REAL_VALUE_TYPE *dp;
2106 int overflow = 0;
2108 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2109 Consider doing it via real_convert now. */
2111 v = make_node (REAL_CST);
2112 dp = ggc_alloc<real_value> ();
2113 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2115 TREE_TYPE (v) = type;
2116 TREE_REAL_CST_PTR (v) = dp;
2117 TREE_OVERFLOW (v) = overflow;
2118 return v;
2121 /* Like build_real, but first truncate D to the type. */
2123 tree
2124 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2126 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2129 /* Return a new REAL_CST node whose type is TYPE
2130 and whose value is the integer value of the INTEGER_CST node I. */
2132 REAL_VALUE_TYPE
2133 real_value_from_int_cst (const_tree type, const_tree i)
2135 REAL_VALUE_TYPE d;
2137 /* Clear all bits of the real value type so that we can later do
2138 bitwise comparisons to see if two values are the same. */
2139 memset (&d, 0, sizeof d);
2141 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2142 TYPE_SIGN (TREE_TYPE (i)));
2143 return d;
2146 /* Given a tree representing an integer constant I, return a tree
2147 representing the same value as a floating-point constant of type TYPE. */
2149 tree
2150 build_real_from_int_cst (tree type, const_tree i)
2152 tree v;
2153 int overflow = TREE_OVERFLOW (i);
2155 v = build_real (type, real_value_from_int_cst (type, i));
2157 TREE_OVERFLOW (v) |= overflow;
2158 return v;
2161 /* Return a newly constructed STRING_CST node whose value is
2162 the LEN characters at STR.
2163 Note that for a C string literal, LEN should include the trailing NUL.
2164 The TREE_TYPE is not initialized. */
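/* For example, the C literal "abc" would be built as
build_string (4, "abc"), the count including the trailing NUL. */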
2166 tree
2167 build_string (int len, const char *str)
2169 tree s;
2170 size_t length;
2172 /* Do not waste bytes provided by padding of struct tree_string. */
2173 length = len + offsetof (struct tree_string, str) + 1;
2175 record_node_allocation_statistics (STRING_CST, length);
2177 s = (tree) ggc_internal_alloc (length);
2179 memset (s, 0, sizeof (struct tree_typed));
2180 TREE_SET_CODE (s, STRING_CST);
2181 TREE_CONSTANT (s) = 1;
2182 TREE_STRING_LENGTH (s) = len;
2183 memcpy (s->string.str, str, len);
2184 s->string.str[len] = '\0';
2186 return s;
2189 /* Return a newly constructed COMPLEX_CST node whose value is
2190 specified by the real and imaginary parts REAL and IMAG.
2191 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2192 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2194 tree
2195 build_complex (tree type, tree real, tree imag)
2197 tree t = make_node (COMPLEX_CST);
2199 TREE_REALPART (t) = real;
2200 TREE_IMAGPART (t) = imag;
2201 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2202 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2203 return t;
2206 /* Build a complex (inf +- 0i), such as for the result of cproj.
2207 TYPE is the complex tree type of the result. If NEG is true, the
2208 imaginary zero is negative. */
2210 tree
2211 build_complex_inf (tree type, bool neg)
2213 REAL_VALUE_TYPE rinf, rzero = dconst0;
2215 real_inf (&rinf);
2216 rzero.sign = neg;
2217 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2218 build_real (TREE_TYPE (type), rzero));
2221 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2222 element is set to 1. In particular, this is 1 + i for complex types. */
2224 tree
2225 build_each_one_cst (tree type)
2227 if (TREE_CODE (type) == COMPLEX_TYPE)
2229 tree scalar = build_one_cst (TREE_TYPE (type));
2230 return build_complex (type, scalar, scalar);
2232 else
2233 return build_one_cst (type);
2236 /* Return a constant of arithmetic type TYPE which is the
2237 multiplicative identity of the set TYPE. */
2239 tree
2240 build_one_cst (tree type)
2242 switch (TREE_CODE (type))
2244 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2245 case POINTER_TYPE: case REFERENCE_TYPE:
2246 case OFFSET_TYPE:
2247 return build_int_cst (type, 1);
2249 case REAL_TYPE:
2250 return build_real (type, dconst1);
2252 case FIXED_POINT_TYPE:
2253 /* We can only generate 1 for accum types. */
2254 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2255 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2257 case VECTOR_TYPE:
2259 tree scalar = build_one_cst (TREE_TYPE (type));
2261 return build_vector_from_val (type, scalar);
2264 case COMPLEX_TYPE:
2265 return build_complex (type,
2266 build_one_cst (TREE_TYPE (type)),
2267 build_zero_cst (TREE_TYPE (type)));
2269 default:
2270 gcc_unreachable ();
2274 /* Return an integer of type TYPE containing all 1's in as much precision as
2275 it contains, or a complex or vector whose subparts are such integers. */
2277 tree
2278 build_all_ones_cst (tree type)
2280 if (TREE_CODE (type) == COMPLEX_TYPE)
2282 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2283 return build_complex (type, scalar, scalar);
2285 else
2286 return build_minus_one_cst (type);
2289 /* Return a constant of arithmetic type TYPE which is the
2290 opposite of the multiplicative identity of the set TYPE. */
2292 tree
2293 build_minus_one_cst (tree type)
2295 switch (TREE_CODE (type))
2297 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2298 case POINTER_TYPE: case REFERENCE_TYPE:
2299 case OFFSET_TYPE:
2300 return build_int_cst (type, -1);
2302 case REAL_TYPE:
2303 return build_real (type, dconstm1);
2305 case FIXED_POINT_TYPE:
2306 /* We can only generate -1 for accum types. */
2307 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2308 return build_fixed (type,
2309 fixed_from_double_int (double_int_minus_one,
2310 SCALAR_TYPE_MODE (type)));
2312 case VECTOR_TYPE:
2314 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2316 return build_vector_from_val (type, scalar);
2319 case COMPLEX_TYPE:
2320 return build_complex (type,
2321 build_minus_one_cst (TREE_TYPE (type)),
2322 build_zero_cst (TREE_TYPE (type)));
2324 default:
2325 gcc_unreachable ();
2329 /* Build 0 constant of type TYPE. This is used by constructor folding
2330 and thus the constant should be represented in memory by
2331 zero(es). */
2333 tree
2334 build_zero_cst (tree type)
2336 switch (TREE_CODE (type))
2338 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2339 case POINTER_TYPE: case REFERENCE_TYPE:
2340 case OFFSET_TYPE: case NULLPTR_TYPE:
2341 return build_int_cst (type, 0);
2343 case REAL_TYPE:
2344 return build_real (type, dconst0);
2346 case FIXED_POINT_TYPE:
2347 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2349 case VECTOR_TYPE:
2351 tree scalar = build_zero_cst (TREE_TYPE (type));
2353 return build_vector_from_val (type, scalar);
2356 case COMPLEX_TYPE:
2358 tree zero = build_zero_cst (TREE_TYPE (type));
2360 return build_complex (type, zero, zero);
2363 default:
2364 if (!AGGREGATE_TYPE_P (type))
2365 return fold_convert (type, integer_zero_node);
2366 return build_constructor (type, NULL);
2371 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2373 tree
2374 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2376 tree t;
2377 size_t length = (offsetof (struct tree_binfo, base_binfos)
2378 + vec<tree, va_gc>::embedded_size (base_binfos));
2380 record_node_allocation_statistics (TREE_BINFO, length);
2382 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2384 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2386 TREE_SET_CODE (t, TREE_BINFO);
2388 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2390 return t;
2393 /* Create a CASE_LABEL_EXPR tree node and return it. */
2395 tree
2396 build_case_label (tree low_value, tree high_value, tree label_decl)
2398 tree t = make_node (CASE_LABEL_EXPR);
2400 TREE_TYPE (t) = void_type_node;
2401 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2403 CASE_LOW (t) = low_value;
2404 CASE_HIGH (t) = high_value;
2405 CASE_LABEL (t) = label_decl;
2406 CASE_CHAIN (t) = NULL_TREE;
2408 return t;
2411 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2412 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2413 The latter determines the length of the HOST_WIDE_INT vector. */
2415 tree
2416 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2418 tree t;
2419 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2420 + sizeof (struct tree_int_cst));
2422 gcc_assert (len);
2423 record_node_allocation_statistics (INTEGER_CST, length);
2425 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2427 TREE_SET_CODE (t, INTEGER_CST);
2428 TREE_INT_CST_NUNITS (t) = len;
2429 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2430 /* to_offset can only be applied to trees that are offset_int-sized
2431 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2432 must be exactly the precision of offset_int and so LEN is correct. */
2433 if (ext_len <= OFFSET_INT_ELTS)
2434 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2435 else
2436 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2438 TREE_CONSTANT (t) = 1;
2440 return t;
2443 /* Build a newly constructed TREE_VEC node of length LEN. */
2445 tree
2446 make_tree_vec (int len MEM_STAT_DECL)
2448 tree t;
2449 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2451 record_node_allocation_statistics (TREE_VEC, length);
2453 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2455 TREE_SET_CODE (t, TREE_VEC);
2456 TREE_VEC_LENGTH (t) = len;
2458 return t;
2461 /* Grow a TREE_VEC node to new length LEN. */
2463 tree
2464 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2466 gcc_assert (TREE_CODE (v) == TREE_VEC);
2468 int oldlen = TREE_VEC_LENGTH (v);
2469 gcc_assert (len > oldlen);
2471 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2472 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2474 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2476 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2478 TREE_VEC_LENGTH (v) = len;
2480 return v;
2483 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2484 fixed, and scalar, complex or vector. */
2486 bool
2487 zerop (const_tree expr)
2489 return (integer_zerop (expr)
2490 || real_zerop (expr)
2491 || fixed_zerop (expr));
2494 /* Return 1 if EXPR is the integer constant zero or a complex constant
2495 of zero. */
2497 bool
2498 integer_zerop (const_tree expr)
2500 switch (TREE_CODE (expr))
2502 case INTEGER_CST:
2503 return wi::to_wide (expr) == 0;
2504 case COMPLEX_CST:
2505 return (integer_zerop (TREE_REALPART (expr))
2506 && integer_zerop (TREE_IMAGPART (expr)));
2507 case VECTOR_CST:
2508 return (VECTOR_CST_NPATTERNS (expr) == 1
2509 && VECTOR_CST_DUPLICATE_P (expr)
2510 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2511 default:
2512 return false;
2516 /* Return 1 if EXPR is the integer constant one or the corresponding
2517 complex constant. */
2519 bool
2520 integer_onep (const_tree expr)
2522 switch (TREE_CODE (expr))
2524 case INTEGER_CST:
2525 return wi::eq_p (wi::to_widest (expr), 1);
2526 case COMPLEX_CST:
2527 return (integer_onep (TREE_REALPART (expr))
2528 && integer_zerop (TREE_IMAGPART (expr)));
2529 case VECTOR_CST:
2530 return (VECTOR_CST_NPATTERNS (expr) == 1
2531 && VECTOR_CST_DUPLICATE_P (expr)
2532 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2533 default:
2534 return false;
2538 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2539 return 1 if every piece is the integer constant one. */
2541 bool
2542 integer_each_onep (const_tree expr)
2544 if (TREE_CODE (expr) == COMPLEX_CST)
2545 return (integer_onep (TREE_REALPART (expr))
2546 && integer_onep (TREE_IMAGPART (expr)));
2547 else
2548 return integer_onep (expr);
2551 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2552 it contains, or a complex or vector whose subparts are such integers. */
2554 bool
2555 integer_all_onesp (const_tree expr)
2557 if (TREE_CODE (expr) == COMPLEX_CST
2558 && integer_all_onesp (TREE_REALPART (expr))
2559 && integer_all_onesp (TREE_IMAGPART (expr)))
2560 return true;
2562 else if (TREE_CODE (expr) == VECTOR_CST)
2563 return (VECTOR_CST_NPATTERNS (expr) == 1
2564 && VECTOR_CST_DUPLICATE_P (expr)
2565 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2567 else if (TREE_CODE (expr) != INTEGER_CST)
2568 return false;
2570 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2571 == wi::to_wide (expr));
2574 /* Return 1 if EXPR is the integer constant minus one. */
2576 bool
2577 integer_minus_onep (const_tree expr)
2579 if (TREE_CODE (expr) == COMPLEX_CST)
2580 return (integer_all_onesp (TREE_REALPART (expr))
2581 && integer_zerop (TREE_IMAGPART (expr)));
2582 else
2583 return integer_all_onesp (expr);
2586 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2587 one bit on). */
2589 bool
2590 integer_pow2p (const_tree expr)
2592 if (TREE_CODE (expr) == COMPLEX_CST
2593 && integer_pow2p (TREE_REALPART (expr))
2594 && integer_zerop (TREE_IMAGPART (expr)))
2595 return true;
2597 if (TREE_CODE (expr) != INTEGER_CST)
2598 return false;
2600 return wi::popcount (wi::to_wide (expr)) == 1;
2603 /* Return 1 if EXPR is an integer constant other than zero or a
2604 complex constant other than zero. */
2606 bool
2607 integer_nonzerop (const_tree expr)
2609 return ((TREE_CODE (expr) == INTEGER_CST
2610 && wi::to_wide (expr) != 0)
2611 || (TREE_CODE (expr) == COMPLEX_CST
2612 && (integer_nonzerop (TREE_REALPART (expr))
2613 || integer_nonzerop (TREE_IMAGPART (expr)))));
2616 /* Return 1 if EXPR is the integer constant one. For vector,
2617 return 1 if every piece is the integer constant minus one
2618 (representing the value TRUE). */
2620 bool
2621 integer_truep (const_tree expr)
2623 if (TREE_CODE (expr) == VECTOR_CST)
2624 return integer_all_onesp (expr);
2625 return integer_onep (expr);
2628 /* Return 1 if EXPR is the fixed-point constant zero. */
2630 bool
2631 fixed_zerop (const_tree expr)
2633 return (TREE_CODE (expr) == FIXED_CST
2634 && TREE_FIXED_CST (expr).data.is_zero ());
2637 /* Return the power of two represented by a tree node known to be a
2638 power of two. */
2640 int
2641 tree_log2 (const_tree expr)
2643 if (TREE_CODE (expr) == COMPLEX_CST)
2644 return tree_log2 (TREE_REALPART (expr));
2646 return wi::exact_log2 (wi::to_wide (expr));
2649 /* Similar, but return the largest integer Y such that 2 ** Y is less
2650 than or equal to EXPR. */
2652 int
2653 tree_floor_log2 (const_tree expr)
2655 if (TREE_CODE (expr) == COMPLEX_CST)
2656 return tree_log2 (TREE_REALPART (expr));
2658 return wi::floor_log2 (wi::to_wide (expr));
2661 /* Return number of known trailing zero bits in EXPR, or, if the value of
2662 EXPR is known to be zero, the precision of its type. */
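/* For instance, for (x << 3) + 8 with nothing known about X, the shift
contributes MIN (0 + 3, prec) = 3 and the constant 8 contributes
ctz (8) = 3, so the result is known to have at least 3 trailing
zero bits. */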
2664 unsigned int
2665 tree_ctz (const_tree expr)
2667 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2668 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2669 return 0;
2671 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2672 switch (TREE_CODE (expr))
2674 case INTEGER_CST:
2675 ret1 = wi::ctz (wi::to_wide (expr));
2676 return MIN (ret1, prec);
2677 case SSA_NAME:
2678 ret1 = wi::ctz (get_nonzero_bits (expr));
2679 return MIN (ret1, prec);
2680 case PLUS_EXPR:
2681 case MINUS_EXPR:
2682 case BIT_IOR_EXPR:
2683 case BIT_XOR_EXPR:
2684 case MIN_EXPR:
2685 case MAX_EXPR:
2686 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2687 if (ret1 == 0)
2688 return ret1;
2689 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2690 return MIN (ret1, ret2);
2691 case POINTER_PLUS_EXPR:
2692 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2693 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2694 /* The second operand is sizetype, which could in theory be
2695 wider than the pointer's precision. Make sure we never
2696 return more than prec. */
2697 ret2 = MIN (ret2, prec);
2698 return MIN (ret1, ret2);
2699 case BIT_AND_EXPR:
2700 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2701 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2702 return MAX (ret1, ret2);
2703 case MULT_EXPR:
2704 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2705 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2706 return MIN (ret1 + ret2, prec);
2707 case LSHIFT_EXPR:
2708 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2709 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2710 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2712 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2713 return MIN (ret1 + ret2, prec);
2715 return ret1;
2716 case RSHIFT_EXPR:
2717 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2718 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2720 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2721 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2722 if (ret1 > ret2)
2723 return ret1 - ret2;
2725 return 0;
2726 case TRUNC_DIV_EXPR:
2727 case CEIL_DIV_EXPR:
2728 case FLOOR_DIV_EXPR:
2729 case ROUND_DIV_EXPR:
2730 case EXACT_DIV_EXPR:
2731 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2732 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2734 int l = tree_log2 (TREE_OPERAND (expr, 1));
2735 if (l >= 0)
2737 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2738 ret2 = l;
2739 if (ret1 > ret2)
2740 return ret1 - ret2;
2743 return 0;
2744 CASE_CONVERT:
2745 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2746 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2747 ret1 = prec;
2748 return MIN (ret1, prec);
2749 case SAVE_EXPR:
2750 return tree_ctz (TREE_OPERAND (expr, 0));
2751 case COND_EXPR:
2752 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2753 if (ret1 == 0)
2754 return 0;
2755 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2756 return MIN (ret1, ret2);
2757 case COMPOUND_EXPR:
2758 return tree_ctz (TREE_OPERAND (expr, 1));
2759 case ADDR_EXPR:
2760 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2761 if (ret1 > BITS_PER_UNIT)
2763 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2764 return MIN (ret1, prec);
2766 return 0;
2767 default:
2768 return 0;
2772 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2773 decimal float constants, so don't return 1 for them. */
2775 bool
2776 real_zerop (const_tree expr)
2778 switch (TREE_CODE (expr))
2780 case REAL_CST:
2781 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2782 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2783 case COMPLEX_CST:
2784 return real_zerop (TREE_REALPART (expr))
2785 && real_zerop (TREE_IMAGPART (expr));
2786 case VECTOR_CST:
2788 /* Don't simply check for a duplicate because the predicate
2789 accepts both +0.0 and -0.0. */
2790 unsigned count = vector_cst_encoded_nelts (expr);
2791 for (unsigned int i = 0; i < count; ++i)
2792 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2793 return false;
2794 return true;
2796 default:
2797 return false;
2801 /* Return 1 if EXPR is the real constant one in real or complex form.
2802 Trailing zeroes matter for decimal float constants, so don't return
2803 1 for them. */
2805 bool
2806 real_onep (const_tree expr)
2808 switch (TREE_CODE (expr))
2810 case REAL_CST:
2811 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2812 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2813 case COMPLEX_CST:
2814 return real_onep (TREE_REALPART (expr))
2815 && real_zerop (TREE_IMAGPART (expr));
2816 case VECTOR_CST:
2817 return (VECTOR_CST_NPATTERNS (expr) == 1
2818 && VECTOR_CST_DUPLICATE_P (expr)
2819 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2820 default:
2821 return false;
2825 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2826 matter for decimal float constants, so don't return 1 for them. */
2828 bool
2829 real_minus_onep (const_tree expr)
2831 switch (TREE_CODE (expr))
2833 case REAL_CST:
2834 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2835 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2836 case COMPLEX_CST:
2837 return real_minus_onep (TREE_REALPART (expr))
2838 && real_zerop (TREE_IMAGPART (expr));
2839 case VECTOR_CST:
2840 return (VECTOR_CST_NPATTERNS (expr) == 1
2841 && VECTOR_CST_DUPLICATE_P (expr)
2842 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2843 default:
2844 return false;
2848 /* Nonzero if EXP is a constant or a cast of a constant. */
2850 bool
2851 really_constant_p (const_tree exp)
2853 /* This is not quite the same as STRIP_NOPS. It does more. */
2854 while (CONVERT_EXPR_P (exp)
2855 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2856 exp = TREE_OPERAND (exp, 0);
2857 return TREE_CONSTANT (exp);
2860 /* Return true if T holds a polynomial pointer difference, storing it in
2861 *VALUE if so. A true return means that T's precision is no greater
2862 than 64 bits, which is the largest address space we support, so *VALUE
2863 never loses precision. However, the signedness of the result does
2864 not necessarily match the signedness of T: sometimes an unsigned type
2865 like sizetype is used to encode a value that is actually negative. */
2867 bool
2868 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2870 if (!t)
2871 return false;
2872 if (TREE_CODE (t) == INTEGER_CST)
2874 if (!cst_and_fits_in_hwi (t))
2875 return false;
2876 *value = int_cst_value (t);
2877 return true;
2879 if (POLY_INT_CST_P (t))
2881 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2882 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2883 return false;
2884 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2885 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2886 return true;
2888 return false;
2891 poly_int64
2892 tree_to_poly_int64 (const_tree t)
2894 gcc_assert (tree_fits_poly_int64_p (t));
2895 if (POLY_INT_CST_P (t))
2896 return poly_int_cst_value (t).force_shwi ();
2897 return TREE_INT_CST_LOW (t);
2900 poly_uint64
2901 tree_to_poly_uint64 (const_tree t)
2903 gcc_assert (tree_fits_poly_uint64_p (t));
2904 if (POLY_INT_CST_P (t))
2905 return poly_int_cst_value (t).force_uhwi ();
2906 return TREE_INT_CST_LOW (t);
2909 /* Return first list element whose TREE_VALUE is ELEM.
2910 Return 0 if ELEM is not in LIST. */
2912 tree
2913 value_member (tree elem, tree list)
2915 while (list)
2917 if (elem == TREE_VALUE (list))
2918 return list;
2919 list = TREE_CHAIN (list);
2921 return NULL_TREE;
2924 /* Return first list element whose TREE_PURPOSE is ELEM.
2925 Return 0 if ELEM is not in LIST. */
2927 tree
2928 purpose_member (const_tree elem, tree list)
2930 while (list)
2932 if (elem == TREE_PURPOSE (list))
2933 return list;
2934 list = TREE_CHAIN (list);
2936 return NULL_TREE;
2939 /* Return true if ELEM is in V. */
2941 bool
2942 vec_member (const_tree elem, vec<tree, va_gc> *v)
2944 unsigned ix;
2945 tree t;
2946 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2947 if (elem == t)
2948 return true;
2949 return false;
2952 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2953 NULL_TREE if there is no such element. */
2955 tree
2956 chain_index (int idx, tree chain)
2958 for (; chain && idx > 0; --idx)
2959 chain = TREE_CHAIN (chain);
2960 return chain;
2963 /* Return nonzero if ELEM is part of the chain CHAIN. */
2965 bool
2966 chain_member (const_tree elem, const_tree chain)
2968 while (chain)
2970 if (elem == chain)
2971 return true;
2972 chain = DECL_CHAIN (chain);
2975 return false;
2978 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2979 We expect a null pointer to mark the end of the chain.
2980 This is the Lisp primitive `length'. */
2982 int
2983 list_length (const_tree t)
2985 const_tree p = t;
2986 #ifdef ENABLE_TREE_CHECKING
2987 const_tree q = t;
2988 #endif
2989 int len = 0;
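/* With tree checking enabled, Q advances one link for every two links
that P advances; if the chain is circular, P eventually meets Q and
the assertion below fires (Floyd's cycle detection). */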
2991 while (p)
2993 p = TREE_CHAIN (p);
2994 #ifdef ENABLE_TREE_CHECKING
2995 if (len % 2)
2996 q = TREE_CHAIN (q);
2997 gcc_assert (p != q);
2998 #endif
2999 len++;
3002 return len;
3005 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3006 UNION_TYPE TYPE, or NULL_TREE if none. */
3008 tree
3009 first_field (const_tree type)
3011 tree t = TYPE_FIELDS (type);
3012 while (t && TREE_CODE (t) != FIELD_DECL)
3013 t = TREE_CHAIN (t);
3014 return t;
3017 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3018 by modifying the last node in chain 1 to point to chain 2.
3019 This is the Lisp primitive `nconc'. */
3021 tree
3022 chainon (tree op1, tree op2)
3024 tree t1;
3026 if (!op1)
3027 return op2;
3028 if (!op2)
3029 return op1;
3031 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3032 continue;
3033 TREE_CHAIN (t1) = op2;
3035 #ifdef ENABLE_TREE_CHECKING
3037 tree t2;
3038 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3039 gcc_assert (t2 != t1);
3041 #endif
3043 return op1;
3046 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3048 tree
3049 tree_last (tree chain)
3051 tree next;
3052 if (chain)
3053 while ((next = TREE_CHAIN (chain)))
3054 chain = next;
3055 return chain;
3058 /* Reverse the order of elements in the chain T,
3059 and return the new head of the chain (old last element). */
3061 tree
3062 nreverse (tree t)
3064 tree prev = 0, decl, next;
3065 for (decl = t; decl; decl = next)
3067 /* We shouldn't be using this function to reverse BLOCK chains; we
3068 have blocks_nreverse for that. */
3069 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3070 next = TREE_CHAIN (decl);
3071 TREE_CHAIN (decl) = prev;
3072 prev = decl;
3074 return prev;
3077 /* Return a newly created TREE_LIST node whose
3078 purpose and value fields are PARM and VALUE. */
3080 tree
3081 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3083 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3084 TREE_PURPOSE (t) = parm;
3085 TREE_VALUE (t) = value;
3086 return t;
3089 /* Build a chain of TREE_LIST nodes from a vector. */
3091 tree
3092 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3094 tree ret = NULL_TREE;
3095 tree *pp = &ret;
3096 unsigned int i;
3097 tree t;
3098 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3100 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3101 pp = &TREE_CHAIN (*pp);
3103 return ret;
3106 /* Return a newly created TREE_LIST node whose
3107 purpose and value fields are PURPOSE and VALUE
3108 and whose TREE_CHAIN is CHAIN. */
3110 tree
3111 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3113 tree node;
3115 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3116 memset (node, 0, sizeof (struct tree_common));
3118 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3120 TREE_SET_CODE (node, TREE_LIST);
3121 TREE_CHAIN (node) = chain;
3122 TREE_PURPOSE (node) = purpose;
3123 TREE_VALUE (node) = value;
3124 return node;
3127 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3128 trees. */
3130 vec<tree, va_gc> *
3131 ctor_to_vec (tree ctor)
3133 vec<tree, va_gc> *vec;
3134 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3135 unsigned int ix;
3136 tree val;
3138 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3139 vec->quick_push (val);
3141 return vec;
3144 /* Return the size nominally occupied by an object of type TYPE
3145 when it resides in memory. The value is measured in units of bytes,
3146 and its data type is that normally used for type sizes
3147 (which is the first type created by make_signed_type or
3148 make_unsigned_type). */
3150 tree
3151 size_in_bytes_loc (location_t loc, const_tree type)
3153 tree t;
3155 if (type == error_mark_node)
3156 return integer_zero_node;
3158 type = TYPE_MAIN_VARIANT (type);
3159 t = TYPE_SIZE_UNIT (type);
3161 if (t == 0)
3163 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3164 return size_zero_node;
3167 return t;
3170 /* Return the size of TYPE (in bytes) as a wide integer
3171 or return -1 if the size can vary or is larger than an integer. */
3173 HOST_WIDE_INT
3174 int_size_in_bytes (const_tree type)
3176 tree t;
3178 if (type == error_mark_node)
3179 return 0;
3181 type = TYPE_MAIN_VARIANT (type);
3182 t = TYPE_SIZE_UNIT (type);
3184 if (t && tree_fits_uhwi_p (t))
3185 return TREE_INT_CST_LOW (t);
3186 else
3187 return -1;
3190 /* Return the maximum size of TYPE (in bytes) as a wide integer
3191 or return -1 if the size can vary or is larger than an integer. */
3193 HOST_WIDE_INT
3194 max_int_size_in_bytes (const_tree type)
3196 HOST_WIDE_INT size = -1;
3197 tree size_tree;
3199 /* If this is an array type, check for a possible MAX_SIZE attached. */
3201 if (TREE_CODE (type) == ARRAY_TYPE)
3203 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3205 if (size_tree && tree_fits_uhwi_p (size_tree))
3206 size = tree_to_uhwi (size_tree);
3209 /* If we still haven't been able to get a size, see if the language
3210 can compute a maximum size. */
3212 if (size == -1)
3214 size_tree = lang_hooks.types.max_size (type);
3216 if (size_tree && tree_fits_uhwi_p (size_tree))
3217 size = tree_to_uhwi (size_tree);
3220 return size;
3223 /* Return the bit position of FIELD, in bits from the start of the record.
3224 This is a tree of type bitsizetype. */
3226 tree
3227 bit_position (const_tree field)
3229 return bit_from_pos (DECL_FIELD_OFFSET (field),
3230 DECL_FIELD_BIT_OFFSET (field));
3233 /* Return the byte position of FIELD, in bytes from the start of the record.
3234 This is a tree of type sizetype. */
3236 tree
3237 byte_position (const_tree field)
3239 return byte_from_pos (DECL_FIELD_OFFSET (field),
3240 DECL_FIELD_BIT_OFFSET (field));
3243 /* Likewise, but return as an integer. It must be representable in
3244 that way (since it could be a signed value, we don't have the
3245 option of returning -1 like int_size_in_bytes can). */
3247 HOST_WIDE_INT
3248 int_byte_position (const_tree field)
3250 return tree_to_shwi (byte_position (field));
3253 /* Return the strictest alignment, in bits, that T is known to have. */
3255 unsigned int
3256 expr_align (const_tree t)
3258 unsigned int align0, align1;
3260 switch (TREE_CODE (t))
3262 CASE_CONVERT: case NON_LVALUE_EXPR:
3263 /* If we have conversions, we know that the alignment of the
3264 object must meet each of the alignments of the types. */
3265 align0 = expr_align (TREE_OPERAND (t, 0));
3266 align1 = TYPE_ALIGN (TREE_TYPE (t));
3267 return MAX (align0, align1);
3269 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3270 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3271 case CLEANUP_POINT_EXPR:
3272 /* These don't change the alignment of an object. */
3273 return expr_align (TREE_OPERAND (t, 0));
3275 case COND_EXPR:
3276 /* The best we can do is say that the alignment is the least aligned
3277 of the two arms. */
3278 align0 = expr_align (TREE_OPERAND (t, 1));
3279 align1 = expr_align (TREE_OPERAND (t, 2));
3280 return MIN (align0, align1);
3282 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3283 meaningfully; it's always 1. */
3284 case LABEL_DECL: case CONST_DECL:
3285 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3286 case FUNCTION_DECL:
3287 gcc_assert (DECL_ALIGN (t) != 0);
3288 return DECL_ALIGN (t);
3290 default:
3291 break;
3294 /* Otherwise take the alignment from that of the type. */
3295 return TYPE_ALIGN (TREE_TYPE (t));
3298 /* Return, as a tree node, the number of elements for TYPE (which is an
3299 ARRAY_TYPE) minus one. This counts only elements of the top array. */
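/* For example, for a C array declared as int a[10], the domain is
[0, 9] and the function returns the INTEGER_CST 9; the number of
elements itself is one more than the returned value. */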
3301 tree
3302 array_type_nelts (const_tree type)
3304 tree index_type, min, max;
3306 /* If they did it with unspecified bounds, then we should have already
3307 given an error about it before we got here. */
3308 if (! TYPE_DOMAIN (type))
3309 return error_mark_node;
3311 index_type = TYPE_DOMAIN (type);
3312 min = TYPE_MIN_VALUE (index_type);
3313 max = TYPE_MAX_VALUE (index_type);
3315 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3316 if (!max)
3317 return error_mark_node;
3319 return (integer_zerop (min)
3320 ? max
3321 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3324 /* If arg is static -- a reference to an object in static storage -- then
3325 return the object. This is not the same as the C meaning of `static'.
3326 If arg isn't static, return NULL. */
3328 tree
3329 staticp (tree arg)
3331 switch (TREE_CODE (arg))
3333 case FUNCTION_DECL:
3334 /* Nested functions are static, even though taking their address will
3335 involve a trampoline as we unnest the nested function and create
3336 the trampoline on the tree level. */
3337 return arg;
3339 case VAR_DECL:
3340 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3341 && ! DECL_THREAD_LOCAL_P (arg)
3342 && ! DECL_DLLIMPORT_P (arg)
3343 ? arg : NULL);
3345 case CONST_DECL:
3346 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3347 ? arg : NULL);
3349 case CONSTRUCTOR:
3350 return TREE_STATIC (arg) ? arg : NULL;
3352 case LABEL_DECL:
3353 case STRING_CST:
3354 return arg;
3356 case COMPONENT_REF:
3357 /* If the thing being referenced is not a field, then it is
3358 something language specific. */
3359 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3361 /* If we are referencing a bitfield, we can't evaluate an
3362 ADDR_EXPR at compile time and so it isn't a constant. */
3363 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3364 return NULL;
3366 return staticp (TREE_OPERAND (arg, 0));
3368 case BIT_FIELD_REF:
3369 return NULL;
3371 case INDIRECT_REF:
3372 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3374 case ARRAY_REF:
3375 case ARRAY_RANGE_REF:
3376 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3377 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3378 return staticp (TREE_OPERAND (arg, 0));
3379 else
3380 return NULL;
3382 case COMPOUND_LITERAL_EXPR:
3383 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3385 default:
3386 return NULL;
3393 /* Return whether OP is a DECL whose address is function-invariant. */
3395 bool
3396 decl_address_invariant_p (const_tree op)
3398 /* The conditions below are slightly less strict than the one in
3399 staticp. */
3401 switch (TREE_CODE (op))
3403 case PARM_DECL:
3404 case RESULT_DECL:
3405 case LABEL_DECL:
3406 case FUNCTION_DECL:
3407 return true;
3409 case VAR_DECL:
3410 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3411 || DECL_THREAD_LOCAL_P (op)
3412 || DECL_CONTEXT (op) == current_function_decl
3413 || decl_function_context (op) == current_function_decl)
3414 return true;
3415 break;
3417 case CONST_DECL:
3418 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3419 || decl_function_context (op) == current_function_decl)
3420 return true;
3421 break;
3423 default:
3424 break;
3427 return false;
3430 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3432 bool
3433 decl_address_ip_invariant_p (const_tree op)
3435 /* The conditions below are slightly less strict than the one in
3436 staticp. */
3438 switch (TREE_CODE (op))
3440 case LABEL_DECL:
3441 case FUNCTION_DECL:
3442 case STRING_CST:
3443 return true;
3445 case VAR_DECL:
3446 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3447 && !DECL_DLLIMPORT_P (op))
3448 || DECL_THREAD_LOCAL_P (op))
3449 return true;
3450 break;
3452 case CONST_DECL:
3453 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3454 return true;
3455 break;
3457 default:
3458 break;
3461 return false;
3465 /* Return true if T is function-invariant (internal function, does
3466 not handle arithmetic; that's handled in skip_simple_arithmetic and
3467 tree_invariant_p). */
3469 static bool
3470 tree_invariant_p_1 (tree t)
3472 tree op;
3474 if (TREE_CONSTANT (t)
3475 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3476 return true;
3478 switch (TREE_CODE (t))
3480 case SAVE_EXPR:
3481 return true;
3483 case ADDR_EXPR:
3484 op = TREE_OPERAND (t, 0);
3485 while (handled_component_p (op))
3487 switch (TREE_CODE (op))
3489 case ARRAY_REF:
3490 case ARRAY_RANGE_REF:
3491 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3492 || TREE_OPERAND (op, 2) != NULL_TREE
3493 || TREE_OPERAND (op, 3) != NULL_TREE)
3494 return false;
3495 break;
3497 case COMPONENT_REF:
3498 if (TREE_OPERAND (op, 2) != NULL_TREE)
3499 return false;
3500 break;
3502 default:;
3504 op = TREE_OPERAND (op, 0);
3507 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3509 default:
3510 break;
3513 return false;
3516 /* Return true if T is function-invariant. */
3518 bool
3519 tree_invariant_p (tree t)
3521 tree inner = skip_simple_arithmetic (t);
3522 return tree_invariant_p_1 (inner);
3525 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3526 Do this to any expression which may be used in more than one place,
3527 but must be evaluated only once.
3529 Normally, expand_expr would reevaluate the expression each time.
3530 Calling save_expr produces something that is evaluated and recorded
3531 the first time expand_expr is called on it. Subsequent calls to
3532 expand_expr just reuse the recorded value.
3534 The call to expand_expr that generates code that actually computes
3535 the value is the first call *at compile time*. Subsequent calls
3536 *at compile time* generate code to use the saved value.
3537 This produces the correct result provided that *at run time* control
3538 always flows through the insns made by the first expand_expr
3539 before reaching the other places where the save_expr was evaluated.
3540 You, the caller of save_expr, must make sure this is so.
3542 Constants, and certain read-only nodes, are returned with no
3543 SAVE_EXPR because that is safe. Expressions containing placeholders
3544 are not touched; see tree.def for an explanation of what these
3545 are used for. */
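/* A typical use (illustrative, with a hypothetical tree X of type
TYPE) is:

  tree saved = save_expr (x);
  tree square = build2 (MULT_EXPR, type, saved, saved);

so that X is expanded and evaluated only once even though SAVED is
referenced twice. */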
3547 tree
3548 save_expr (tree expr)
3550 tree inner;
3552 /* If the tree evaluates to a constant, then we don't want to hide that
3553 fact (i.e. this allows further folding, and direct checks for constants).
3554 However, a read-only object that has side effects cannot be bypassed.
3555 Since it is no problem to reevaluate literals, we just return the
3556 literal node. */
3557 inner = skip_simple_arithmetic (expr);
3558 if (TREE_CODE (inner) == ERROR_MARK)
3559 return inner;
3561 if (tree_invariant_p_1 (inner))
3562 return expr;
3564 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3565 it means that the size or offset of some field of an object depends on
3566 the value within another field.
3568 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3569 and some variable since it would then need to be both evaluated once and
3570 evaluated more than once. Front-ends must assure this case cannot
3571 happen by surrounding any such subexpressions in their own SAVE_EXPR
3572 and forcing evaluation at the proper time. */
3573 if (contains_placeholder_p (inner))
3574 return expr;
3576 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3578 /* This expression might be placed ahead of a jump to ensure that the
3579 value was computed on both sides of the jump. So make sure it isn't
3580 eliminated as dead. */
3581 TREE_SIDE_EFFECTS (expr) = 1;
3582 return expr;
3585 /* Look inside EXPR into any simple arithmetic operations. Return the
3586 outermost non-arithmetic or non-invariant node. */
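/* For example, for A + 4 where A is a VAR_DECL, the loop below steps
over the addition (4 is invariant) and returns A; for A + B where
neither operand is invariant, the PLUS_EXPR itself is returned. */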
3588 tree
3589 skip_simple_arithmetic (tree expr)
3591 /* We don't care about whether this can be used as an lvalue in this
3592 context. */
3593 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3594 expr = TREE_OPERAND (expr, 0);
3596 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3597 a constant, it will be more efficient to not make another SAVE_EXPR since
3598 it will allow better simplification and GCSE will be able to merge the
3599 computations if they actually occur. */
3600 while (true)
3602 if (UNARY_CLASS_P (expr))
3603 expr = TREE_OPERAND (expr, 0);
3604 else if (BINARY_CLASS_P (expr))
3606 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3607 expr = TREE_OPERAND (expr, 0);
3608 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3609 expr = TREE_OPERAND (expr, 1);
3610 else
3611 break;
3613 else
3614 break;
3617 return expr;
3620 /* Look inside EXPR into simple arithmetic operations involving constants.
3621 Return the outermost non-arithmetic or non-constant node. */
3623 tree
3624 skip_simple_constant_arithmetic (tree expr)
3626 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3627 expr = TREE_OPERAND (expr, 0);
3629 while (true)
3631 if (UNARY_CLASS_P (expr))
3632 expr = TREE_OPERAND (expr, 0);
3633 else if (BINARY_CLASS_P (expr))
3635 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3636 expr = TREE_OPERAND (expr, 0);
3637 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3638 expr = TREE_OPERAND (expr, 1);
3639 else
3640 break;
3642 else
3643 break;
3646 return expr;
3649 /* Return which tree structure is used by T. */
3651 enum tree_node_structure_enum
3652 tree_node_structure (const_tree t)
3654 const enum tree_code code = TREE_CODE (t);
3655 return tree_node_structure_for_code (code);
3658 /* Set various status flags when building a CALL_EXPR object T. */
3660 static void
3661 process_call_operands (tree t)
3663 bool side_effects = TREE_SIDE_EFFECTS (t);
3664 bool read_only = false;
3665 int i = call_expr_flags (t);
3667 /* Calls have side-effects, except those to const or pure functions. */
3668 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3669 side_effects = true;
3670 /* Propagate TREE_READONLY of arguments for const functions. */
3671 if (i & ECF_CONST)
3672 read_only = true;
3674 if (!side_effects || read_only)
3675 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3677 tree op = TREE_OPERAND (t, i);
3678 if (op && TREE_SIDE_EFFECTS (op))
3679 side_effects = true;
3680 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3681 read_only = false;
3684 TREE_SIDE_EFFECTS (t) = side_effects;
3685 TREE_READONLY (t) = read_only;
3688 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3689 size or offset that depends on a field within a record. */
3691 bool
3692 contains_placeholder_p (const_tree exp)
3694 enum tree_code code;
3696 if (!exp)
3697 return 0;
3699 code = TREE_CODE (exp);
3700 if (code == PLACEHOLDER_EXPR)
3701 return 1;
3703 switch (TREE_CODE_CLASS (code))
3705 case tcc_reference:
3706 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3707 position computations since they will be converted into a
3708 WITH_RECORD_EXPR involving the reference, which we assume
3709 here will be valid. */
3710 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3712 case tcc_exceptional:
3713 if (code == TREE_LIST)
3714 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3715 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3716 break;
3718 case tcc_unary:
3719 case tcc_binary:
3720 case tcc_comparison:
3721 case tcc_expression:
3722 switch (code)
3724 case COMPOUND_EXPR:
3725 /* Ignoring the first operand isn't quite right, but works best. */
3726 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3728 case COND_EXPR:
3729 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3730 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3731 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3733 case SAVE_EXPR:
3734 /* The save_expr function never wraps anything containing
3735 a PLACEHOLDER_EXPR. */
3736 return 0;
3738 default:
3739 break;
3742 switch (TREE_CODE_LENGTH (code))
3744 case 1:
3745 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3746 case 2:
3747 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3748 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3749 default:
3750 return 0;
3753 case tcc_vl_exp:
3754 switch (code)
3756 case CALL_EXPR:
3758 const_tree arg;
3759 const_call_expr_arg_iterator iter;
3760 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3761 if (CONTAINS_PLACEHOLDER_P (arg))
3762 return 1;
3763 return 0;
3765 default:
3766 return 0;
3769 default:
3770 return 0;
3772 return 0;
3775 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3776 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3777 field positions. */
3779 static bool
3780 type_contains_placeholder_1 (const_tree type)
3782 /* If the size contains a placeholder or the parent type (the component
3783 type in the case of arrays) involves a placeholder, this type does. */
3784 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3785 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3786 || (!POINTER_TYPE_P (type)
3787 && TREE_TYPE (type)
3788 && type_contains_placeholder_p (TREE_TYPE (type))))
3789 return true;
3791 /* Now do type-specific checks. Note that the last part of the check above
3792 greatly limits what we have to do below. */
3793 switch (TREE_CODE (type))
3795 case VOID_TYPE:
3796 case COMPLEX_TYPE:
3797 case ENUMERAL_TYPE:
3798 case BOOLEAN_TYPE:
3799 case POINTER_TYPE:
3800 case OFFSET_TYPE:
3801 case REFERENCE_TYPE:
3802 case METHOD_TYPE:
3803 case FUNCTION_TYPE:
3804 case VECTOR_TYPE:
3805 case NULLPTR_TYPE:
3806 return false;
3808 case INTEGER_TYPE:
3809 case REAL_TYPE:
3810 case FIXED_POINT_TYPE:
3811 /* Here we just check the bounds. */
3812 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3813 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3815 case ARRAY_TYPE:
3816 /* We have already checked the component type above, so just check
3817 the domain type. Flexible array members have a null domain. */
3818 return TYPE_DOMAIN (type) ?
3819 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3821 case RECORD_TYPE:
3822 case UNION_TYPE:
3823 case QUAL_UNION_TYPE:
3825 tree field;
3827 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3828 if (TREE_CODE (field) == FIELD_DECL
3829 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3830 || (TREE_CODE (type) == QUAL_UNION_TYPE
3831 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3832 || type_contains_placeholder_p (TREE_TYPE (field))))
3833 return true;
3835 return false;
3838 default:
3839 gcc_unreachable ();
3843 /* Wrapper around above function used to cache its result. */
3845 bool
3846 type_contains_placeholder_p (tree type)
3848 bool result;
3850 /* If the contains_placeholder_bits field has been initialized,
3851 then we know the answer. */
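/* The internal field uses a three-way encoding: 0 means not yet
computed, 1 means false and 2 means true, hence the -1 when reading
and the +1 when storing below. */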
3852 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3853 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3855 /* Indicate that we've seen this type node, and the answer is false.
3856 This is what we want to return if we run into recursion via fields. */
3857 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3859 /* Compute the real value. */
3860 result = type_contains_placeholder_1 (type);
3862 /* Store the real value. */
3863 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3865 return result;
3868 /* Push tree EXP onto vector QUEUE if it is not already present. */
3870 static void
3871 push_without_duplicates (tree exp, vec<tree> *queue)
3873 unsigned int i;
3874 tree iter;
3876 FOR_EACH_VEC_ELT (*queue, i, iter)
3877 if (simple_cst_equal (iter, exp) == 1)
3878 break;
3880 if (!iter)
3881 queue->safe_push (exp);
3884 /* Given a tree EXP, find all occurrences of references to fields
3885 in a PLACEHOLDER_EXPR and place them in vector REFS without
3886 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3887 we assume here that EXP contains only arithmetic expressions
3888 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3889 argument list. */
3891 void
3892 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3894 enum tree_code code = TREE_CODE (exp);
3895 tree inner;
3896 int i;
3898 /* We handle TREE_LIST and COMPONENT_REF separately. */
3899 if (code == TREE_LIST)
3901 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3902 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3904 else if (code == COMPONENT_REF)
3906 for (inner = TREE_OPERAND (exp, 0);
3907 REFERENCE_CLASS_P (inner);
3908 inner = TREE_OPERAND (inner, 0))
3911 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3912 push_without_duplicates (exp, refs);
3913 else
3914 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3916 else
3917 switch (TREE_CODE_CLASS (code))
3919 case tcc_constant:
3920 break;
3922 case tcc_declaration:
3923 /* Variables allocated to static storage can stay. */
3924 if (!TREE_STATIC (exp))
3925 push_without_duplicates (exp, refs);
3926 break;
3928 case tcc_expression:
3929 /* This is the pattern built in ada/make_aligning_type. */
3930 if (code == ADDR_EXPR
3931 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3933 push_without_duplicates (exp, refs);
3934 break;
3937 /* Fall through. */
3939 case tcc_exceptional:
3940 case tcc_unary:
3941 case tcc_binary:
3942 case tcc_comparison:
3943 case tcc_reference:
3944 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3945 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3946 break;
3948 case tcc_vl_exp:
3949 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3950 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3951 break;
3953 default:
3954 gcc_unreachable ();
3958 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3959 return a tree with all occurrences of references to F in a
3960 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3961 CONST_DECLs. Note that we assume here that EXP contains only
3962 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3963 occurring only in their argument list. */
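/* As an illustration, if EXP is PLACEHOLDER_EXPR.F + 1 (a COMPONENT_REF
of a PLACEHOLDER_EXPR plus the constant 1), F that FIELD_DECL and R a
VAR_DECL V, the result is the folded expression V + 1. */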
3965 tree
3966 substitute_in_expr (tree exp, tree f, tree r)
3968 enum tree_code code = TREE_CODE (exp);
3969 tree op0, op1, op2, op3;
3970 tree new_tree;
3972 /* We handle TREE_LIST and COMPONENT_REF separately. */
3973 if (code == TREE_LIST)
3975 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3976 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3977 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3978 return exp;
3980 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3982 else if (code == COMPONENT_REF)
3984 tree inner;
3986 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3987 and it is the right field, replace it with R. */
3988 for (inner = TREE_OPERAND (exp, 0);
3989 REFERENCE_CLASS_P (inner);
3990 inner = TREE_OPERAND (inner, 0))
3993 /* The field. */
3994 op1 = TREE_OPERAND (exp, 1);
3996 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3997 return r;
3999 /* If this expression hasn't been completed yet, leave it alone. */
4000 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4001 return exp;
4003 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4004 if (op0 == TREE_OPERAND (exp, 0))
4005 return exp;
4007 new_tree
4008 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4010 else
4011 switch (TREE_CODE_CLASS (code))
4013 case tcc_constant:
4014 return exp;
4016 case tcc_declaration:
4017 if (exp == f)
4018 return r;
4019 else
4020 return exp;
4022 case tcc_expression:
4023 if (exp == f)
4024 return r;
4026 /* Fall through. */
4028 case tcc_exceptional:
4029 case tcc_unary:
4030 case tcc_binary:
4031 case tcc_comparison:
4032 case tcc_reference:
4033 switch (TREE_CODE_LENGTH (code))
4035 case 0:
4036 return exp;
4038 case 1:
4039 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4040 if (op0 == TREE_OPERAND (exp, 0))
4041 return exp;
4043 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4044 break;
4046 case 2:
4047 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4048 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4050 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4051 return exp;
4053 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4054 break;
4056 case 3:
4057 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4058 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4059 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4061 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4062 && op2 == TREE_OPERAND (exp, 2))
4063 return exp;
4065 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4066 break;
4068 case 4:
4069 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4070 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4071 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4072 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4074 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4075 && op2 == TREE_OPERAND (exp, 2)
4076 && op3 == TREE_OPERAND (exp, 3))
4077 return exp;
4079 new_tree
4080 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4081 break;
4083 default:
4084 gcc_unreachable ();
4086 break;
4088 case tcc_vl_exp:
4090 int i;
4092 new_tree = NULL_TREE;
4094 /* If we are trying to replace F with a constant or with another
4095 instance of one of the arguments of the call, inline back
4096 functions that do nothing other than compute a value from
4097 the arguments they are passed. This makes it possible to
4098 fold partially or entirely the replacement expression. */
4099 if (code == CALL_EXPR)
4101 bool maybe_inline = false;
4102 if (CONSTANT_CLASS_P (r))
4103 maybe_inline = true;
4104 else
4105 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4106 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4108 maybe_inline = true;
4109 break;
4111 if (maybe_inline)
4113 tree t = maybe_inline_call_in_expr (exp);
4114 if (t)
4115 return SUBSTITUTE_IN_EXPR (t, f, r);
4119 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4121 tree op = TREE_OPERAND (exp, i);
4122 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4123 if (new_op != op)
4125 if (!new_tree)
4126 new_tree = copy_node (exp);
4127 TREE_OPERAND (new_tree, i) = new_op;
4131 if (new_tree)
4133 new_tree = fold (new_tree);
4134 if (TREE_CODE (new_tree) == CALL_EXPR)
4135 process_call_operands (new_tree);
4137 else
4138 return exp;
4140 break;
4142 default:
4143 gcc_unreachable ();
4146 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4148 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4149 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4151 return new_tree;
4154 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4155 for it within OBJ, a tree that is an object or a chain of references. */
4157 tree
4158 substitute_placeholder_in_expr (tree exp, tree obj)
4160 enum tree_code code = TREE_CODE (exp);
4161 tree op0, op1, op2, op3;
4162 tree new_tree;
4164 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4165 in the chain of OBJ. */
4166 if (code == PLACEHOLDER_EXPR)
4168 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4169 tree elt;
4171 for (elt = obj; elt != 0;
4172 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4173 || TREE_CODE (elt) == COND_EXPR)
4174 ? TREE_OPERAND (elt, 1)
4175 : (REFERENCE_CLASS_P (elt)
4176 || UNARY_CLASS_P (elt)
4177 || BINARY_CLASS_P (elt)
4178 || VL_EXP_CLASS_P (elt)
4179 || EXPRESSION_CLASS_P (elt))
4180 ? TREE_OPERAND (elt, 0) : 0))
4181 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4182 return elt;
4184 for (elt = obj; elt != 0;
4185 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4186 || TREE_CODE (elt) == COND_EXPR)
4187 ? TREE_OPERAND (elt, 1)
4188 : (REFERENCE_CLASS_P (elt)
4189 || UNARY_CLASS_P (elt)
4190 || BINARY_CLASS_P (elt)
4191 || VL_EXP_CLASS_P (elt)
4192 || EXPRESSION_CLASS_P (elt))
4193 ? TREE_OPERAND (elt, 0) : 0))
4194 if (POINTER_TYPE_P (TREE_TYPE (elt))
4195 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4196 == need_type))
4197 return fold_build1 (INDIRECT_REF, need_type, elt);
4199 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4200 survives until RTL generation, there will be an error. */
4201 return exp;
4204 /* TREE_LIST is special because we need to look at TREE_VALUE
4205 and TREE_CHAIN, not TREE_OPERANDS. */
4206 else if (code == TREE_LIST)
4208 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4209 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4210 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4211 return exp;
4213 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4215 else
4216 switch (TREE_CODE_CLASS (code))
4218 case tcc_constant:
4219 case tcc_declaration:
4220 return exp;
4222 case tcc_exceptional:
4223 case tcc_unary:
4224 case tcc_binary:
4225 case tcc_comparison:
4226 case tcc_expression:
4227 case tcc_reference:
4228 case tcc_statement:
4229 switch (TREE_CODE_LENGTH (code))
4231 case 0:
4232 return exp;
4234 case 1:
4235 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4236 if (op0 == TREE_OPERAND (exp, 0))
4237 return exp;
4239 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4240 break;
4242 case 2:
4243 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4244 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4246 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4247 return exp;
4249 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4250 break;
4252 case 3:
4253 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4254 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4255 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4257 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4258 && op2 == TREE_OPERAND (exp, 2))
4259 return exp;
4261 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4262 break;
4264 case 4:
4265 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4266 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4267 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4268 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4270 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4271 && op2 == TREE_OPERAND (exp, 2)
4272 && op3 == TREE_OPERAND (exp, 3))
4273 return exp;
4275 new_tree
4276 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4277 break;
4279 default:
4280 gcc_unreachable ();
4282 break;
4284 case tcc_vl_exp:
4286 int i;
4288 new_tree = NULL_TREE;
4290 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4292 tree op = TREE_OPERAND (exp, i);
4293 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4294 if (new_op != op)
4296 if (!new_tree)
4297 new_tree = copy_node (exp);
4298 TREE_OPERAND (new_tree, i) = new_op;
4302 if (new_tree)
4304 new_tree = fold (new_tree);
4305 if (TREE_CODE (new_tree) == CALL_EXPR)
4306 process_call_operands (new_tree);
4308 else
4309 return exp;
4311 break;
4313 default:
4314 gcc_unreachable ();
4317 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4319 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4320 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4322 return new_tree;
4326 /* Subroutine of stabilize_reference; this is called for subtrees of
4327 references. Any expression with side-effects must be put in a SAVE_EXPR
4328 to ensure that it is only evaluated once.
4330 We don't put SAVE_EXPR nodes around everything, because assigning very
4331 simple expressions to temporaries causes us to miss good opportunities
4332 for optimizations. Among other things, the opportunity to fold in the
4333 addition of a constant into an addressing mode often gets lost, e.g.
4334 "y[i+1] += x;". In general, we take the approach that we should not make
4335 an assignment unless we are forced into it - i.e., that any side-effect-free
4336 operator should be allowed, and that cse should take care of coalescing
4337 multiple occurrences of the same expression should that prove fruitful. */
4339 static tree
4340 stabilize_reference_1 (tree e)
4342 tree result;
4343 enum tree_code code = TREE_CODE (e);
4345 /* We cannot ignore const expressions because the expression might be a
4346 reference to a const array whose index contains side-effects. But we can
4347 ignore things that are actual constants or that have already been
4348 handled by this function. */
4350 if (tree_invariant_p (e))
4351 return e;
4353 switch (TREE_CODE_CLASS (code))
4355 case tcc_exceptional:
4356 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4357 have side-effects. */
4358 if (code == STATEMENT_LIST)
4359 return save_expr (e);
4360 /* FALLTHRU */
4361 case tcc_type:
4362 case tcc_declaration:
4363 case tcc_comparison:
4364 case tcc_statement:
4365 case tcc_expression:
4366 case tcc_reference:
4367 case tcc_vl_exp:
4368 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4369 so that it will only be evaluated once. */
4370 /* The reference (r) and comparison (<) classes could be handled as
4371 below, but it is generally faster to only evaluate them once. */
4372 if (TREE_SIDE_EFFECTS (e))
4373 return save_expr (e);
4374 return e;
4376 case tcc_constant:
4377 /* Constants need no processing. In fact, we should never reach
4378 here. */
4379 return e;
4381 case tcc_binary:
4382 /* Division is slow and tends to be compiled with jumps,
4383 especially the division by powers of 2 that is often
4384 found inside of an array reference. So do it just once. */
4385 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4386 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4387 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4388 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4389 return save_expr (e);
4390 /* Recursively stabilize each operand. */
4391 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4392 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4393 break;
4395 case tcc_unary:
4396 /* Recursively stabilize each operand. */
4397 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4398 break;
4400 default:
4401 gcc_unreachable ();
4404 TREE_TYPE (result) = TREE_TYPE (e);
4405 TREE_READONLY (result) = TREE_READONLY (e);
4406 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4407 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4409 return result;
4412 /* Stabilize a reference so that we can use it any number of times
4413 without causing its operands to be evaluated more than once.
4414 Returns the stabilized reference. This works by means of save_expr,
4415 so see the caveats in the comments about save_expr.
4417 Also allows conversion expressions whose operands are references.
4418 Any other kind of expression is returned unchanged. */
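/* For example, stabilizing the reference "a[f ()]" leaves the base alone but
   wraps the index in a SAVE_EXPR via stabilize_reference_1, so that something
   like "a[f ()] += 1" evaluates the call to f only once.  */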
4420 tree
4421 stabilize_reference (tree ref)
4423 tree result;
4424 enum tree_code code = TREE_CODE (ref);
4426 switch (code)
4428 case VAR_DECL:
4429 case PARM_DECL:
4430 case RESULT_DECL:
4431 /* No action is needed in this case. */
4432 return ref;
4434 CASE_CONVERT:
4435 case FLOAT_EXPR:
4436 case FIX_TRUNC_EXPR:
4437 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4438 break;
4440 case INDIRECT_REF:
4441 result = build_nt (INDIRECT_REF,
4442 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4443 break;
4445 case COMPONENT_REF:
4446 result = build_nt (COMPONENT_REF,
4447 stabilize_reference (TREE_OPERAND (ref, 0)),
4448 TREE_OPERAND (ref, 1), NULL_TREE);
4449 break;
4451 case BIT_FIELD_REF:
4452 result = build_nt (BIT_FIELD_REF,
4453 stabilize_reference (TREE_OPERAND (ref, 0)),
4454 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4455 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4456 break;
4458 case ARRAY_REF:
4459 result = build_nt (ARRAY_REF,
4460 stabilize_reference (TREE_OPERAND (ref, 0)),
4461 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4462 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4463 break;
4465 case ARRAY_RANGE_REF:
4466 result = build_nt (ARRAY_RANGE_REF,
4467 stabilize_reference (TREE_OPERAND (ref, 0)),
4468 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4469 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4470 break;
4472 case COMPOUND_EXPR:
4473 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4474 it wouldn't be ignored. This matters when dealing with
4475 volatiles. */
4476 return stabilize_reference_1 (ref);
4478 /* If arg isn't a kind of lvalue we recognize, make no change.
4479 Caller should recognize the error for an invalid lvalue. */
4480 default:
4481 return ref;
4483 case ERROR_MARK:
4484 return error_mark_node;
4487 TREE_TYPE (result) = TREE_TYPE (ref);
4488 TREE_READONLY (result) = TREE_READONLY (ref);
4489 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4490 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4492 return result;
4495 /* Low-level constructors for expressions. */
4497 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4498 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4500 void
4501 recompute_tree_invariant_for_addr_expr (tree t)
4503 tree node;
4504 bool tc = true, se = false;
4506 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4508 /* We started out assuming this address is both invariant and constant and
4509 has no side effects. Now go down any handled components and see if
4510 any of them involve offsets that are either non-constant or non-invariant.
4511 Also check for side-effects.
4513 ??? Note that this code makes no attempt to deal with the case where
4514 taking the address of something causes a copy due to misalignment. */
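/* For instance, &arr[3] of a static ARR ends up TREE_CONSTANT, whereas
   &arr[i] with a non-constant index I does not; an index with side effects
   makes the whole ADDR_EXPR carry TREE_SIDE_EFFECTS.  */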
4516 #define UPDATE_FLAGS(NODE) \
4517 do { tree _node = (NODE); \
4518 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4519 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4521 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4522 node = TREE_OPERAND (node, 0))
4524 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4525 array reference (probably made temporarily by the G++ front end),
4526 so ignore all the operands. */
4527 if ((TREE_CODE (node) == ARRAY_REF
4528 || TREE_CODE (node) == ARRAY_RANGE_REF)
4529 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4531 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4532 if (TREE_OPERAND (node, 2))
4533 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4534 if (TREE_OPERAND (node, 3))
4535 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4537 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4538 FIELD_DECL, apparently. The G++ front end can put something else
4539 there, at least temporarily. */
4540 else if (TREE_CODE (node) == COMPONENT_REF
4541 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4543 if (TREE_OPERAND (node, 2))
4544 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4548 node = lang_hooks.expr_to_decl (node, &tc, &se);
4550 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4551 the address, since &(*a)->b is a form of addition. If it's a constant, the
4552 address is constant too. If it's a decl, its address is constant if the
4553 decl is static. Everything else is not constant and, furthermore,
4554 taking the address of a volatile variable is not volatile. */
4555 if (TREE_CODE (node) == INDIRECT_REF
4556 || TREE_CODE (node) == MEM_REF)
4557 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4558 else if (CONSTANT_CLASS_P (node))
4560 else if (DECL_P (node))
4561 tc &= (staticp (node) != NULL_TREE);
4562 else
4564 tc = false;
4565 se |= TREE_SIDE_EFFECTS (node);
4569 TREE_CONSTANT (t) = tc;
4570 TREE_SIDE_EFFECTS (t) = se;
4571 #undef UPDATE_FLAGS
4574 /* Build an expression of code CODE, data type TYPE, and operands as
4575 specified. Expressions and reference nodes can be created this way.
4576 Constants, decls, types and misc nodes cannot be.
4578 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4579 enough for all extant tree codes. */
4581 tree
4582 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4584 tree t;
4586 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4588 t = make_node (code PASS_MEM_STAT);
4589 TREE_TYPE (t) = tt;
4591 return t;
4594 tree
4595 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4597 int length = sizeof (struct tree_exp);
4598 tree t;
4600 record_node_allocation_statistics (code, length);
4602 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4604 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4606 memset (t, 0, sizeof (struct tree_common));
4608 TREE_SET_CODE (t, code);
4610 TREE_TYPE (t) = type;
4611 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4612 TREE_OPERAND (t, 0) = node;
4613 if (node && !TYPE_P (node))
4615 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4616 TREE_READONLY (t) = TREE_READONLY (node);
4619 if (TREE_CODE_CLASS (code) == tcc_statement)
4621 if (code != DEBUG_BEGIN_STMT)
4622 TREE_SIDE_EFFECTS (t) = 1;
4624 else switch (code)
4626 case VA_ARG_EXPR:
4627 /* All of these have side-effects, no matter what their
4628 operands are. */
4629 TREE_SIDE_EFFECTS (t) = 1;
4630 TREE_READONLY (t) = 0;
4631 break;
4633 case INDIRECT_REF:
4634 /* Whether a dereference is readonly has nothing to do with whether
4635 its operand is readonly. */
4636 TREE_READONLY (t) = 0;
4637 break;
4639 case ADDR_EXPR:
4640 if (node)
4641 recompute_tree_invariant_for_addr_expr (t);
4642 break;
4644 default:
4645 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4646 && node && !TYPE_P (node)
4647 && TREE_CONSTANT (node))
4648 TREE_CONSTANT (t) = 1;
4649 if (TREE_CODE_CLASS (code) == tcc_reference
4650 && node && TREE_THIS_VOLATILE (node))
4651 TREE_THIS_VOLATILE (t) = 1;
4652 break;
4655 return t;
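/* Helper for build2 through build5.  Store ARG##N as operand N of T and, for
   operands that are expressions rather than types, fold their
   TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT flags into the local
   side_effects, read_only and constant variables.  */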
4658 #define PROCESS_ARG(N) \
4659 do { \
4660 TREE_OPERAND (t, N) = arg##N; \
4661 if (arg##N &&!TYPE_P (arg##N)) \
4663 if (TREE_SIDE_EFFECTS (arg##N)) \
4664 side_effects = 1; \
4665 if (!TREE_READONLY (arg##N) \
4666 && !CONSTANT_CLASS_P (arg##N)) \
4667 (void) (read_only = 0); \
4668 if (!TREE_CONSTANT (arg##N)) \
4669 (void) (constant = 0); \
4671 } while (0)
4673 tree
4674 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4676 bool constant, read_only, side_effects, div_by_zero;
4677 tree t;
4679 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4681 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4682 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4683 /* When sizetype precision doesn't match that of pointers
4684 we need to be able to build explicit extensions or truncations
4685 of the offset argument. */
4686 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4687 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4688 && TREE_CODE (arg1) == INTEGER_CST);
4690 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4691 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4692 && ptrofftype_p (TREE_TYPE (arg1)));
4694 t = make_node (code PASS_MEM_STAT);
4695 TREE_TYPE (t) = tt;
4697 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4698 result based on those same flags for the arguments. But if the
4699 arguments aren't really even `tree' expressions, we shouldn't be trying
4700 to do this. */
4702 /* Expressions without side effects may be constant if their
4703 arguments are as well. */
4704 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4705 || TREE_CODE_CLASS (code) == tcc_binary);
4706 read_only = 1;
4707 side_effects = TREE_SIDE_EFFECTS (t);
4709 switch (code)
4711 case TRUNC_DIV_EXPR:
4712 case CEIL_DIV_EXPR:
4713 case FLOOR_DIV_EXPR:
4714 case ROUND_DIV_EXPR:
4715 case EXACT_DIV_EXPR:
4716 case CEIL_MOD_EXPR:
4717 case FLOOR_MOD_EXPR:
4718 case ROUND_MOD_EXPR:
4719 case TRUNC_MOD_EXPR:
4720 div_by_zero = integer_zerop (arg1);
4721 break;
4722 default:
4723 div_by_zero = false;
4726 PROCESS_ARG (0);
4727 PROCESS_ARG (1);
4729 TREE_SIDE_EFFECTS (t) = side_effects;
4730 if (code == MEM_REF)
4732 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4734 tree o = TREE_OPERAND (arg0, 0);
4735 TREE_READONLY (t) = TREE_READONLY (o);
4736 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4739 else
4741 TREE_READONLY (t) = read_only;
4742 /* Don't mark X / 0 as constant. */
4743 TREE_CONSTANT (t) = constant && !div_by_zero;
4744 TREE_THIS_VOLATILE (t)
4745 = (TREE_CODE_CLASS (code) == tcc_reference
4746 && arg0 && TREE_THIS_VOLATILE (arg0));
4749 return t;
4753 tree
4754 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4755 tree arg2 MEM_STAT_DECL)
4757 bool constant, read_only, side_effects;
4758 tree t;
4760 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4761 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4763 t = make_node (code PASS_MEM_STAT);
4764 TREE_TYPE (t) = tt;
4766 read_only = 1;
4768 /* As a special exception, if COND_EXPR has NULL branches, we
4769 assume that it is a gimple statement and always consider
4770 it to have side effects. */
4771 if (code == COND_EXPR
4772 && tt == void_type_node
4773 && arg1 == NULL_TREE
4774 && arg2 == NULL_TREE)
4775 side_effects = true;
4776 else
4777 side_effects = TREE_SIDE_EFFECTS (t);
4779 PROCESS_ARG (0);
4780 PROCESS_ARG (1);
4781 PROCESS_ARG (2);
4783 if (code == COND_EXPR)
4784 TREE_READONLY (t) = read_only;
4786 TREE_SIDE_EFFECTS (t) = side_effects;
4787 TREE_THIS_VOLATILE (t)
4788 = (TREE_CODE_CLASS (code) == tcc_reference
4789 && arg0 && TREE_THIS_VOLATILE (arg0));
4791 return t;
4794 tree
4795 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4796 tree arg2, tree arg3 MEM_STAT_DECL)
4798 bool constant, read_only, side_effects;
4799 tree t;
4801 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4803 t = make_node (code PASS_MEM_STAT);
4804 TREE_TYPE (t) = tt;
4806 side_effects = TREE_SIDE_EFFECTS (t);
4808 PROCESS_ARG (0);
4809 PROCESS_ARG (1);
4810 PROCESS_ARG (2);
4811 PROCESS_ARG (3);
4813 TREE_SIDE_EFFECTS (t) = side_effects;
4814 TREE_THIS_VOLATILE (t)
4815 = (TREE_CODE_CLASS (code) == tcc_reference
4816 && arg0 && TREE_THIS_VOLATILE (arg0));
4818 return t;
4821 tree
4822 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4823 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4825 bool constant, read_only, side_effects;
4826 tree t;
4828 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4830 t = make_node (code PASS_MEM_STAT);
4831 TREE_TYPE (t) = tt;
4833 side_effects = TREE_SIDE_EFFECTS (t);
4835 PROCESS_ARG (0);
4836 PROCESS_ARG (1);
4837 PROCESS_ARG (2);
4838 PROCESS_ARG (3);
4839 PROCESS_ARG (4);
4841 TREE_SIDE_EFFECTS (t) = side_effects;
4842 if (code == TARGET_MEM_REF)
4844 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4846 tree o = TREE_OPERAND (arg0, 0);
4847 TREE_READONLY (t) = TREE_READONLY (o);
4848 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4851 else
4852 TREE_THIS_VOLATILE (t)
4853 = (TREE_CODE_CLASS (code) == tcc_reference
4854 && arg0 && TREE_THIS_VOLATILE (arg0));
4856 return t;
4859 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4860 on the pointer PTR. */
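/* E.g. for PTR == &s.f this collapses the address to the base &s and folds
   the byte offset of F into the constant second operand of the MEM_REF; that
   operand keeps PTR's original pointer type.  */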
4862 tree
4863 build_simple_mem_ref_loc (location_t loc, tree ptr)
4865 poly_int64 offset = 0;
4866 tree ptype = TREE_TYPE (ptr);
4867 tree tem;
4868 /* For convenience allow addresses that collapse to a simple base
4869 and offset. */
4870 if (TREE_CODE (ptr) == ADDR_EXPR
4871 && (handled_component_p (TREE_OPERAND (ptr, 0))
4872 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4874 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4875 gcc_assert (ptr);
4876 if (TREE_CODE (ptr) == MEM_REF)
4878 offset += mem_ref_offset (ptr).force_shwi ();
4879 ptr = TREE_OPERAND (ptr, 0);
4881 else
4882 ptr = build_fold_addr_expr (ptr);
4883 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4885 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4886 ptr, build_int_cst (ptype, offset));
4887 SET_EXPR_LOCATION (tem, loc);
4888 return tem;
4891 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4893 poly_offset_int
4894 mem_ref_offset (const_tree t)
4896 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4897 SIGNED);
4900 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4901 offsetted by OFFSET units. */
4903 tree
4904 build_invariant_address (tree type, tree base, poly_int64 offset)
4906 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4907 build_fold_addr_expr (base),
4908 build_int_cst (ptr_type_node, offset));
4909 tree addr = build1 (ADDR_EXPR, type, ref);
4910 recompute_tree_invariant_for_addr_expr (addr);
4911 return addr;
4914 /* Similar except don't specify the TREE_TYPE
4915 and leave the TREE_SIDE_EFFECTS as 0.
4916 It is permissible for arguments to be null,
4917 or even garbage if their values do not matter. */
4919 tree
4920 build_nt (enum tree_code code, ...)
4922 tree t;
4923 int length;
4924 int i;
4925 va_list p;
4927 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4929 va_start (p, code);
4931 t = make_node (code);
4932 length = TREE_CODE_LENGTH (code);
4934 for (i = 0; i < length; i++)
4935 TREE_OPERAND (t, i) = va_arg (p, tree);
4937 va_end (p);
4938 return t;
4941 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4942 tree vec. */
4944 tree
4945 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4947 tree ret, t;
4948 unsigned int ix;
4950 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4951 CALL_EXPR_FN (ret) = fn;
4952 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4953 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4954 CALL_EXPR_ARG (ret, ix) = t;
4955 return ret;
4958 /* Create a DECL_... node of code CODE, name NAME (if non-null)
4959 and data type TYPE.
4960 We do NOT enter this node in any sort of symbol table.
4962 LOC is the location of the decl.
4964 layout_decl is used to set up the decl's storage layout.
4965 Other slots are initialized to 0 or null pointers. */
4967 tree
4968 build_decl (location_t loc, enum tree_code code, tree name,
4969 tree type MEM_STAT_DECL)
4971 tree t;
4973 t = make_node (code PASS_MEM_STAT);
4974 DECL_SOURCE_LOCATION (t) = loc;
4976 /* if (type == error_mark_node)
4977 type = integer_type_node; */
4978 /* That is not done, deliberately, so that having error_mark_node
4979 as the type can suppress useless errors in the use of this variable. */
4981 DECL_NAME (t) = name;
4982 TREE_TYPE (t) = type;
4984 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4985 layout_decl (t, 0);
4987 return t;
4990 /* Builds and returns function declaration with NAME and TYPE. */
4992 tree
4993 build_fn_decl (const char *name, tree type)
4995 tree id = get_identifier (name);
4996 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4998 DECL_EXTERNAL (decl) = 1;
4999 TREE_PUBLIC (decl) = 1;
5000 DECL_ARTIFICIAL (decl) = 1;
5001 TREE_NOTHROW (decl) = 1;
5003 return decl;
5006 vec<tree, va_gc> *all_translation_units;
5008 /* Builds a new translation-unit decl with name NAME, queues it in the
5009 global list of translation-unit decls and returns it. */
5011 tree
5012 build_translation_unit_decl (tree name)
5014 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5015 name, NULL_TREE);
5016 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5017 vec_safe_push (all_translation_units, tu);
5018 return tu;
5022 /* BLOCK nodes are used to represent the structure of binding contours
5023 and declarations, once those contours have been exited and their contents
5024 compiled. This information is used for outputting debugging info. */
5026 tree
5027 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5029 tree block = make_node (BLOCK);
5031 BLOCK_VARS (block) = vars;
5032 BLOCK_SUBBLOCKS (block) = subblocks;
5033 BLOCK_SUPERCONTEXT (block) = supercontext;
5034 BLOCK_CHAIN (block) = chain;
5035 return block;
5039 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5041 LOC is the location to use in tree T. */
5043 void
5044 protected_set_expr_location (tree t, location_t loc)
5046 if (CAN_HAVE_LOCATION_P (t))
5047 SET_EXPR_LOCATION (t, loc);
5050 /* Data used when collecting DECLs and TYPEs for language data removal. */
5052 struct free_lang_data_d
5054 free_lang_data_d () : decls (100), types (100) {}
5056 /* Worklist to avoid excessive recursion. */
5057 auto_vec<tree> worklist;
5059 /* Set of traversed objects. Used to avoid duplicate visits. */
5060 hash_set<tree> pset;
5062 /* Array of symbols to process with free_lang_data_in_decl. */
5063 auto_vec<tree> decls;
5065 /* Array of types to process with free_lang_data_in_type. */
5066 auto_vec<tree> types;
5070 /* Add type or decl T to one of the list of tree nodes that need their
5071 language data removed. The lists are held inside FLD. */
5073 static void
5074 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5076 if (DECL_P (t))
5077 fld->decls.safe_push (t);
5078 else if (TYPE_P (t))
5079 fld->types.safe_push (t);
5080 else
5081 gcc_unreachable ();
5084 /* Push tree node T into FLD->WORKLIST. */
5086 static inline void
5087 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5089 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5090 fld->worklist.safe_push ((t));
5095 /* Return simplified TYPE_NAME of TYPE. */
5097 static tree
5098 fld_simplified_type_name (tree type)
5100 if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5101 return TYPE_NAME (type);
5102 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5103 TYPE_DECL if the type doesn't have linkage.
5104 this must match fld_ */
5105 if (type != TYPE_MAIN_VARIANT (type) || ! type_with_linkage_p (type))
5106 return DECL_NAME (TYPE_NAME (type));
5107 return TYPE_NAME (type);
5110 /* Do the same comparison as check_qualified_type, skipping the lang part of
5111 the type, and be more permissive about type names: we only care that the
5112 names are the same (for diagnostics) and that the ODR names are the same. */
5114 static bool
5115 fld_type_variant_equal_p (tree t, tree v)
5117 if (TYPE_QUALS (t) != TYPE_QUALS (v)
5118 /* We want to match incomplete variants with complete types.
5119 In this case we need to ignore alignment. */
5120 || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5121 && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5122 || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5123 || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5124 || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5125 TYPE_ATTRIBUTES (v)))
5126 return false;
5128 return true;
5131 /* Find the variant of FIRST that matches T and create a new one if necessary. */
5133 static tree
5134 fld_type_variant (tree first, tree t, struct free_lang_data_d *fld)
5136 if (first == TYPE_MAIN_VARIANT (t))
5137 return t;
5138 for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5139 if (fld_type_variant_equal_p (t, v))
5140 return v;
5141 tree v = build_variant_type_copy (first);
5142 TYPE_READONLY (v) = TYPE_READONLY (t);
5143 TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5144 TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5145 TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5146 TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5147 TYPE_NAME (v) = TYPE_NAME (t);
5148 TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5149 TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5150 /* Variants of incomplete types should have alignment
5151 set to BITS_PER_UNIT. Do not copy the actual alignment. */
5152 if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5154 SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5155 TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5157 gcc_checking_assert (fld_type_variant_equal_p (t,v));
5158 add_tree_to_fld_list (v, fld);
5159 return v;
5162 /* Map complete types to incomplete types. */
5164 static hash_map<tree, tree> *fld_incomplete_types;
5166 /* For T being an aggregate type, try to turn it into an incomplete variant.
5167 Return T if no simplification is possible. */
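/* E.g. a "struct S *" with S complete is rewritten to point to an incomplete
   copy of S (TYPE_SIZE, TYPE_FIELDS and TYPE_BINFO cleared), a sketch of the
   common case below; this trims the type information that has to be kept
   around once language data is freed.  */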
5169 static tree
5170 fld_incomplete_type_of (tree t, struct free_lang_data_d *fld)
5172 if (!t)
5173 return NULL;
5174 if (POINTER_TYPE_P (t))
5176 tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5177 if (t2 != TREE_TYPE (t))
5179 tree first;
5180 if (TREE_CODE (t) == POINTER_TYPE)
5181 first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5182 TYPE_REF_CAN_ALIAS_ALL (t));
5183 else
5184 first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5185 TYPE_REF_CAN_ALIAS_ALL (t));
5186 gcc_assert (TYPE_CANONICAL (t2) != t2
5187 && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5188 add_tree_to_fld_list (first, fld);
5189 return fld_type_variant (first, t, fld);
5191 return t;
5193 if (!RECORD_OR_UNION_TYPE_P (t) || !COMPLETE_TYPE_P (t))
5194 return t;
5195 if (TYPE_MAIN_VARIANT (t) == t)
5197 bool existed;
5198 tree &copy
5199 = fld_incomplete_types->get_or_insert (t, &existed);
5201 if (!existed)
5203 copy = build_distinct_type_copy (t);
5205 /* It is possible the type was not seen by free_lang_data yet. */
5206 add_tree_to_fld_list (copy, fld);
5207 TYPE_SIZE (copy) = NULL;
5208 SET_TYPE_MODE (copy, VOIDmode);
5209 SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5210 TYPE_USER_ALIGN (copy) = 0;
5211 TYPE_SIZE_UNIT (copy) = NULL;
5212 TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5213 TYPE_TYPELESS_STORAGE (copy) = 0;
5214 TREE_ADDRESSABLE (copy) = 0;
5215 if (AGGREGATE_TYPE_P (t))
5217 TYPE_FIELDS (copy) = NULL;
5218 TYPE_BINFO (copy) = NULL;
5220 else
5221 TYPE_VALUES (copy) = NULL;
5223 return copy;
5225 return (fld_type_variant
5226 (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
5229 /* Simplify type T for scenarios where we do not need complete pointer
5230 types. */
5232 static tree
5233 fld_simplified_type (tree t, struct free_lang_data_d *fld)
5235 if (t && POINTER_TYPE_P (t))
5236 return fld_incomplete_type_of (t, fld);
5237 return t;
5240 /* Reset the expression *EXPR_P, a size or position.
5242 ??? We could reset all non-constant sizes or positions. But it's cheap
5243 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5245 We need to reset self-referential sizes or positions because they cannot
5246 be gimplified and thus can contain a CALL_EXPR after the gimplification
5247 is finished, which will run afoul of LTO streaming. And they need to be
5248 reset to something essentially dummy but not constant, so as to preserve
5249 the properties of the object they are attached to. */
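/* E.g. a self-referential DECL_SIZE or field offset is replaced by a bare
   PLACEHOLDER_EXPR of the same type: still non-constant, but free of the
   CALL_EXPRs that would run afoul of LTO streaming.  */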
5251 static inline void
5252 free_lang_data_in_one_sizepos (tree *expr_p)
5254 tree expr = *expr_p;
5255 if (CONTAINS_PLACEHOLDER_P (expr))
5256 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5260 /* Reset all the fields in a binfo node BINFO. We only keep
5261 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5263 static void
5264 free_lang_data_in_binfo (tree binfo)
5266 unsigned i;
5267 tree t;
5269 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5271 BINFO_VIRTUALS (binfo) = NULL_TREE;
5272 BINFO_BASE_ACCESSES (binfo) = NULL;
5273 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5274 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5275 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5277 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5278 free_lang_data_in_binfo (t);
5282 /* Reset all language specific information still present in TYPE. */
5284 static void
5285 free_lang_data_in_type (tree type, struct free_lang_data_d *fld)
5287 gcc_assert (TYPE_P (type));
5289 /* Give the FE a chance to remove its own data first. */
5290 lang_hooks.free_lang_data (type);
5292 TREE_LANG_FLAG_0 (type) = 0;
5293 TREE_LANG_FLAG_1 (type) = 0;
5294 TREE_LANG_FLAG_2 (type) = 0;
5295 TREE_LANG_FLAG_3 (type) = 0;
5296 TREE_LANG_FLAG_4 (type) = 0;
5297 TREE_LANG_FLAG_5 (type) = 0;
5298 TREE_LANG_FLAG_6 (type) = 0;
5300 TYPE_NEEDS_CONSTRUCTING (type) = 0;
5302 if (TREE_CODE (type) == FUNCTION_TYPE)
5304 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5305 /* Remove the const and volatile qualifiers from arguments. The
5306 C++ front end removes them, but the C front end does not,
5307 leading to false ODR violation errors when merging two
5308 instances of the same function signature compiled by
5309 different front ends. */
5310 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5312 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5313 tree arg_type = TREE_VALUE (p);
5315 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5317 int quals = TYPE_QUALS (arg_type)
5318 & ~TYPE_QUAL_CONST
5319 & ~TYPE_QUAL_VOLATILE;
5320 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5321 free_lang_data_in_type (TREE_VALUE (p), fld);
5323 /* C++ FE uses TREE_PURPOSE to store initial values. */
5324 TREE_PURPOSE (p) = NULL;
5327 else if (TREE_CODE (type) == METHOD_TYPE)
5329 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5330 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5332 /* C++ FE uses TREE_PURPOSE to store initial values. */
5333 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5334 TREE_PURPOSE (p) = NULL;
5337 else if (RECORD_OR_UNION_TYPE_P (type))
5339 /* Remove members that are not FIELD_DECLs from the field list
5340 of an aggregate. These occur in C++. */
5341 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5342 if (TREE_CODE (member) == FIELD_DECL)
5343 prev = &DECL_CHAIN (member);
5344 else
5345 *prev = DECL_CHAIN (member);
5347 TYPE_VFIELD (type) = NULL_TREE;
5349 if (TYPE_BINFO (type))
5351 free_lang_data_in_binfo (TYPE_BINFO (type));
5352 /* We need to preserve link to bases and virtual table for all
5353 polymorphic types to make devirtualization machinery working. */
5354 if (!BINFO_VTABLE (TYPE_BINFO (type))
5355 || !flag_devirtualize)
5356 TYPE_BINFO (type) = NULL;
5359 else if (INTEGRAL_TYPE_P (type)
5360 || SCALAR_FLOAT_TYPE_P (type)
5361 || FIXED_POINT_TYPE_P (type))
5363 if (TREE_CODE (type) == ENUMERAL_TYPE)
5365 /* Type values are used only for C++ ODR checking. Drop them
5366 for all type variants and non-ODR types. */
5367 if (TYPE_MAIN_VARIANT (type) != type
5368 || !type_with_linkage_p (type))
5369 TYPE_VALUES (type) = NULL;
5370 else
5371 /* Simplify representation by recording only values rather
5372 than const decls. */
5373 for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
5374 if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
5375 TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
5377 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5378 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5381 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5383 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5384 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5386 if (TYPE_CONTEXT (type)
5387 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5389 tree ctx = TYPE_CONTEXT (type);
5392 ctx = BLOCK_SUPERCONTEXT (ctx);
5394 while (ctx && TREE_CODE (ctx) == BLOCK);
5395 TYPE_CONTEXT (type) = ctx;
5398 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5399 TYPE_DECL if the type doesn't have linkage.
5400 this must match fld_ */
5401 if (type != TYPE_MAIN_VARIANT (type) || ! type_with_linkage_p (type))
5402 TYPE_STUB_DECL (type) = NULL;
5403 TYPE_NAME (type) = fld_simplified_type_name (type);
5407 /* Return true if DECL may need an assembler name to be set. */
5409 static inline bool
5410 need_assembler_name_p (tree decl)
5412 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5413 Rule merging. This makes type_odr_p return true on those types during
5414 LTO and, by comparing the mangled names, we can say which types are intended
5415 to be equivalent across compilation units.
5417 We do not store names of type_in_anonymous_namespace_p.
5419 Record, union and enumeration types have linkage that allows us
5420 to check type_in_anonymous_namespace_p. We do not mangle compound types
5421 that can always be compared structurally.
5423 Similarly for builtin types, we compare properties of their main variant.
5424 A special case are integer types, where mangling does make a difference
5425 between char/signed char/unsigned char etc. Storing names for these makes
5426 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5427 See cp/mangle.c:write_builtin_type for details. */
5429 if (flag_lto_odr_type_mering
5430 && TREE_CODE (decl) == TYPE_DECL
5431 && DECL_NAME (decl)
5432 && decl == TYPE_NAME (TREE_TYPE (decl))
5433 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5434 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5435 && (type_with_linkage_p (TREE_TYPE (decl))
5436 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5437 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5438 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5439 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5440 if (!VAR_OR_FUNCTION_DECL_P (decl))
5441 return false;
5443 /* If DECL already has its assembler name set, it does not need a
5444 new one. */
5445 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5446 || DECL_ASSEMBLER_NAME_SET_P (decl))
5447 return false;
5449 /* Abstract decls do not need an assembler name. */
5450 if (DECL_ABSTRACT_P (decl))
5451 return false;
5453 /* For VAR_DECLs, only static, public and external symbols need an
5454 assembler name. */
5455 if (VAR_P (decl)
5456 && !TREE_STATIC (decl)
5457 && !TREE_PUBLIC (decl)
5458 && !DECL_EXTERNAL (decl))
5459 return false;
5461 if (TREE_CODE (decl) == FUNCTION_DECL)
5463 /* Do not set assembler name on builtins. Allow RTL expansion to
5464 decide whether to expand inline or via a regular call. */
5465 if (fndecl_built_in_p (decl)
5466 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5467 return false;
5469 /* Functions represented in the callgraph need an assembler name. */
5470 if (cgraph_node::get (decl) != NULL)
5471 return true;
5473 /* Unused and not public functions don't need an assembler name. */
5474 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5475 return false;
5478 return true;
5482 /* Reset all language specific information still present in symbol
5483 DECL. */
5485 static void
5486 free_lang_data_in_decl (tree decl, struct free_lang_data_d *fld)
5488 gcc_assert (DECL_P (decl));
5490 /* Give the FE a chance to remove its own data first. */
5491 lang_hooks.free_lang_data (decl);
5493 TREE_LANG_FLAG_0 (decl) = 0;
5494 TREE_LANG_FLAG_1 (decl) = 0;
5495 TREE_LANG_FLAG_2 (decl) = 0;
5496 TREE_LANG_FLAG_3 (decl) = 0;
5497 TREE_LANG_FLAG_4 (decl) = 0;
5498 TREE_LANG_FLAG_5 (decl) = 0;
5499 TREE_LANG_FLAG_6 (decl) = 0;
5501 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5502 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5503 if (TREE_CODE (decl) == FIELD_DECL)
5505 DECL_FCONTEXT (decl) = NULL;
5506 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5507 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5508 DECL_QUALIFIER (decl) = NULL_TREE;
5511 if (TREE_CODE (decl) == FUNCTION_DECL)
5513 struct cgraph_node *node;
5514 /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5515 the address may be taken in another unit, so this flag has no practical
5516 use for the middle-end.
5518 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5519 for public objects that indeed cannot be addressed, but that is not
5520 the case. Set the flag to true so we do not get merge failures for
5521 e.g. virtual tables between units that take their address and
5522 units that don't. */
5523 if (TREE_PUBLIC (decl))
5524 TREE_ADDRESSABLE (decl) = true;
5525 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5526 if (!(node = cgraph_node::get (decl))
5527 || (!node->definition && !node->clones))
5529 if (node)
5530 node->release_body ();
5531 else
5533 release_function_body (decl);
5534 DECL_ARGUMENTS (decl) = NULL;
5535 DECL_RESULT (decl) = NULL;
5536 DECL_INITIAL (decl) = error_mark_node;
5539 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5541 tree t;
5543 /* If DECL has a gimple body, then the context for its
5544 arguments must be DECL. Otherwise, it doesn't really
5545 matter, as we will not be emitting any code for DECL. In
5546 general, there may be other instances of DECL created by
5547 the front end and since PARM_DECLs are generally shared,
5548 their DECL_CONTEXT changes as the replicas of DECL are
5549 created. The only time where DECL_CONTEXT is important
5550 is for the FUNCTION_DECLs that have a gimple body (since
5551 the PARM_DECL will be used in the function's body). */
5552 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5553 DECL_CONTEXT (t) = decl;
5554 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5555 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5556 = target_option_default_node;
5557 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5558 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5559 = optimization_default_node;
5562 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5563 At this point, it is not needed anymore. */
5564 DECL_SAVED_TREE (decl) = NULL_TREE;
5566 /* Clear the abstract origin if it refers to a method.
5567 Otherwise dwarf2out.c will ICE as we splice functions out of
5568 TYPE_FIELDS and thus the origin will not be output
5569 correctly. */
5570 if (DECL_ABSTRACT_ORIGIN (decl)
5571 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5572 && RECORD_OR_UNION_TYPE_P
5573 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5574 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5576 DECL_VINDEX (decl) = NULL_TREE;
5578 else if (VAR_P (decl))
5580 /* See the comment above for why we set the flag for functions. */
5581 if (TREE_PUBLIC (decl))
5582 TREE_ADDRESSABLE (decl) = true;
5583 if ((DECL_EXTERNAL (decl)
5584 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5585 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5586 DECL_INITIAL (decl) = NULL_TREE;
5588 else if (TREE_CODE (decl) == TYPE_DECL)
5590 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5591 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5592 /* TREE_PUBLIC is used to tell if type is anonymous. */
5593 TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5594 DECL_INITIAL (decl) = NULL_TREE;
5595 DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5596 DECL_MODE (decl) = VOIDmode;
5597 TREE_TYPE (decl) = void_type_node;
5598 SET_DECL_ALIGN (decl, 0);
5600 else if (TREE_CODE (decl) == FIELD_DECL)
5602 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5603 DECL_INITIAL (decl) = NULL_TREE;
5605 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5606 && DECL_INITIAL (decl)
5607 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5609 /* Strip builtins from the translation-unit BLOCK. We still have targets
5610 without builtin_decl_explicit support and also builtins are shared
5611 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5612 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5613 while (*nextp)
5615 tree var = *nextp;
5616 if (fndecl_built_in_p (var))
5617 *nextp = TREE_CHAIN (var);
5618 else
5619 nextp = &TREE_CHAIN (var);
5622 /* We need to keep field decls associated with their trees. Otherwise tree
5623 merging may merge some fields and keep others disjoint, which in turn will
5624 not do well with the TREE_CHAIN pointers linking them.
5626 Also do not drop containing types for virtual methods and tables because
5627 these are needed by devirtualization. */
5628 if (TREE_CODE (decl) != FIELD_DECL
5629 && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5630 || !DECL_VIRTUAL_P (decl)))
5632 tree ctx = DECL_CONTEXT (decl);
5633 /* Variably modified types are needed for tree_is_indexable to decide
5634 whether the type needs to go to a local or global section.
5635 This code is semi-broken but for now it is easiest to keep contexts
5636 as expected. */
5637 if (ctx && TYPE_P (ctx)
5638 && !variably_modified_type_p (ctx, NULL_TREE))
5640 while (ctx && TYPE_P (ctx))
5641 ctx = TYPE_CONTEXT (ctx);
5642 DECL_CONTEXT (decl) = ctx;
5648 /* Operand callback helper for free_lang_data_in_node. *TP is the
5649 subtree operand being considered. */
5651 static tree
5652 find_decls_types_r (tree *tp, int *ws, void *data)
5654 tree t = *tp;
5655 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5657 if (TREE_CODE (t) == TREE_LIST)
5658 return NULL_TREE;
5660 /* Language specific nodes will be removed, so there is no need
5661 to gather anything under them. */
5662 if (is_lang_specific (t))
5664 *ws = 0;
5665 return NULL_TREE;
5668 if (DECL_P (t))
5670 /* Note that walk_tree does not traverse every possible field in
5671 decls, so we have to do our own traversals here. */
5672 add_tree_to_fld_list (t, fld);
5674 fld_worklist_push (DECL_NAME (t), fld);
5675 fld_worklist_push (DECL_CONTEXT (t), fld);
5676 fld_worklist_push (DECL_SIZE (t), fld);
5677 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5679 /* We are going to remove everything under DECL_INITIAL for
5680 TYPE_DECLs. No point walking them. */
5681 if (TREE_CODE (t) != TYPE_DECL)
5682 fld_worklist_push (DECL_INITIAL (t), fld);
5684 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5685 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5687 if (TREE_CODE (t) == FUNCTION_DECL)
5689 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5690 fld_worklist_push (DECL_RESULT (t), fld);
5692 else if (TREE_CODE (t) == FIELD_DECL)
5694 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5695 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5696 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5697 fld_worklist_push (DECL_FCONTEXT (t), fld);
5700 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5701 && DECL_HAS_VALUE_EXPR_P (t))
5702 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5704 if (TREE_CODE (t) != FIELD_DECL
5705 && TREE_CODE (t) != TYPE_DECL)
5706 fld_worklist_push (TREE_CHAIN (t), fld);
5707 *ws = 0;
5709 else if (TYPE_P (t))
5711 /* Note that walk_tree does not traverse every possible field in
5712 types, so we have to do our own traversals here. */
5713 add_tree_to_fld_list (t, fld);
5715 if (!RECORD_OR_UNION_TYPE_P (t))
5716 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5717 fld_worklist_push (TYPE_SIZE (t), fld);
5718 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5719 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5720 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5721 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5722 fld_worklist_push (TYPE_NAME (t), fld);
5723 /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5724 lists, we may look types up in these lists and use them while
5725 optimizing the function body. Thus we need to free lang data
5726 in them. */
5727 if (TREE_CODE (t) == POINTER_TYPE)
5728 fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
5729 if (TREE_CODE (t) == REFERENCE_TYPE)
5730 fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
5731 if (!POINTER_TYPE_P (t))
5732 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5733 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5734 if (!RECORD_OR_UNION_TYPE_P (t))
5735 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5736 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5737 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5738 do not want to reach unused variants this way. */
5739 if (TYPE_CONTEXT (t))
5741 tree ctx = TYPE_CONTEXT (t);
5742 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5743 So push that instead. */
5744 while (ctx && TREE_CODE (ctx) == BLOCK)
5745 ctx = BLOCK_SUPERCONTEXT (ctx);
5746 fld_worklist_push (ctx, fld);
5748 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5749 want to reach unused types this way. */
5751 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5753 unsigned i;
5754 tree tem;
5755 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5756 fld_worklist_push (TREE_TYPE (tem), fld);
5757 fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
5758 fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
5760 if (RECORD_OR_UNION_TYPE_P (t))
5762 tree tem;
5763 /* Push all TYPE_FIELDS - there can be interleaving interesting
5764 and non-interesting things. */
5765 tem = TYPE_FIELDS (t);
5766 while (tem)
5768 if (TREE_CODE (tem) == FIELD_DECL)
5769 fld_worklist_push (tem, fld);
5770 tem = TREE_CHAIN (tem);
5773 if (FUNC_OR_METHOD_TYPE_P (t))
5774 fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
5776 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5777 *ws = 0;
5779 else if (TREE_CODE (t) == BLOCK)
5781 for (tree *tem = &BLOCK_VARS (t); *tem; )
5783 if (TREE_CODE (*tem) != VAR_DECL
5784 || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem)))
5786 gcc_assert (TREE_CODE (*tem) != RESULT_DECL
5787 && TREE_CODE (*tem) != PARM_DECL);
5788 *tem = TREE_CHAIN (*tem);
5790 else
5792 fld_worklist_push (*tem, fld);
5793 tem = &TREE_CHAIN (*tem);
5796 for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5797 fld_worklist_push (tem, fld);
5798 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5801 if (TREE_CODE (t) != IDENTIFIER_NODE
5802 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5803 fld_worklist_push (TREE_TYPE (t), fld);
5805 return NULL_TREE;
5809 /* Find decls and types in T. */
5811 static void
5812 find_decls_types (tree t, struct free_lang_data_d *fld)
5814 while (1)
5816 if (!fld->pset.contains (t))
5817 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5818 if (fld->worklist.is_empty ())
5819 break;
5820 t = fld->worklist.pop ();
5824 /* Translate all the types in LIST with the corresponding runtime
5825 types. */
5827 static tree
5828 get_eh_types_for_runtime (tree list)
5830 tree head, prev;
5832 if (list == NULL_TREE)
5833 return NULL_TREE;
5835 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5836 prev = head;
5837 list = TREE_CHAIN (list);
5838 while (list)
5840 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5841 TREE_CHAIN (prev) = n;
5842 prev = TREE_CHAIN (prev);
5843 list = TREE_CHAIN (list);
5846 return head;
5850 /* Find decls and types referenced in EH region R and store them in
5851 FLD->DECLS and FLD->TYPES. */
5853 static void
5854 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5856 switch (r->type)
5858 case ERT_CLEANUP:
5859 break;
5861 case ERT_TRY:
5863 eh_catch c;
5865 /* The types referenced in each catch must first be changed to the
5866 EH types used at runtime. This removes references to FE types
5867 in the region. */
5868 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5870 c->type_list = get_eh_types_for_runtime (c->type_list);
5871 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5874 break;
5876 case ERT_ALLOWED_EXCEPTIONS:
5877 r->u.allowed.type_list
5878 = get_eh_types_for_runtime (r->u.allowed.type_list);
5879 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5880 break;
5882 case ERT_MUST_NOT_THROW:
5883 walk_tree (&r->u.must_not_throw.failure_decl,
5884 find_decls_types_r, fld, &fld->pset);
5885 break;
5890 /* Find decls and types referenced in cgraph node N and store them in
5891 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5892 look for *every* kind of DECL and TYPE node reachable from N,
5893 including those embedded inside types and decls (i.e., TYPE_DECLs,
5894 NAMESPACE_DECLs, etc.). */
5896 static void
5897 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5899 basic_block bb;
5900 struct function *fn;
5901 unsigned ix;
5902 tree t;
5904 find_decls_types (n->decl, fld);
5906 if (!gimple_has_body_p (n->decl))
5907 return;
5909 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5911 fn = DECL_STRUCT_FUNCTION (n->decl);
5913 /* Traverse locals. */
5914 FOR_EACH_LOCAL_DECL (fn, ix, t)
5915 find_decls_types (t, fld);
5917 /* Traverse EH regions in FN. */
5919 eh_region r;
5920 FOR_ALL_EH_REGION_FN (r, fn)
5921 find_decls_types_in_eh_region (r, fld);
5924 /* Traverse every statement in FN. */
5925 FOR_EACH_BB_FN (bb, fn)
5927 gphi_iterator psi;
5928 gimple_stmt_iterator si;
5929 unsigned i;
5931 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5933 gphi *phi = psi.phi ();
5935 for (i = 0; i < gimple_phi_num_args (phi); i++)
5937 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5938 find_decls_types (*arg_p, fld);
5942 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5944 gimple *stmt = gsi_stmt (si);
5946 if (is_gimple_call (stmt))
5947 find_decls_types (gimple_call_fntype (stmt), fld);
5949 for (i = 0; i < gimple_num_ops (stmt); i++)
5951 tree arg = gimple_op (stmt, i);
5952 find_decls_types (arg, fld);
5959 /* Find decls and types referenced in varpool node N and store them in
5960 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5961 look for *every* kind of DECL and TYPE node reachable from N,
5962 including those embedded inside types and decls (i.e., TYPE_DECLs,
5963 NAMESPACE_DECLs, etc.). */
5965 static void
5966 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5968 find_decls_types (v->decl, fld);
5971 /* If T needs an assembler name, have one created for it. */
5973 void
5974 assign_assembler_name_if_needed (tree t)
5976 if (need_assembler_name_p (t))
5978 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5979 diagnostics that use input_location to show locus
5980 information. The problem here is that, at this point,
5981 input_location is generally anchored to the end of the file
5982 (since the parser is long gone), so we don't have a good
5983 position to pin it to.
5985 To alleviate this problem, this uses the location of T's
5986 declaration. Examples of this are
5987 testsuite/g++.dg/template/cond2.C and
5988 testsuite/g++.dg/template/pr35240.C. */
5989 location_t saved_location = input_location;
5990 input_location = DECL_SOURCE_LOCATION (t);
5992 decl_assembler_name (t);
5994 input_location = saved_location;
5999 /* Free language specific information for every operand and expression
6000 in every node of the call graph. This process operates in three stages:
6002 1- Every callgraph node and varpool node is traversed looking for
6003 decls and types embedded in them. This is a more exhaustive
6004 search than that done by find_referenced_vars, because it will
6005 also collect individual fields, decls embedded in types, etc.
6007 2- All the decls found are sent to free_lang_data_in_decl.
6009 3- All the types found are sent to free_lang_data_in_type.
6011 The ordering between decls and types is important because
6012 free_lang_data_in_decl sets assembler names, which includes
6013 mangling. So types cannot be freed up until assembler names have
6014 been set up. */
6016 static void
6017 free_lang_data_in_cgraph (void)
6019 struct cgraph_node *n;
6020 varpool_node *v;
6021 struct free_lang_data_d fld;
6022 tree t;
6023 unsigned i;
6024 alias_pair *p;
6026 /* Find decls and types in the body of every function in the callgraph. */
6027 FOR_EACH_FUNCTION (n)
6028 find_decls_types_in_node (n, &fld);
6030 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6031 find_decls_types (p->decl, &fld);
6033 /* Find decls and types in every varpool symbol. */
6034 FOR_EACH_VARIABLE (v)
6035 find_decls_types_in_var (v, &fld);
6037 /* Set the assembler name on every decl found. We need to do this
6038 now because free_lang_data_in_decl will invalidate data needed
6039 for mangling. This breaks mangling on interdependent decls. */
6040 FOR_EACH_VEC_ELT (fld.decls, i, t)
6041 assign_assembler_name_if_needed (t);
6043 /* Traverse every decl found freeing its language data. */
6044 FOR_EACH_VEC_ELT (fld.decls, i, t)
6045 free_lang_data_in_decl (t, &fld);
6047 /* Traverse every type found freeing its language data. */
6048 FOR_EACH_VEC_ELT (fld.types, i, t)
6049 free_lang_data_in_type (t, &fld);
6050 if (flag_checking)
6052 FOR_EACH_VEC_ELT (fld.types, i, t)
6053 verify_type (t);
6058 /* Free resources that are used by the FE but are not needed once it is done. */
6060 static unsigned
6061 free_lang_data (void)
6063 unsigned i;
6065 /* If we are the LTO frontend we have freed lang-specific data already. */
6066 if (in_lto_p
6067 || (!flag_generate_lto && !flag_generate_offload))
6068 return 0;
6070 fld_incomplete_types = new hash_map<tree, tree>;
6072 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
6073 if (vec_safe_is_empty (all_translation_units))
6074 build_translation_unit_decl (NULL_TREE);
6076 /* Allocate and assign alias sets to the standard integer types
6077 while the slots are still set up the way the frontends generated them. */
6078 for (i = 0; i < itk_none; ++i)
6079 if (integer_types[i])
6080 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6082 /* Traverse the IL resetting language specific information for
6083 operands, expressions, etc. */
6084 free_lang_data_in_cgraph ();
6086 /* Create gimple variants for common types. */
6087 for (unsigned i = 0;
6088 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6089 ++i)
6090 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6092 /* Reset some langhooks. Do not reset types_compatible_p, it may
6093 still be used indirectly via the get_alias_set langhook. */
6094 lang_hooks.dwarf_name = lhd_dwarf_name;
6095 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6096 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6097 lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6098 lang_hooks.print_xnode = lhd_print_tree_nothing;
6099 lang_hooks.print_decl = lhd_print_tree_nothing;
6100 lang_hooks.print_type = lhd_print_tree_nothing;
6101 lang_hooks.print_identifier = lhd_print_tree_nothing;
6103 lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6105 /* We do not want the default decl_assembler_name implementation.
6106 Once everything is fixed up, we want a wrapper around it that
6107 asserts all non-local symbols already got their assembler names
6108 and that only produces assembler names for local symbols. Or,
6109 better, make sure decl_assembler_name is never called on local
6110 symbols and devise a separate, middle-end private scheme for them. */
6112 /* Reset diagnostic machinery. */
6113 tree_diagnostics_defaults (global_dc);
6115 rebuild_type_inheritance_graph ();
6117 delete fld_incomplete_types;
6119 return 0;
6123 namespace {
6125 const pass_data pass_data_ipa_free_lang_data =
6127 SIMPLE_IPA_PASS, /* type */
6128 "*free_lang_data", /* name */
6129 OPTGROUP_NONE, /* optinfo_flags */
6130 TV_IPA_FREE_LANG_DATA, /* tv_id */
6131 0, /* properties_required */
6132 0, /* properties_provided */
6133 0, /* properties_destroyed */
6134 0, /* todo_flags_start */
6135 0, /* todo_flags_finish */
6138 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6140 public:
6141 pass_ipa_free_lang_data (gcc::context *ctxt)
6142 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6145 /* opt_pass methods: */
6146 virtual unsigned int execute (function *) { return free_lang_data (); }
6148 }; // class pass_ipa_free_lang_data
6150 } // anon namespace
6152 simple_ipa_opt_pass *
6153 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6155 return new pass_ipa_free_lang_data (ctxt);
6158 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6159 of the various TYPE_QUAL values. */
6161 static void
6162 set_type_quals (tree type, int type_quals)
6164 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6165 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6166 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6167 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6168 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6171 /* Returns true iff CAND and BASE have equivalent language-specific
6172 qualifiers. */
6174 bool
6175 check_lang_type (const_tree cand, const_tree base)
6177 if (lang_hooks.types.type_hash_eq == NULL)
6178 return true;
6179 /* type_hash_eq currently only applies to these types. */
6180 if (TREE_CODE (cand) != FUNCTION_TYPE
6181 && TREE_CODE (cand) != METHOD_TYPE)
6182 return true;
6183 return lang_hooks.types.type_hash_eq (cand, base);
6186 /* Returns true iff unqualified CAND and BASE are equivalent. */
6188 bool
6189 check_base_type (const_tree cand, const_tree base)
6191 return (TYPE_NAME (cand) == TYPE_NAME (base)
6192 /* Apparently this is needed for Objective-C. */
6193 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6194 /* Check alignment. */
6195 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6196 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6197 TYPE_ATTRIBUTES (base)));
6200 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6202 bool
6203 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6205 return (TYPE_QUALS (cand) == type_quals
6206 && check_base_type (cand, base)
6207 && check_lang_type (cand, base));
6210 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6212 static bool
6213 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6215 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6216 && TYPE_NAME (cand) == TYPE_NAME (base)
6217 /* Apparently this is needed for Objective-C. */
6218 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6219 /* Check alignment. */
6220 && TYPE_ALIGN (cand) == align
6221 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6222 TYPE_ATTRIBUTES (base))
6223 && check_lang_type (cand, base));
6226 /* This function checks to see if TYPE matches the size of one of the
6227 built-in atomic types, and returns that core atomic type. */
6229 static tree
6230 find_atomic_core_type (tree type)
6232 tree base_atomic_type;
6234 /* Only handle complete types. */
6235 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6236 return NULL_TREE;
6238 switch (tree_to_uhwi (TYPE_SIZE (type)))
6240 case 8:
6241 base_atomic_type = atomicQI_type_node;
6242 break;
6244 case 16:
6245 base_atomic_type = atomicHI_type_node;
6246 break;
6248 case 32:
6249 base_atomic_type = atomicSI_type_node;
6250 break;
6252 case 64:
6253 base_atomic_type = atomicDI_type_node;
6254 break;
6256 case 128:
6257 base_atomic_type = atomicTI_type_node;
6258 break;
6260 default:
6261 base_atomic_type = NULL_TREE;
6264 return base_atomic_type;
6267 /* Return a version of the TYPE, qualified as indicated by the
6268 TYPE_QUALS, if one exists. If no qualified version exists yet,
6269 return NULL_TREE. */
6271 tree
6272 get_qualified_type (tree type, int type_quals)
6274 tree t;
6276 if (TYPE_QUALS (type) == type_quals)
6277 return type;
6279 /* Search the chain of variants to see if there is already one there just
6280 like the one we need to have. If so, use that existing one. We must
6281 preserve the TYPE_NAME, since there is code that depends on this. */
6282 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6283 if (check_qualified_type (t, type, type_quals))
6284 return t;
6286 return NULL_TREE;
6289 /* Like get_qualified_type, but creates the type if it does not
6290 exist. This function never returns NULL_TREE. */
6292 tree
6293 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6295 tree t;
6297 /* See if we already have the appropriate qualified variant. */
6298 t = get_qualified_type (type, type_quals);
6300 /* If not, build it. */
6301 if (!t)
6303 t = build_variant_type_copy (type PASS_MEM_STAT);
6304 set_type_quals (t, type_quals);
6306 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6308 /* See if this object can map to a basic atomic type. */
6309 tree atomic_type = find_atomic_core_type (type);
6310 if (atomic_type)
6312 /* Ensure the alignment of this type is compatible with
6313 the required alignment of the atomic type. */
6314 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6315 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6319 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6320 /* Propagate structural equality. */
6321 SET_TYPE_STRUCTURAL_EQUALITY (t);
6322 else if (TYPE_CANONICAL (type) != type)
6323 /* Build the underlying canonical type, since it is different
6324 from TYPE. */
6326 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6327 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6329 else
6330 /* T is its own canonical type. */
6331 TYPE_CANONICAL (t) = t;
6335 return t;
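/* Illustrative sketch, not part of the original source; the variable
   names are hypothetical.

     tree ctype = build_qualified_type (some_type, TYPE_QUAL_CONST);

   The result has TYPE_READONLY set, shares the variant chain (and
   TYPE_MAIN_VARIANT) of SOME_TYPE, and a second call with the same
   qualifiers typically returns the very same node via
   get_qualified_type.  For TYPE_QUAL_ATOMIC, find_atomic_core_type
   above may additionally raise the alignment to that of the matching
   atomicQI/HI/SI/DI/TI node.  */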
6338 /* Create a variant of type T with alignment ALIGN. */
6340 tree
6341 build_aligned_type (tree type, unsigned int align)
6343 tree t;
6345 if (TYPE_PACKED (type)
6346 || TYPE_ALIGN (type) == align)
6347 return type;
6349 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6350 if (check_aligned_type (t, type, align))
6351 return t;
6353 t = build_variant_type_copy (type);
6354 SET_TYPE_ALIGN (t, align);
6355 TYPE_USER_ALIGN (t) = 1;
6357 return t;
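/* Illustrative sketch, not part of the original source; the variable
   name is hypothetical.  The ALIGN argument is in bits, and packed
   types are returned unchanged:

     tree t16 = build_aligned_type (some_type, 128);

   T16 has TYPE_ALIGN of 128 bits (16 bytes), TYPE_USER_ALIGN set, and
   lives on the variant chain of SOME_TYPE; a repeated request for the
   same alignment is satisfied from that chain via check_aligned_type.  */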
6360 /* Create a new distinct copy of TYPE. The new type is made its own
6361 MAIN_VARIANT. If TYPE requires structural equality checks, the
6362 resulting type requires structural equality checks; otherwise, its
6363 TYPE_CANONICAL points to itself. */
6365 tree
6366 build_distinct_type_copy (tree type MEM_STAT_DECL)
6368 tree t = copy_node (type PASS_MEM_STAT);
6370 TYPE_POINTER_TO (t) = 0;
6371 TYPE_REFERENCE_TO (t) = 0;
6373 /* Set the canonical type either to a new equivalence class, or
6374 propagate the need for structural equality checks. */
6375 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6376 SET_TYPE_STRUCTURAL_EQUALITY (t);
6377 else
6378 TYPE_CANONICAL (t) = t;
6380 /* Make it its own variant. */
6381 TYPE_MAIN_VARIANT (t) = t;
6382 TYPE_NEXT_VARIANT (t) = 0;
6384 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6385 whose TREE_TYPE is not t. This can also happen in the Ada
6386 frontend when using subtypes. */
6388 return t;
6391 /* Create a new variant of TYPE, equivalent but distinct. This is so
6392 the caller can modify it. TYPE_CANONICAL for the return type will
6393 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6394 are considered equal by the language itself (or that both types
6395 require structural equality checks). */
6397 tree
6398 build_variant_type_copy (tree type MEM_STAT_DECL)
6400 tree t, m = TYPE_MAIN_VARIANT (type);
6402 t = build_distinct_type_copy (type PASS_MEM_STAT);
6404 /* Since we're building a variant, assume that it is a non-semantic
6405 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6406 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6407 /* Type variants have no alias set defined. */
6408 TYPE_ALIAS_SET (t) = -1;
6410 /* Add the new type to the chain of variants of TYPE. */
6411 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6412 TYPE_NEXT_VARIANT (m) = t;
6413 TYPE_MAIN_VARIANT (t) = m;
6415 return t;
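/* Illustrative contrast of the two copies above, not part of the
   original source; SOME_TYPE is hypothetical.

     tree v = build_variant_type_copy (some_type);
     tree d = build_distinct_type_copy (some_type);

   V keeps TYPE_CANONICAL of SOME_TYPE and is linked into its variant
   chain, so the middle end still treats the two as the same type.  D
   becomes its own TYPE_MAIN_VARIANT and, unless structural equality is
   required, its own TYPE_CANONICAL, i.e. a genuinely distinct type.  */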
6418 /* Return true if the FROM trees in both tree maps are equal. */
6421 tree_map_base_eq (const void *va, const void *vb)
6423 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6424 *const b = (const struct tree_map_base *) vb;
6425 return (a->from == b->from);
6428 /* Hash a from tree in a tree_map_base. */
6430 unsigned int
6431 tree_map_base_hash (const void *item)
6433 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6436 /* Return true if this tree map structure is marked for garbage collection
6437 purposes. We simply return true if the from tree is marked, so that this
6438 structure goes away when the from tree goes away. */
6441 tree_map_base_marked_p (const void *p)
6443 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6446 /* Hash a from tree in a tree_map. */
6448 unsigned int
6449 tree_map_hash (const void *item)
6451 return (((const struct tree_map *) item)->hash);
6454 /* Hash a from tree in a tree_decl_map. */
6456 unsigned int
6457 tree_decl_map_hash (const void *item)
6459 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6462 /* Return the initialization priority for DECL. */
6464 priority_type
6465 decl_init_priority_lookup (tree decl)
6467 symtab_node *snode = symtab_node::get (decl);
6469 if (!snode)
6470 return DEFAULT_INIT_PRIORITY;
6471 return
6472 snode->get_init_priority ();
6475 /* Return the finalization priority for DECL. */
6477 priority_type
6478 decl_fini_priority_lookup (tree decl)
6480 cgraph_node *node = cgraph_node::get (decl);
6482 if (!node)
6483 return DEFAULT_INIT_PRIORITY;
6484 return
6485 node->get_fini_priority ();
6488 /* Set the initialization priority for DECL to PRIORITY. */
6490 void
6491 decl_init_priority_insert (tree decl, priority_type priority)
6493 struct symtab_node *snode;
6495 if (priority == DEFAULT_INIT_PRIORITY)
6497 snode = symtab_node::get (decl);
6498 if (!snode)
6499 return;
6501 else if (VAR_P (decl))
6502 snode = varpool_node::get_create (decl);
6503 else
6504 snode = cgraph_node::get_create (decl);
6505 snode->set_init_priority (priority);
6508 /* Set the finalization priority for DECL to PRIORITY. */
6510 void
6511 decl_fini_priority_insert (tree decl, priority_type priority)
6513 struct cgraph_node *node;
6515 if (priority == DEFAULT_INIT_PRIORITY)
6517 node = cgraph_node::get (decl);
6518 if (!node)
6519 return;
6521 else
6522 node = cgraph_node::get_create (decl);
6523 node->set_fini_priority (priority);
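/* Illustrative sketch, not part of the original source: a front end
   recording a constructor priority of 200 on a hypothetical FNDECL
   would do roughly

     decl_init_priority_insert (fndecl, 200);

   Passing DEFAULT_INIT_PRIORITY never forces creation of a symtab
   node; the lookup routines above simply fall back to that default
   when no priority has been recorded.  */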
6526 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6528 static void
6529 print_debug_expr_statistics (void)
6531 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6532 (long) debug_expr_for_decl->size (),
6533 (long) debug_expr_for_decl->elements (),
6534 debug_expr_for_decl->collisions ());
6537 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6539 static void
6540 print_value_expr_statistics (void)
6542 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6543 (long) value_expr_for_decl->size (),
6544 (long) value_expr_for_decl->elements (),
6545 value_expr_for_decl->collisions ());
6548 /* Lookup a debug expression for FROM, and return it if we find one. */
6550 tree
6551 decl_debug_expr_lookup (tree from)
6553 struct tree_decl_map *h, in;
6554 in.base.from = from;
6556 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6557 if (h)
6558 return h->to;
6559 return NULL_TREE;
6562 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6564 void
6565 decl_debug_expr_insert (tree from, tree to)
6567 struct tree_decl_map *h;
6569 h = ggc_alloc<tree_decl_map> ();
6570 h->base.from = from;
6571 h->to = to;
6572 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6575 /* Lookup a value expression for FROM, and return it if we find one. */
6577 tree
6578 decl_value_expr_lookup (tree from)
6580 struct tree_decl_map *h, in;
6581 in.base.from = from;
6583 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6584 if (h)
6585 return h->to;
6586 return NULL_TREE;
6589 /* Insert a mapping FROM->TO in the value expression hashtable. */
6591 void
6592 decl_value_expr_insert (tree from, tree to)
6594 struct tree_decl_map *h;
6596 h = ggc_alloc<tree_decl_map> ();
6597 h->base.from = from;
6598 h->to = to;
6599 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
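/* Illustrative sketch, not part of the original source: when a
   variable is to be rewritten into another expression (for instance a
   field of a nested-function frame struct), callers typically do

     SET_DECL_VALUE_EXPR (var, replacement);
     DECL_HAS_VALUE_EXPR_P (var) = 1;

   SET_DECL_VALUE_EXPR wraps decl_value_expr_insert, and the flag on
   the decl is what makes later DECL_VALUE_EXPR accesses consult the
   hash table at all.  */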
6602 /* Lookup a vector of debug arguments for FROM, and return it if we
6603 find one. */
6605 vec<tree, va_gc> **
6606 decl_debug_args_lookup (tree from)
6608 struct tree_vec_map *h, in;
6610 if (!DECL_HAS_DEBUG_ARGS_P (from))
6611 return NULL;
6612 gcc_checking_assert (debug_args_for_decl != NULL);
6613 in.base.from = from;
6614 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6615 if (h)
6616 return &h->to;
6617 return NULL;
6620 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6621 arguments hashtable. */
6623 vec<tree, va_gc> **
6624 decl_debug_args_insert (tree from)
6626 struct tree_vec_map *h;
6627 tree_vec_map **loc;
6629 if (DECL_HAS_DEBUG_ARGS_P (from))
6630 return decl_debug_args_lookup (from);
6631 if (debug_args_for_decl == NULL)
6632 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6633 h = ggc_alloc<tree_vec_map> ();
6634 h->base.from = from;
6635 h->to = NULL;
6636 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6637 *loc = h;
6638 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6639 return &h->to;
6642 /* Hashing of types so that we don't make duplicates.
6643 The entry point is `type_hash_canon'. */
6645 /* Generate the default hash code for TYPE. This is designed for
6646 speed, rather than maximum entropy. */
6648 hashval_t
6649 type_hash_canon_hash (tree type)
6651 inchash::hash hstate;
6653 hstate.add_int (TREE_CODE (type));
6655 if (TREE_TYPE (type))
6656 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6658 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6659 /* Just the identifier is adequate to distinguish. */
6660 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6662 switch (TREE_CODE (type))
6664 case METHOD_TYPE:
6665 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6666 /* FALLTHROUGH. */
6667 case FUNCTION_TYPE:
6668 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6669 if (TREE_VALUE (t) != error_mark_node)
6670 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6671 break;
6673 case OFFSET_TYPE:
6674 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6675 break;
6677 case ARRAY_TYPE:
6679 if (TYPE_DOMAIN (type))
6680 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6681 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6683 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6684 hstate.add_object (typeless);
6687 break;
6689 case INTEGER_TYPE:
6691 tree t = TYPE_MAX_VALUE (type);
6692 if (!t)
6693 t = TYPE_MIN_VALUE (type);
6694 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6695 hstate.add_object (TREE_INT_CST_ELT (t, i));
6696 break;
6699 case REAL_TYPE:
6700 case FIXED_POINT_TYPE:
6702 unsigned prec = TYPE_PRECISION (type);
6703 hstate.add_object (prec);
6704 break;
6707 case VECTOR_TYPE:
6708 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6709 break;
6711 default:
6712 break;
6715 return hstate.end ();
6718 /* These are the Hashtable callback functions. */
6720 /* Returns true iff the types are equivalent. */
6722 bool
6723 type_cache_hasher::equal (type_hash *a, type_hash *b)
6725 /* First test the things that are the same for all types. */
6726 if (a->hash != b->hash
6727 || TREE_CODE (a->type) != TREE_CODE (b->type)
6728 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6729 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6730 TYPE_ATTRIBUTES (b->type))
6731 || (TREE_CODE (a->type) != COMPLEX_TYPE
6732 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6733 return 0;
6735 /* Be careful about comparing arrays before and after the element type
6736 has been completed; don't compare TYPE_ALIGN unless both types are
6737 complete. */
6738 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6739 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6740 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6741 return 0;
6743 switch (TREE_CODE (a->type))
6745 case VOID_TYPE:
6746 case COMPLEX_TYPE:
6747 case POINTER_TYPE:
6748 case REFERENCE_TYPE:
6749 case NULLPTR_TYPE:
6750 return 1;
6752 case VECTOR_TYPE:
6753 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6754 TYPE_VECTOR_SUBPARTS (b->type));
6756 case ENUMERAL_TYPE:
6757 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6758 && !(TYPE_VALUES (a->type)
6759 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6760 && TYPE_VALUES (b->type)
6761 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6762 && type_list_equal (TYPE_VALUES (a->type),
6763 TYPE_VALUES (b->type))))
6764 return 0;
6766 /* fall through */
6768 case INTEGER_TYPE:
6769 case REAL_TYPE:
6770 case BOOLEAN_TYPE:
6771 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6772 return false;
6773 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6774 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6775 TYPE_MAX_VALUE (b->type)))
6776 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6777 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6778 TYPE_MIN_VALUE (b->type))));
6780 case FIXED_POINT_TYPE:
6781 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6783 case OFFSET_TYPE:
6784 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6786 case METHOD_TYPE:
6787 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6788 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6789 || (TYPE_ARG_TYPES (a->type)
6790 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6791 && TYPE_ARG_TYPES (b->type)
6792 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6793 && type_list_equal (TYPE_ARG_TYPES (a->type),
6794 TYPE_ARG_TYPES (b->type)))))
6795 break;
6796 return 0;
6797 case ARRAY_TYPE:
6798 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6799 where the flag should be inherited from the element type
6800 and can change after ARRAY_TYPEs are created; on non-aggregates
6801 compare it and hash it, scalars will never have that flag set
6802 and we need to differentiate between arrays created by different
6803 front-ends or middle-end created arrays. */
6804 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6805 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6806 || (TYPE_TYPELESS_STORAGE (a->type)
6807 == TYPE_TYPELESS_STORAGE (b->type))));
6809 case RECORD_TYPE:
6810 case UNION_TYPE:
6811 case QUAL_UNION_TYPE:
6812 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6813 || (TYPE_FIELDS (a->type)
6814 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6815 && TYPE_FIELDS (b->type)
6816 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6817 && type_list_equal (TYPE_FIELDS (a->type),
6818 TYPE_FIELDS (b->type))));
6820 case FUNCTION_TYPE:
6821 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6822 || (TYPE_ARG_TYPES (a->type)
6823 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6824 && TYPE_ARG_TYPES (b->type)
6825 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6826 && type_list_equal (TYPE_ARG_TYPES (a->type),
6827 TYPE_ARG_TYPES (b->type))))
6828 break;
6829 return 0;
6831 default:
6832 return 0;
6835 if (lang_hooks.types.type_hash_eq != NULL)
6836 return lang_hooks.types.type_hash_eq (a->type, b->type);
6838 return 1;
6841 /* Given TYPE, and HASHCODE its hash code, return the canonical
6842 object for an identical type if one already exists.
6843 Otherwise, return TYPE, and record it as the canonical object.
6845 To use this function, first create a type of the sort you want.
6846 Then compute its hash code from the fields of the type that
6847 make it different from other similar types.
6848 Then call this function and use the value. */
6850 tree
6851 type_hash_canon (unsigned int hashcode, tree type)
6853 type_hash in;
6854 type_hash **loc;
6856 /* The hash table only contains main variants, so ensure that's what we're
6857 being passed. */
6858 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6860 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6861 must call that routine before comparing TYPE_ALIGNs. */
6862 layout_type (type);
6864 in.hash = hashcode;
6865 in.type = type;
6867 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6868 if (*loc)
6870 tree t1 = ((type_hash *) *loc)->type;
6871 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6872 && t1 != type);
6873 if (TYPE_UID (type) + 1 == next_type_uid)
6874 --next_type_uid;
6875 /* Free also min/max values and the cache for integer
6876 types. This can't be done in free_node, as LTO frees
6877 those on its own. */
6878 if (TREE_CODE (type) == INTEGER_TYPE)
6880 if (TYPE_MIN_VALUE (type)
6881 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6883 /* Zero is always in TYPE_CACHED_VALUES. */
6884 if (! TYPE_UNSIGNED (type))
6885 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6886 ggc_free (TYPE_MIN_VALUE (type));
6888 if (TYPE_MAX_VALUE (type)
6889 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6891 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6892 ggc_free (TYPE_MAX_VALUE (type));
6894 if (TYPE_CACHED_VALUES_P (type))
6895 ggc_free (TYPE_CACHED_VALUES (type));
6897 free_node (type);
6898 return t1;
6900 else
6902 struct type_hash *h;
6904 h = ggc_alloc<type_hash> ();
6905 h->hash = hashcode;
6906 h->type = type;
6907 *loc = h;
6909 return type;
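/* Illustrative sketch of the protocol described above, not part of the
   original source (the exact hash computation varies by caller):

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 24;
     fixup_unsigned_type (t);
     hashval_t h = type_hash_canon_hash (t);
     t = type_hash_canon (h, t);

   If an identical type is already registered, the freshly built node
   is freed and the existing one is returned; otherwise the new node
   becomes the canonical entry.  */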
6913 static void
6914 print_type_hash_statistics (void)
6916 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6917 (long) type_hash_table->size (),
6918 (long) type_hash_table->elements (),
6919 type_hash_table->collisions ());
6922 /* Given two lists of types
6923 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6924 return 1 if the lists contain the same types in the same order.
6925 Also, the TREE_PURPOSEs must match. */
6927 bool
6928 type_list_equal (const_tree l1, const_tree l2)
6930 const_tree t1, t2;
6932 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6933 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6934 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6935 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6936 && (TREE_TYPE (TREE_PURPOSE (t1))
6937 == TREE_TYPE (TREE_PURPOSE (t2))))))
6938 return false;
6940 return t1 == t2;
6943 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6944 given by TYPE. If the argument list accepts variable arguments,
6945 then this function counts only the ordinary arguments. */
6948 type_num_arguments (const_tree fntype)
6950 int i = 0;
6952 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6953 /* If the function does not take a variable number of arguments,
6954 the last element in the list will have type `void'. */
6955 if (VOID_TYPE_P (TREE_VALUE (t)))
6956 break;
6957 else
6958 ++i;
6960 return i;
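/* Illustrative example, not part of the original source: for a
   FUNCTION_TYPE describing int (int, char *, ...),
   type_num_arguments returns 2 -- the variadic tail is not counted,
   and neither is the terminating void of a non-variadic prototype.  */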
6963 /* Return the type of the function TYPE's argument ARGNO if known.
6964 For vararg functions where ARGNO refers to one of the variadic
6965 arguments return NULL_TREE. Otherwise, return void_type_node for
6966 out-of-bounds ARGNO. */
6968 tree
6969 type_argument_type (const_tree fntype, unsigned argno)
6971 /* Treat zero the same as an out-of-bounds argument number. */
6972 if (!argno)
6973 return void_type_node;
6975 function_args_iterator iter;
6977 tree argtype;
6978 unsigned i = 1;
6979 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6981 /* A vararg function's argument list ends in a null. Otherwise,
6982 an ordinary function's argument list ends with void. Return
6983 null if ARGNO refers to a vararg argument, void_type_node if
6984 it's out of bounds, and the formal argument type otherwise. */
6985 if (!argtype)
6986 break;
6988 if (i == argno || VOID_TYPE_P (argtype))
6989 return argtype;
6991 ++i;
6994 return NULL_TREE;
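/* Illustrative example, not part of the original source, for the same
   hypothetical int (int, char *, ...) FUNCTION_TYPE:

     type_argument_type (fntype, 1) is the int type,
     type_argument_type (fntype, 2) is the char * type,
     type_argument_type (fntype, 3) is NULL_TREE (a variadic position),
     type_argument_type (fntype, 0) is void_type_node (out of bounds).

   For a non-variadic prototype an out-of-range ARGNO likewise yields
   void_type_node, as the comment above states.  */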
6997 /* Nonzero if integer constants T1 and T2
6998 represent the same constant value. */
7001 tree_int_cst_equal (const_tree t1, const_tree t2)
7003 if (t1 == t2)
7004 return 1;
7006 if (t1 == 0 || t2 == 0)
7007 return 0;
7009 if (TREE_CODE (t1) == INTEGER_CST
7010 && TREE_CODE (t2) == INTEGER_CST
7011 && wi::to_widest (t1) == wi::to_widest (t2))
7012 return 1;
7014 return 0;
7017 /* Return true if T is an INTEGER_CST whose numerical value (extended
7018 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7020 bool
7021 tree_fits_shwi_p (const_tree t)
7023 return (t != NULL_TREE
7024 && TREE_CODE (t) == INTEGER_CST
7025 && wi::fits_shwi_p (wi::to_widest (t)));
7028 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7029 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7031 bool
7032 tree_fits_poly_int64_p (const_tree t)
7034 if (t == NULL_TREE)
7035 return false;
7036 if (POLY_INT_CST_P (t))
7038 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7039 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7040 return false;
7041 return true;
7043 return (TREE_CODE (t) == INTEGER_CST
7044 && wi::fits_shwi_p (wi::to_widest (t)));
7047 /* Return true if T is an INTEGER_CST whose numerical value (extended
7048 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7050 bool
7051 tree_fits_uhwi_p (const_tree t)
7053 return (t != NULL_TREE
7054 && TREE_CODE (t) == INTEGER_CST
7055 && wi::fits_uhwi_p (wi::to_widest (t)));
7058 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7059 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7061 bool
7062 tree_fits_poly_uint64_p (const_tree t)
7064 if (t == NULL_TREE)
7065 return false;
7066 if (POLY_INT_CST_P (t))
7068 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7069 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7070 return false;
7071 return true;
7073 return (TREE_CODE (t) == INTEGER_CST
7074 && wi::fits_uhwi_p (wi::to_widest (t)));
7077 /* T is an INTEGER_CST whose numerical value (extended according to
7078 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7079 HOST_WIDE_INT. */
7081 HOST_WIDE_INT
7082 tree_to_shwi (const_tree t)
7084 gcc_assert (tree_fits_shwi_p (t));
7085 return TREE_INT_CST_LOW (t);
7088 /* T is an INTEGER_CST whose numerical value (extended according to
7089 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7090 HOST_WIDE_INT. */
7092 unsigned HOST_WIDE_INT
7093 tree_to_uhwi (const_tree t)
7095 gcc_assert (tree_fits_uhwi_p (t));
7096 return TREE_INT_CST_LOW (t);
7099 /* Return the most significant (sign) bit of T. */
7102 tree_int_cst_sign_bit (const_tree t)
7104 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7106 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7109 /* Return an indication of the sign of the integer constant T.
7110 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7111 Note that -1 will never be returned if T's type is unsigned. */
7114 tree_int_cst_sgn (const_tree t)
7116 if (wi::to_wide (t) == 0)
7117 return 0;
7118 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7119 return 1;
7120 else if (wi::neg_p (wi::to_wide (t)))
7121 return -1;
7122 else
7123 return 1;
7126 /* Return the minimum number of bits needed to represent VALUE in a
7127 signed or unsigned type, SGN says which. */
7129 unsigned int
7130 tree_int_cst_min_precision (tree value, signop sgn)
7132 /* If the value is negative, compute its negative minus 1. The latter
7133 adjustment is because the absolute value of the largest negative value
7134 is one larger than the largest positive value. This is equivalent to
7135 a bit-wise negation, so use that operation instead. */
7137 if (tree_int_cst_sgn (value) < 0)
7138 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7140 /* Return the number of bits needed, taking into account the fact
7141 that we need one more bit for a signed than unsigned type.
7142 If value is 0 or -1, the minimum precision is 1 no matter
7143 whether SGN is SIGNED or UNSIGNED. */
7145 if (integer_zerop (value))
7146 return 1;
7147 else
7148 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
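/* Worked examples, not part of the original source, with hypothetical
   INTEGER_CSTs FIVE (value 5) and MINUS_THREE (value -3):

     tree_int_cst_min_precision (five, UNSIGNED) == 3        (101)
     tree_int_cst_min_precision (five, SIGNED) == 4          (0101)
     tree_int_cst_min_precision (minus_three, SIGNED) == 3   (101 = -3)

   A negative value is first bit-complemented (here -3 becomes 2)
   before its width is measured.  */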
7151 /* Return truthvalue of whether T1 is the same tree structure as T2.
7152 Return 1 if they are the same.
7153 Return 0 if they are understandably different.
7154 Return -1 if either contains tree structure not understood by
7155 this function. */
7158 simple_cst_equal (const_tree t1, const_tree t2)
7160 enum tree_code code1, code2;
7161 int cmp;
7162 int i;
7164 if (t1 == t2)
7165 return 1;
7166 if (t1 == 0 || t2 == 0)
7167 return 0;
7169 code1 = TREE_CODE (t1);
7170 code2 = TREE_CODE (t2);
7172 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7174 if (CONVERT_EXPR_CODE_P (code2)
7175 || code2 == NON_LVALUE_EXPR)
7176 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7177 else
7178 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7181 else if (CONVERT_EXPR_CODE_P (code2)
7182 || code2 == NON_LVALUE_EXPR)
7183 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7185 if (code1 != code2)
7186 return 0;
7188 switch (code1)
7190 case INTEGER_CST:
7191 return wi::to_widest (t1) == wi::to_widest (t2);
7193 case REAL_CST:
7194 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7196 case FIXED_CST:
7197 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7199 case STRING_CST:
7200 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7201 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7202 TREE_STRING_LENGTH (t1)));
7204 case CONSTRUCTOR:
7206 unsigned HOST_WIDE_INT idx;
7207 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7208 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7210 if (vec_safe_length (v1) != vec_safe_length (v2))
7211 return false;
7213 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7214 /* ??? Should we handle also fields here? */
7215 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7216 return false;
7217 return true;
7220 case SAVE_EXPR:
7221 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7223 case CALL_EXPR:
7224 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7225 if (cmp <= 0)
7226 return cmp;
7227 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7228 return 0;
7230 const_tree arg1, arg2;
7231 const_call_expr_arg_iterator iter1, iter2;
7232 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7233 arg2 = first_const_call_expr_arg (t2, &iter2);
7234 arg1 && arg2;
7235 arg1 = next_const_call_expr_arg (&iter1),
7236 arg2 = next_const_call_expr_arg (&iter2))
7238 cmp = simple_cst_equal (arg1, arg2);
7239 if (cmp <= 0)
7240 return cmp;
7242 return arg1 == arg2;
7245 case TARGET_EXPR:
7246 /* Special case: if either target is an unallocated VAR_DECL,
7247 it means that it's going to be unified with whatever the
7248 TARGET_EXPR is really supposed to initialize, so treat it
7249 as being equivalent to anything. */
7250 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7251 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7252 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7253 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7254 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7255 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7256 cmp = 1;
7257 else
7258 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7260 if (cmp <= 0)
7261 return cmp;
7263 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7265 case WITH_CLEANUP_EXPR:
7266 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7267 if (cmp <= 0)
7268 return cmp;
7270 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7272 case COMPONENT_REF:
7273 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7274 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7276 return 0;
7278 case VAR_DECL:
7279 case PARM_DECL:
7280 case CONST_DECL:
7281 case FUNCTION_DECL:
7282 return 0;
7284 default:
7285 if (POLY_INT_CST_P (t1))
7286 /* A false return means maybe_ne rather than known_ne. */
7287 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7288 TYPE_SIGN (TREE_TYPE (t1))),
7289 poly_widest_int::from (poly_int_cst_value (t2),
7290 TYPE_SIGN (TREE_TYPE (t2))));
7291 break;
7294 /* This general rule works for most tree codes. All exceptions should be
7295 handled above. If this is a language-specific tree code, we can't
7296 trust what might be in the operand, so say we don't know
7297 the situation. */
7298 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7299 return -1;
7301 switch (TREE_CODE_CLASS (code1))
7303 case tcc_unary:
7304 case tcc_binary:
7305 case tcc_comparison:
7306 case tcc_expression:
7307 case tcc_reference:
7308 case tcc_statement:
7309 cmp = 1;
7310 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7312 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7313 if (cmp <= 0)
7314 return cmp;
7317 return cmp;
7319 default:
7320 return -1;
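/* Illustrative example, not part of the original source:

     simple_cst_equal (build_int_cst (integer_type_node, 7),
                       build_int_cst (long_integer_type_node, 7)) == 1

   because INTEGER_CSTs are compared by value, not by type.  A result
   of -1 means "not understood" and must be treated by callers as
   "don't know", never as "different".  */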
7324 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7325 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7326 than U, respectively. */
7329 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7331 if (tree_int_cst_sgn (t) < 0)
7332 return -1;
7333 else if (!tree_fits_uhwi_p (t))
7334 return 1;
7335 else if (TREE_INT_CST_LOW (t) == u)
7336 return 0;
7337 else if (TREE_INT_CST_LOW (t) < u)
7338 return -1;
7339 else
7340 return 1;
7343 /* Return true if SIZE represents a constant size that is in bounds of
7344 what the middle-end and the backend accept (covering not more than
7345 half of the address-space). */
7347 bool
7348 valid_constant_size_p (const_tree size)
7350 if (POLY_INT_CST_P (size))
7352 if (TREE_OVERFLOW (size))
7353 return false;
7354 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7355 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7356 return false;
7357 return true;
7359 if (! tree_fits_uhwi_p (size)
7360 || TREE_OVERFLOW (size)
7361 || tree_int_cst_sign_bit (size) != 0)
7362 return false;
7363 return true;
7366 /* Return the precision of the type, or for a complex or vector type the
7367 precision of the type of its elements. */
7369 unsigned int
7370 element_precision (const_tree type)
7372 if (!TYPE_P (type))
7373 type = TREE_TYPE (type);
7374 enum tree_code code = TREE_CODE (type);
7375 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7376 type = TREE_TYPE (type);
7378 return TYPE_PRECISION (type);
7381 /* Return true if CODE represents an associative tree code. Otherwise
7382 return false. */
7383 bool
7384 associative_tree_code (enum tree_code code)
7386 switch (code)
7388 case BIT_IOR_EXPR:
7389 case BIT_AND_EXPR:
7390 case BIT_XOR_EXPR:
7391 case PLUS_EXPR:
7392 case MULT_EXPR:
7393 case MIN_EXPR:
7394 case MAX_EXPR:
7395 return true;
7397 default:
7398 break;
7400 return false;
7403 /* Return true if CODE represents a commutative tree code. Otherwise
7404 return false. */
7405 bool
7406 commutative_tree_code (enum tree_code code)
7408 switch (code)
7410 case PLUS_EXPR:
7411 case MULT_EXPR:
7412 case MULT_HIGHPART_EXPR:
7413 case MIN_EXPR:
7414 case MAX_EXPR:
7415 case BIT_IOR_EXPR:
7416 case BIT_XOR_EXPR:
7417 case BIT_AND_EXPR:
7418 case NE_EXPR:
7419 case EQ_EXPR:
7420 case UNORDERED_EXPR:
7421 case ORDERED_EXPR:
7422 case UNEQ_EXPR:
7423 case LTGT_EXPR:
7424 case TRUTH_AND_EXPR:
7425 case TRUTH_XOR_EXPR:
7426 case TRUTH_OR_EXPR:
7427 case WIDEN_MULT_EXPR:
7428 case VEC_WIDEN_MULT_HI_EXPR:
7429 case VEC_WIDEN_MULT_LO_EXPR:
7430 case VEC_WIDEN_MULT_EVEN_EXPR:
7431 case VEC_WIDEN_MULT_ODD_EXPR:
7432 return true;
7434 default:
7435 break;
7437 return false;
7440 /* Return true if CODE represents a ternary tree code for which the
7441 first two operands are commutative. Otherwise return false. */
7442 bool
7443 commutative_ternary_tree_code (enum tree_code code)
7445 switch (code)
7447 case WIDEN_MULT_PLUS_EXPR:
7448 case WIDEN_MULT_MINUS_EXPR:
7449 case DOT_PROD_EXPR:
7450 return true;
7452 default:
7453 break;
7455 return false;
7458 /* Returns true if CODE can overflow. */
7460 bool
7461 operation_can_overflow (enum tree_code code)
7463 switch (code)
7465 case PLUS_EXPR:
7466 case MINUS_EXPR:
7467 case MULT_EXPR:
7468 case LSHIFT_EXPR:
7469 /* Can overflow in various ways. */
7470 return true;
7471 case TRUNC_DIV_EXPR:
7472 case EXACT_DIV_EXPR:
7473 case FLOOR_DIV_EXPR:
7474 case CEIL_DIV_EXPR:
7475 /* For INT_MIN / -1. */
7476 return true;
7477 case NEGATE_EXPR:
7478 case ABS_EXPR:
7479 /* For -INT_MIN. */
7480 return true;
7481 default:
7482 /* These operators cannot overflow. */
7483 return false;
7487 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7488 ftrapv doesn't generate trapping insns for CODE. */
7490 bool
7491 operation_no_trapping_overflow (tree type, enum tree_code code)
7493 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7495 /* We don't generate instructions that trap on overflow for complex or vector
7496 types. */
7497 if (!INTEGRAL_TYPE_P (type))
7498 return true;
7500 if (!TYPE_OVERFLOW_TRAPS (type))
7501 return true;
7503 switch (code)
7505 case PLUS_EXPR:
7506 case MINUS_EXPR:
7507 case MULT_EXPR:
7508 case NEGATE_EXPR:
7509 case ABS_EXPR:
7510 /* These operators can overflow, and -ftrapv generates trapping code for
7511 these. */
7512 return false;
7513 case TRUNC_DIV_EXPR:
7514 case EXACT_DIV_EXPR:
7515 case FLOOR_DIV_EXPR:
7516 case CEIL_DIV_EXPR:
7517 case LSHIFT_EXPR:
7518 /* These operators can overflow, but -ftrapv does not generate trapping
7519 code for these. */
7520 return true;
7521 default:
7522 /* These operators cannot overflow. */
7523 return true;
7527 namespace inchash
7530 /* Generate a hash value for an expression. This can be used iteratively
7531 by passing a previous result as the HSTATE argument.
7533 This function is intended to produce the same hash for expressions which
7534 would compare equal using operand_equal_p. */
7535 void
7536 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7538 int i;
7539 enum tree_code code;
7540 enum tree_code_class tclass;
7542 if (t == NULL_TREE || t == error_mark_node)
7544 hstate.merge_hash (0);
7545 return;
7548 if (!(flags & OEP_ADDRESS_OF))
7549 STRIP_NOPS (t);
7551 code = TREE_CODE (t);
7553 switch (code)
7555 /* Alas, constants aren't shared, so we can't rely on pointer
7556 identity. */
7557 case VOID_CST:
7558 hstate.merge_hash (0);
7559 return;
7560 case INTEGER_CST:
7561 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7562 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7563 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7564 return;
7565 case REAL_CST:
7567 unsigned int val2;
7568 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7569 val2 = rvc_zero;
7570 else
7571 val2 = real_hash (TREE_REAL_CST_PTR (t));
7572 hstate.merge_hash (val2);
7573 return;
7575 case FIXED_CST:
7577 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7578 hstate.merge_hash (val2);
7579 return;
7581 case STRING_CST:
7582 hstate.add ((const void *) TREE_STRING_POINTER (t),
7583 TREE_STRING_LENGTH (t));
7584 return;
7585 case COMPLEX_CST:
7586 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7587 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7588 return;
7589 case VECTOR_CST:
7591 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7592 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7593 unsigned int count = vector_cst_encoded_nelts (t);
7594 for (unsigned int i = 0; i < count; ++i)
7595 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7596 return;
7598 case SSA_NAME:
7599 /* We can just compare by pointer. */
7600 hstate.add_hwi (SSA_NAME_VERSION (t));
7601 return;
7602 case PLACEHOLDER_EXPR:
7603 /* The node itself doesn't matter. */
7604 return;
7605 case BLOCK:
7606 case OMP_CLAUSE:
7607 /* Ignore. */
7608 return;
7609 case TREE_LIST:
7610 /* A list of expressions, for a CALL_EXPR or as the elements of a
7611 VECTOR_CST. */
7612 for (; t; t = TREE_CHAIN (t))
7613 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7614 return;
7615 case CONSTRUCTOR:
7617 unsigned HOST_WIDE_INT idx;
7618 tree field, value;
7619 flags &= ~OEP_ADDRESS_OF;
7620 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7622 inchash::add_expr (field, hstate, flags);
7623 inchash::add_expr (value, hstate, flags);
7625 return;
7627 case STATEMENT_LIST:
7629 tree_stmt_iterator i;
7630 for (i = tsi_start (CONST_CAST_TREE (t));
7631 !tsi_end_p (i); tsi_next (&i))
7632 inchash::add_expr (tsi_stmt (i), hstate, flags);
7633 return;
7635 case TREE_VEC:
7636 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7637 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7638 return;
7639 case IDENTIFIER_NODE:
7640 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
7641 return;
7642 case FUNCTION_DECL:
7643 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7644 Otherwise nodes that compare equal according to operand_equal_p might
7645 get different hash codes. However, don't do this for machine specific
7646 or front end builtins, since the function code is overloaded in those
7647 cases. */
7648 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7649 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7651 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7652 code = TREE_CODE (t);
7654 /* FALL THROUGH */
7655 default:
7656 if (POLY_INT_CST_P (t))
7658 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7659 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
7660 return;
7662 tclass = TREE_CODE_CLASS (code);
7664 if (tclass == tcc_declaration)
7666 /* DECLs have a unique ID. */
7667 hstate.add_hwi (DECL_UID (t));
7669 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7671 /* For comparisons that can be swapped, use the lower
7672 tree code. */
7673 enum tree_code ccode = swap_tree_comparison (code);
7674 if (code < ccode)
7675 ccode = code;
7676 hstate.add_object (ccode);
7677 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7678 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7680 else if (CONVERT_EXPR_CODE_P (code))
7682 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7683 operand_equal_p. */
7684 enum tree_code ccode = NOP_EXPR;
7685 hstate.add_object (ccode);
7687 /* Don't hash the type, that can lead to having nodes which
7688 compare equal according to operand_equal_p, but which
7689 have different hash codes. Make sure to include signedness
7690 in the hash computation. */
7691 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7692 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7694 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7695 else if (code == MEM_REF
7696 && (flags & OEP_ADDRESS_OF) != 0
7697 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7698 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7699 && integer_zerop (TREE_OPERAND (t, 1)))
7700 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7701 hstate, flags);
7702 /* Don't ICE on FE specific trees, or their arguments etc.
7703 during operand_equal_p hash verification. */
7704 else if (!IS_EXPR_CODE_CLASS (tclass))
7705 gcc_assert (flags & OEP_HASH_CHECK);
7706 else
7708 unsigned int sflags = flags;
7710 hstate.add_object (code);
7712 switch (code)
7714 case ADDR_EXPR:
7715 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7716 flags |= OEP_ADDRESS_OF;
7717 sflags = flags;
7718 break;
7720 case INDIRECT_REF:
7721 case MEM_REF:
7722 case TARGET_MEM_REF:
7723 flags &= ~OEP_ADDRESS_OF;
7724 sflags = flags;
7725 break;
7727 case ARRAY_REF:
7728 case ARRAY_RANGE_REF:
7729 case COMPONENT_REF:
7730 case BIT_FIELD_REF:
7731 sflags &= ~OEP_ADDRESS_OF;
7732 break;
7734 case COND_EXPR:
7735 flags &= ~OEP_ADDRESS_OF;
7736 break;
7738 case WIDEN_MULT_PLUS_EXPR:
7739 case WIDEN_MULT_MINUS_EXPR:
7741 /* The multiplication operands are commutative. */
7742 inchash::hash one, two;
7743 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7744 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7745 hstate.add_commutative (one, two);
7746 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7747 return;
7750 case CALL_EXPR:
7751 if (CALL_EXPR_FN (t) == NULL_TREE)
7752 hstate.add_int (CALL_EXPR_IFN (t));
7753 break;
7755 case TARGET_EXPR:
7756 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7757 Usually different TARGET_EXPRs should just use
7758 different temporaries in their slots. */
7759 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7760 return;
7762 default:
7763 break;
7766 /* Don't hash the type, that can lead to having nodes which
7767 compare equal according to operand_equal_p, but which
7768 have different hash codes. */
7769 if (code == NON_LVALUE_EXPR)
7771 /* Make sure to include signedness in the hash computation. */
7772 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7773 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7776 else if (commutative_tree_code (code))
7778 /* It's a commutative expression. We want to hash it the same
7779 however it appears. We do this by first hashing both operands
7780 and then rehashing based on the order of their independent
7781 hashes. */
7782 inchash::hash one, two;
7783 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7784 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7785 hstate.add_commutative (one, two);
7787 else
7788 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7789 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7790 i == 0 ? flags : sflags);
7792 return;
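/* Illustrative sketch of the intended invariant, not part of the
   original source: trees that operand_equal_p considers equal must
   hash identically.

     inchash::hash h1, h2;
     inchash::add_expr (t1, h1);
     inchash::add_expr (t2, h2);

   If operand_equal_p (t1, t2, 0) holds, then h1.end () == h2.end ().
   This is why types are deliberately not hashed for conversions and
   NON_LVALUE_EXPRs above -- only their signedness is.  */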
7798 /* Constructors for pointer, array and function types.
7799 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7800 constructed by language-dependent code, not here.) */
7802 /* Construct, lay out and return the type of pointers to TO_TYPE with
7803 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7804 reference all of memory. If such a type has already been
7805 constructed, reuse it. */
7807 tree
7808 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7809 bool can_alias_all)
7811 tree t;
7812 bool could_alias = can_alias_all;
7814 if (to_type == error_mark_node)
7815 return error_mark_node;
7817 /* If the pointed-to type has the may_alias attribute set, force
7818 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7819 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7820 can_alias_all = true;
7822 /* In some cases, languages will have things that aren't a POINTER_TYPE
7823 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7824 In that case, return that type without regard to the rest of our
7825 operands.
7827 ??? This is a kludge, but consistent with the way this function has
7828 always operated and there doesn't seem to be a good way to avoid this
7829 at the moment. */
7830 if (TYPE_POINTER_TO (to_type) != 0
7831 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7832 return TYPE_POINTER_TO (to_type);
7834 /* First, if we already have a type for pointers to TO_TYPE and it's
7835 the proper mode, use it. */
7836 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7837 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7838 return t;
7840 t = make_node (POINTER_TYPE);
7842 TREE_TYPE (t) = to_type;
7843 SET_TYPE_MODE (t, mode);
7844 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7845 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7846 TYPE_POINTER_TO (to_type) = t;
7848 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7849 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7850 SET_TYPE_STRUCTURAL_EQUALITY (t);
7851 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7852 TYPE_CANONICAL (t)
7853 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7854 mode, false);
7856 /* Lay out the type. This function has many callers that are concerned
7857 with expression-construction, and this simplifies them all. */
7858 layout_type (t);
7860 return t;
7863 /* By default build pointers in ptr_mode. */
7865 tree
7866 build_pointer_type (tree to_type)
7868 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7869 : TYPE_ADDR_SPACE (to_type);
7870 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7871 return build_pointer_type_for_mode (to_type, pointer_mode, false);
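/* Illustrative sketch, not part of the original source:

     tree pct = build_pointer_type (char_type_node);

   returns a POINTER_TYPE in the target's default pointer mode; a
   second call for the same pointed-to type walks the TYPE_NEXT_PTR_TO
   chain in build_pointer_type_for_mode and returns the same node
   instead of creating another one.  */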
7874 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7876 tree
7877 build_reference_type_for_mode (tree to_type, machine_mode mode,
7878 bool can_alias_all)
7880 tree t;
7881 bool could_alias = can_alias_all;
7883 if (to_type == error_mark_node)
7884 return error_mark_node;
7886 /* If the pointed-to type has the may_alias attribute set, force
7887 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7888 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7889 can_alias_all = true;
7891 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7892 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7893 In that case, return that type without regard to the rest of our
7894 operands.
7896 ??? This is a kludge, but consistent with the way this function has
7897 always operated and there doesn't seem to be a good way to avoid this
7898 at the moment. */
7899 if (TYPE_REFERENCE_TO (to_type) != 0
7900 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7901 return TYPE_REFERENCE_TO (to_type);
7903 /* First, if we already have a type for references to TO_TYPE and it's
7904 the proper mode, use it. */
7905 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7906 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7907 return t;
7909 t = make_node (REFERENCE_TYPE);
7911 TREE_TYPE (t) = to_type;
7912 SET_TYPE_MODE (t, mode);
7913 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7914 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7915 TYPE_REFERENCE_TO (to_type) = t;
7917 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7918 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7919 SET_TYPE_STRUCTURAL_EQUALITY (t);
7920 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7921 TYPE_CANONICAL (t)
7922 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7923 mode, false);
7925 layout_type (t);
7927 return t;
7931 /* Build the node for the type of references-to-TO_TYPE by default
7932 in ptr_mode. */
7934 tree
7935 build_reference_type (tree to_type)
7937 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7938 : TYPE_ADDR_SPACE (to_type);
7939 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7940 return build_reference_type_for_mode (to_type, pointer_mode, false);
7943 #define MAX_INT_CACHED_PREC \
7944 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7945 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7947 /* Builds a signed or unsigned integer type of precision PRECISION.
7948 Used for C bitfields whose precision does not match that of
7949 built-in target types. */
7950 tree
7951 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7952 int unsignedp)
7954 tree itype, ret;
7956 if (unsignedp)
7957 unsignedp = MAX_INT_CACHED_PREC + 1;
7959 if (precision <= MAX_INT_CACHED_PREC)
7961 itype = nonstandard_integer_type_cache[precision + unsignedp];
7962 if (itype)
7963 return itype;
7966 itype = make_node (INTEGER_TYPE);
7967 TYPE_PRECISION (itype) = precision;
7969 if (unsignedp)
7970 fixup_unsigned_type (itype);
7971 else
7972 fixup_signed_type (itype);
7974 ret = itype;
7976 inchash::hash hstate;
7977 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7978 ret = type_hash_canon (hstate.end (), itype);
7979 if (precision <= MAX_INT_CACHED_PREC)
7980 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7982 return ret;
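/* Illustrative sketch, not part of the original source (the second
   argument is UNSIGNEDP):

     tree u24 = build_nonstandard_integer_type (24, 1);

   yields a 24-bit unsigned INTEGER_TYPE; since 24 does not exceed
   MAX_INT_CACHED_PREC the result is also remembered in
   nonstandard_integer_type_cache, making repeated requests cheap.  */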
7985 #define MAX_BOOL_CACHED_PREC \
7986 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7987 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7989 /* Builds a boolean type of precision PRECISION.
7990 Used for boolean vectors to choose proper vector element size. */
7991 tree
7992 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7994 tree type;
7996 if (precision <= MAX_BOOL_CACHED_PREC)
7998 type = nonstandard_boolean_type_cache[precision];
7999 if (type)
8000 return type;
8003 type = make_node (BOOLEAN_TYPE);
8004 TYPE_PRECISION (type) = precision;
8005 fixup_signed_type (type);
8007 if (precision <= MAX_BOOL_CACHED_PREC)
8008 nonstandard_boolean_type_cache[precision] = type;
8010 return type;
8013 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8014 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8015 is true, reuse such a type that has already been constructed. */
8017 static tree
8018 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8020 tree itype = make_node (INTEGER_TYPE);
8022 TREE_TYPE (itype) = type;
8024 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8025 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8027 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8028 SET_TYPE_MODE (itype, TYPE_MODE (type));
8029 TYPE_SIZE (itype) = TYPE_SIZE (type);
8030 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8031 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8032 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8033 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8035 if (!shared)
8036 return itype;
8038 if ((TYPE_MIN_VALUE (itype)
8039 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8040 || (TYPE_MAX_VALUE (itype)
8041 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8043 /* Since we cannot reliably merge this type, we need to compare it using
8044 structural equality checks. */
8045 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8046 return itype;
8049 hashval_t hash = type_hash_canon_hash (itype);
8050 itype = type_hash_canon (hash, itype);
8052 return itype;
8055 /* Wrapper around build_range_type_1 with SHARED set to true. */
8057 tree
8058 build_range_type (tree type, tree lowval, tree highval)
8060 return build_range_type_1 (type, lowval, highval, true);
8063 /* Wrapper around build_range_type_1 with SHARED set to false. */
8065 tree
8066 build_nonshared_range_type (tree type, tree lowval, tree highval)
8068 return build_range_type_1 (type, lowval, highval, false);
8071 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8072 MAXVAL should be the maximum value in the domain
8073 (one less than the length of the array).
8075 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8076 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8077 The limit exists because the result is a signed type and we don't handle
8078 sizes that use more than one HOST_WIDE_INT. */
8080 tree
8081 build_index_type (tree maxval)
8083 return build_range_type (sizetype, size_zero_node, maxval);
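/* Illustrative sketch, not part of the original source: the domain of
   a ten-element array is

     tree domain = build_index_type (size_int (9));

   i.e. a sizetype-based range [0, 9]; build_array_type_nelts below
   performs exactly this step.  */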
8086 /* Return true if the debug information for TYPE, a subtype, should be emitted
8087 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8088 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8089 debug info and doesn't reflect the source code. */
8091 bool
8092 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8094 tree base_type = TREE_TYPE (type), low, high;
8096 /* Subrange types have a base type which is an integral type. */
8097 if (!INTEGRAL_TYPE_P (base_type))
8098 return false;
8100 /* Get the real bounds of the subtype. */
8101 if (lang_hooks.types.get_subrange_bounds)
8102 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8103 else
8105 low = TYPE_MIN_VALUE (type);
8106 high = TYPE_MAX_VALUE (type);
8109 /* If the type and its base type have the same representation and the same
8110 name, then the type is not a subrange but a copy of the base type. */
8111 if ((TREE_CODE (base_type) == INTEGER_TYPE
8112 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8113 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8114 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8115 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8116 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8117 return false;
8119 if (lowval)
8120 *lowval = low;
8121 if (highval)
8122 *highval = high;
8123 return true;
8126 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8127 and number of elements specified by the range of values of INDEX_TYPE.
8128 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8129 If SHARED is true, reuse such a type that has already been constructed. */
8131 static tree
8132 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8133 bool shared)
8135 tree t;
8137 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8139 error ("arrays of functions are not meaningful");
8140 elt_type = integer_type_node;
8143 t = make_node (ARRAY_TYPE);
8144 TREE_TYPE (t) = elt_type;
8145 TYPE_DOMAIN (t) = index_type;
8146 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8147 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8148 layout_type (t);
8150 /* If the element type is incomplete at this point we get marked for
8151 structural equality. Do not record these types in the canonical
8152 type hashtable. */
8153 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8154 return t;
8156 if (shared)
8158 hashval_t hash = type_hash_canon_hash (t);
8159 t = type_hash_canon (hash, t);
8162 if (TYPE_CANONICAL (t) == t)
8164 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8165 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8166 || in_lto_p)
8167 SET_TYPE_STRUCTURAL_EQUALITY (t);
8168 else if (TYPE_CANONICAL (elt_type) != elt_type
8169 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8170 TYPE_CANONICAL (t)
8171 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8172 index_type
8173 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8174 typeless_storage, shared);
8177 return t;
8180 /* Wrapper around build_array_type_1 with SHARED set to true. */
8182 tree
8183 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8185 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
8188 /* Wrapper around build_array_type_1 with SHARED set to false. */
8190 tree
8191 build_nonshared_array_type (tree elt_type, tree index_type)
8193 return build_array_type_1 (elt_type, index_type, false, false);
8196 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8197 sizetype. */
8199 tree
8200 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8202 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
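/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): an "int[10]" type built from the element type and
   element count; the domain ends up as the 0..9 index type shown above.
   The helper name is hypothetical.  */
static void
example_array_type (void)
{
  tree int10 = build_array_type_nelts (integer_type_node, 10);
  gcc_checking_assert (TREE_CODE (int10) == ARRAY_TYPE
                       && TYPE_MAIN_VARIANT (TREE_TYPE (int10))
                          == integer_type_node);
}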
8205 /* Recursively examines the array elements of TYPE, until a non-array
8206 element type is found. */
8208 tree
8209 strip_array_types (tree type)
8211 while (TREE_CODE (type) == ARRAY_TYPE)
8212 type = TREE_TYPE (type);
8214 return type;
8217 /* Computes the canonical argument types from the argument type list
8218 ARGTYPES.
8220 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8221 on entry to this function, or if any of the ARGTYPES are
8222 structural.
8224 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8225 true on entry to this function, or if any of the ARGTYPES are
8226 non-canonical.
8228 Returns a canonical argument list, which may be ARGTYPES when the
8229 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8230 true) or would not differ from ARGTYPES. */
8232 static tree
8233 maybe_canonicalize_argtypes (tree argtypes,
8234 bool *any_structural_p,
8235 bool *any_noncanonical_p)
8237 tree arg;
8238 bool any_noncanonical_argtypes_p = false;
8240 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8242 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8243 /* Fail gracefully by stating that the type is structural. */
8244 *any_structural_p = true;
8245 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8246 *any_structural_p = true;
8247 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8248 || TREE_PURPOSE (arg))
8249 /* If the argument has a default argument, we consider it
8250 non-canonical even though the type itself is canonical.
8251 That way, different variants of function and method types
8252 with default arguments will all point to the variant with
8253 no defaults as their canonical type. */
8254 any_noncanonical_argtypes_p = true;
8257 if (*any_structural_p)
8258 return argtypes;
8260 if (any_noncanonical_argtypes_p)
8262 /* Build the canonical list of argument types. */
8263 tree canon_argtypes = NULL_TREE;
8264 bool is_void = false;
8266 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8268 if (arg == void_list_node)
8269 is_void = true;
8270 else
8271 canon_argtypes = tree_cons (NULL_TREE,
8272 TYPE_CANONICAL (TREE_VALUE (arg)),
8273 canon_argtypes);
8276 canon_argtypes = nreverse (canon_argtypes);
8277 if (is_void)
8278 canon_argtypes = chainon (canon_argtypes, void_list_node);
8280 /* There is a non-canonical type. */
8281 *any_noncanonical_p = true;
8282 return canon_argtypes;
8285 /* The canonical argument types are the same as ARGTYPES. */
8286 return argtypes;
8289 /* Construct, lay out and return
8290 the type of functions returning type VALUE_TYPE
8291 given arguments of types ARG_TYPES.
8292 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8293 are data type nodes for the arguments of the function.
8294 If such a type has already been constructed, reuse it. */
8296 tree
8297 build_function_type (tree value_type, tree arg_types)
8299 tree t;
8300 inchash::hash hstate;
8301 bool any_structural_p, any_noncanonical_p;
8302 tree canon_argtypes;
8304 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8306 error ("function return type cannot be function");
8307 value_type = integer_type_node;
8310 /* Make a node of the sort we want. */
8311 t = make_node (FUNCTION_TYPE);
8312 TREE_TYPE (t) = value_type;
8313 TYPE_ARG_TYPES (t) = arg_types;
8315 /* If we already have such a type, use the old one. */
8316 hashval_t hash = type_hash_canon_hash (t);
8317 t = type_hash_canon (hash, t);
8319 /* Set up the canonical type. */
8320 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8321 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8322 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8323 &any_structural_p,
8324 &any_noncanonical_p);
8325 if (any_structural_p)
8326 SET_TYPE_STRUCTURAL_EQUALITY (t);
8327 else if (any_noncanonical_p)
8328 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8329 canon_argtypes);
8331 if (!COMPLETE_TYPE_P (t))
8332 layout_type (t);
8333 return t;
8336 /* Build a function type. The RETURN_TYPE is the type returned by the
8337 function. If VAARGS is set, no void_type_node is appended to the
8338 list. ARGP must always be terminated by a NULL_TREE. */
8340 static tree
8341 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8343 tree t, args, last;
8345 t = va_arg (argp, tree);
8346 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8347 args = tree_cons (NULL_TREE, t, args);
8349 if (vaargs)
8351 last = args;
8352 if (args != NULL_TREE)
8353 args = nreverse (args);
8354 gcc_assert (last != void_list_node);
8356 else if (args == NULL_TREE)
8357 args = void_list_node;
8358 else
8360 last = args;
8361 args = nreverse (args);
8362 TREE_CHAIN (last) = void_list_node;
8364 args = build_function_type (return_type, args);
8366 return args;
8369 /* Build a function type. The RETURN_TYPE is the type returned by the
8370 function. If additional arguments are provided, they are
8371 additional argument types. The list of argument types must always
8372 be terminated by NULL_TREE. */
8374 tree
8375 build_function_type_list (tree return_type, ...)
8377 tree args;
8378 va_list p;
8380 va_start (p, return_type);
8381 args = build_function_type_list_1 (false, return_type, p);
8382 va_end (p);
8383 return args;
8386 /* Build a variable argument function type. The RETURN_TYPE is the
8387 type returned by the function. If additional arguments are provided,
8388 they are additional argument types. The list of argument types must
8389 always be terminated by NULL_TREE. */
8391 tree
8392 build_varargs_function_type_list (tree return_type, ...)
8394 tree args;
8395 va_list p;
8397 va_start (p, return_type);
8398 args = build_function_type_list_1 (true, return_type, p);
8399 va_end (p);
8401 return args;
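/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): the type of "void f (void *, int)" versus a varargs
   "int g (void *, ...)".  Assumes the common tree nodes exist; the helper
   name is hypothetical.  */
static void
example_function_types (void)
{
  tree fixed = build_function_type_list (void_type_node, ptr_type_node,
                                         integer_type_node, NULL_TREE);
  tree varargs = build_varargs_function_type_list (integer_type_node,
                                                   ptr_type_node, NULL_TREE);
  gcc_checking_assert (!stdarg_p (fixed) && stdarg_p (varargs));
}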
8404 /* Build a function type. RETURN_TYPE is the type returned by the
8405 function; VAARGS indicates whether the function takes varargs. The
8406 function takes N named arguments, the types of which are provided in
8407 ARG_TYPES. */
8409 static tree
8410 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8411 tree *arg_types)
8413 int i;
8414 tree t = vaargs ? NULL_TREE : void_list_node;
8416 for (i = n - 1; i >= 0; i--)
8417 t = tree_cons (NULL_TREE, arg_types[i], t);
8419 return build_function_type (return_type, t);
8422 /* Build a function type. RETURN_TYPE is the type returned by the
8423 function. The function takes N named arguments, the types of which
8424 are provided in ARG_TYPES. */
8426 tree
8427 build_function_type_array (tree return_type, int n, tree *arg_types)
8429 return build_function_type_array_1 (false, return_type, n, arg_types);
8432 /* Build a variable argument function type. RETURN_TYPE is the type
8433 returned by the function. The function takes N named arguments, the
8434 types of which are provided in ARG_TYPES. */
8436 tree
8437 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8439 return build_function_type_array_1 (true, return_type, n, arg_types);
8442 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8443 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8444 for the method. An implicit additional parameter (of type
8445 pointer-to-BASETYPE) is added to the ARGTYPES. */
8447 tree
8448 build_method_type_directly (tree basetype,
8449 tree rettype,
8450 tree argtypes)
8452 tree t;
8453 tree ptype;
8454 bool any_structural_p, any_noncanonical_p;
8455 tree canon_argtypes;
8457 /* Make a node of the sort we want. */
8458 t = make_node (METHOD_TYPE);
8460 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8461 TREE_TYPE (t) = rettype;
8462 ptype = build_pointer_type (basetype);
8464 /* The actual arglist for this function includes a "hidden" argument
8465 which is "this". Put it into the list of argument types. */
8466 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8467 TYPE_ARG_TYPES (t) = argtypes;
8469 /* If we already have such a type, use the old one. */
8470 hashval_t hash = type_hash_canon_hash (t);
8471 t = type_hash_canon (hash, t);
8473 /* Set up the canonical type. */
8474 any_structural_p
8475 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8476 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8477 any_noncanonical_p
8478 = (TYPE_CANONICAL (basetype) != basetype
8479 || TYPE_CANONICAL (rettype) != rettype);
8480 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8481 &any_structural_p,
8482 &any_noncanonical_p);
8483 if (any_structural_p)
8484 SET_TYPE_STRUCTURAL_EQUALITY (t);
8485 else if (any_noncanonical_p)
8486 TYPE_CANONICAL (t)
8487 = build_method_type_directly (TYPE_CANONICAL (basetype),
8488 TYPE_CANONICAL (rettype),
8489 canon_argtypes);
8490 if (!COMPLETE_TYPE_P (t))
8491 layout_type (t);
8493 return t;
8496 /* Construct, lay out and return the type of methods belonging to class
8497 BASETYPE and whose arguments and values are described by TYPE.
8498 If that type exists already, reuse it.
8499 TYPE must be a FUNCTION_TYPE node. */
8501 tree
8502 build_method_type (tree basetype, tree type)
8504 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8506 return build_method_type_directly (basetype,
8507 TREE_TYPE (type),
8508 TYPE_ARG_TYPES (type));
8511 /* Construct, lay out and return the type of offsets to a value
8512 of type TYPE, within an object of type BASETYPE.
8513 If a suitable offset type exists already, reuse it. */
8515 tree
8516 build_offset_type (tree basetype, tree type)
8518 tree t;
8520 /* Make a node of the sort we want. */
8521 t = make_node (OFFSET_TYPE);
8523 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8524 TREE_TYPE (t) = type;
8526 /* If we already have such a type, use the old one. */
8527 hashval_t hash = type_hash_canon_hash (t);
8528 t = type_hash_canon (hash, t);
8530 if (!COMPLETE_TYPE_P (t))
8531 layout_type (t);
8533 if (TYPE_CANONICAL (t) == t)
8535 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8536 || TYPE_STRUCTURAL_EQUALITY_P (type))
8537 SET_TYPE_STRUCTURAL_EQUALITY (t);
8538 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8539 || TYPE_CANONICAL (type) != type)
8540 TYPE_CANONICAL (t)
8541 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8542 TYPE_CANONICAL (type));
8545 return t;
8548 /* Create a complex type whose components are COMPONENT_TYPE.
8550 If NAMED is true, the type is given a TYPE_NAME. We do not always
8551 do so because this creates a DECL node and thus makes the DECL_UIDs
8552 dependent on the type canonicalization hashtable, which is GC-ed,
8553 so the DECL_UIDs would not be stable with respect to garbage collection. */
8555 tree
8556 build_complex_type (tree component_type, bool named)
8558 gcc_assert (INTEGRAL_TYPE_P (component_type)
8559 || SCALAR_FLOAT_TYPE_P (component_type)
8560 || FIXED_POINT_TYPE_P (component_type));
8562 /* Make a node of the sort we want. */
8563 tree probe = make_node (COMPLEX_TYPE);
8565 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8567 /* If we already have such a type, use the old one. */
8568 hashval_t hash = type_hash_canon_hash (probe);
8569 tree t = type_hash_canon (hash, probe);
8571 if (t == probe)
8573 /* We created a new type. The hash insertion will have laid
8574 out the type. We need to check the canonicalization and
8575 maybe set the name. */
8576 gcc_checking_assert (COMPLETE_TYPE_P (t)
8577 && !TYPE_NAME (t)
8578 && TYPE_CANONICAL (t) == t);
8580 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8581 SET_TYPE_STRUCTURAL_EQUALITY (t);
8582 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8583 TYPE_CANONICAL (t)
8584 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8586 /* We need to create a name, since complex is a fundamental type. */
8587 if (named)
8589 const char *name = NULL;
8591 if (TREE_TYPE (t) == char_type_node)
8592 name = "complex char";
8593 else if (TREE_TYPE (t) == signed_char_type_node)
8594 name = "complex signed char";
8595 else if (TREE_TYPE (t) == unsigned_char_type_node)
8596 name = "complex unsigned char";
8597 else if (TREE_TYPE (t) == short_integer_type_node)
8598 name = "complex short int";
8599 else if (TREE_TYPE (t) == short_unsigned_type_node)
8600 name = "complex short unsigned int";
8601 else if (TREE_TYPE (t) == integer_type_node)
8602 name = "complex int";
8603 else if (TREE_TYPE (t) == unsigned_type_node)
8604 name = "complex unsigned int";
8605 else if (TREE_TYPE (t) == long_integer_type_node)
8606 name = "complex long int";
8607 else if (TREE_TYPE (t) == long_unsigned_type_node)
8608 name = "complex long unsigned int";
8609 else if (TREE_TYPE (t) == long_long_integer_type_node)
8610 name = "complex long long int";
8611 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8612 name = "complex long long unsigned int";
8614 if (name != NULL)
8615 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8616 get_identifier (name), t);
8620 return build_qualified_type (t, TYPE_QUALS (component_type));
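/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): the C type "_Complex double"; after initialization this
   normally shares the node already recorded as complex_double_type_node.
   The helper name is hypothetical.  */
static void
example_complex_type (void)
{
  tree cdbl = build_complex_type (double_type_node, false);
  gcc_checking_assert (TREE_CODE (cdbl) == COMPLEX_TYPE
                       && TREE_TYPE (cdbl) == double_type_node);
}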
8623 /* If TYPE is a real or complex floating-point type and the target
8624 does not directly support arithmetic on TYPE then return the wider
8625 type to be used for arithmetic on TYPE. Otherwise, return
8626 NULL_TREE. */
8628 tree
8629 excess_precision_type (tree type)
8631 /* The target can give two different responses to the question of
8632 which excess precision mode it would like depending on whether we
8633 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8635 enum excess_precision_type requested_type
8636 = (flag_excess_precision == EXCESS_PRECISION_FAST
8637 ? EXCESS_PRECISION_TYPE_FAST
8638 : EXCESS_PRECISION_TYPE_STANDARD);
8640 enum flt_eval_method target_flt_eval_method
8641 = targetm.c.excess_precision (requested_type);
8643 /* The target should not ask for unpredictable float evaluation (though
8644 it might advertise that implicitly the evaluation is unpredictable,
8645 but we don't care about that here, it will have been reported
8646 elsewhere). If it does ask for unpredictable evaluation, we have
8647 nothing to do here. */
8648 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8650 /* Nothing to do. The target has asked for all types we know about
8651 to be computed with their native precision and range. */
8652 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8653 return NULL_TREE;
8655 /* The target will promote this type in a target-dependent way, so excess
8656 precision ought to leave it alone. */
8657 if (targetm.promoted_type (type) != NULL_TREE)
8658 return NULL_TREE;
8660 machine_mode float16_type_mode = (float16_type_node
8661 ? TYPE_MODE (float16_type_node)
8662 : VOIDmode);
8663 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8664 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8666 switch (TREE_CODE (type))
8668 case REAL_TYPE:
8670 machine_mode type_mode = TYPE_MODE (type);
8671 switch (target_flt_eval_method)
8673 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8674 if (type_mode == float16_type_mode)
8675 return float_type_node;
8676 break;
8677 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8678 if (type_mode == float16_type_mode
8679 || type_mode == float_type_mode)
8680 return double_type_node;
8681 break;
8682 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8683 if (type_mode == float16_type_mode
8684 || type_mode == float_type_mode
8685 || type_mode == double_type_mode)
8686 return long_double_type_node;
8687 break;
8688 default:
8689 gcc_unreachable ();
8691 break;
8693 case COMPLEX_TYPE:
8695 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8696 return NULL_TREE;
8697 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8698 switch (target_flt_eval_method)
8700 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8701 if (type_mode == float16_type_mode)
8702 return complex_float_type_node;
8703 break;
8704 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8705 if (type_mode == float16_type_mode
8706 || type_mode == float_type_mode)
8707 return complex_double_type_node;
8708 break;
8709 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8710 if (type_mode == float16_type_mode
8711 || type_mode == float_type_mode
8712 || type_mode == double_type_mode)
8713 return complex_long_double_type_node;
8714 break;
8715 default:
8716 gcc_unreachable ();
8718 break;
8720 default:
8721 break;
8724 return NULL_TREE;
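/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): on an x87-style target that evaluates float and double
   arithmetic in long double, this returns long_double_type_node for "float";
   on targets computing in native precision it returns NULL_TREE.  The helper
   name is hypothetical.  */
static void
example_excess_precision (void)
{
  tree wider = excess_precision_type (float_type_node);
  gcc_checking_assert (wider == NULL_TREE || SCALAR_FLOAT_TYPE_P (wider));
}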
8727 /* Return OP, stripped of any conversions to wider types as much as is safe.
8728 Converting the value back to OP's type makes a value equivalent to OP.
8730 If FOR_TYPE is nonzero, we return a value which, if converted to
8731 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8733 OP must have integer, real or enumeral type. Pointers are not allowed!
8735 There are some cases where the obvious value we could return
8736 would regenerate to OP if converted to OP's type,
8737 but would not extend like OP to wider types.
8738 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8739 For example, if OP is (unsigned short)(signed char)-1,
8740 we avoid returning (signed char)-1 if FOR_TYPE is int,
8741 even though extending that to an unsigned short would regenerate OP,
8742 since the result of extending (signed char)-1 to (int)
8743 is different from (int) OP. */
8745 tree
8746 get_unwidened (tree op, tree for_type)
8748 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8749 tree type = TREE_TYPE (op);
8750 unsigned final_prec
8751 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8752 int uns
8753 = (for_type != 0 && for_type != type
8754 && final_prec > TYPE_PRECISION (type)
8755 && TYPE_UNSIGNED (type));
8756 tree win = op;
8758 while (CONVERT_EXPR_P (op))
8760 int bitschange;
8762 /* TYPE_PRECISION on vector types has different meaning
8763 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8764 so avoid them here. */
8765 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8766 break;
8768 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8769 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8771 /* Truncations are many-one so cannot be removed,
8772 unless we are later going to truncate down even further. */
8773 if (bitschange < 0
8774 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8775 break;
8777 /* See what's inside this conversion. If we decide to strip it,
8778 we will set WIN. */
8779 op = TREE_OPERAND (op, 0);
8781 /* If we have not stripped any zero-extensions (uns is 0),
8782 we can strip any kind of extension.
8783 If we have previously stripped a zero-extension,
8784 only zero-extensions can safely be stripped.
8785 Any extension can be stripped if the bits it would produce
8786 are all going to be discarded later by truncating to FOR_TYPE. */
8788 if (bitschange > 0)
8790 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8791 win = op;
8792 /* TYPE_UNSIGNED says whether this is a zero-extension.
8793 Let's avoid computing it if it does not affect WIN
8794 and if UNS will not be needed again. */
8795 if ((uns
8796 || CONVERT_EXPR_P (op))
8797 && TYPE_UNSIGNED (TREE_TYPE (op)))
8799 uns = 1;
8800 win = op;
8805 /* If we finally reach a constant, see if it fits in something smaller
8806 and in that case convert it. */
8807 if (TREE_CODE (win) == INTEGER_CST)
8809 tree wtype = TREE_TYPE (win);
8810 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8811 if (for_type)
8812 prec = MAX (prec, final_prec);
8813 if (prec < TYPE_PRECISION (wtype))
8815 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8816 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8817 win = fold_convert (t, win);
8821 return win;
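/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): stripping a widening conversion.  For OP = (int) s,
   with S of type short, the cast can be removed because converting the
   result back to int regenerates OP.  The helper name and variable are
   hypothetical.  */
static void
example_get_unwidened (void)
{
  tree s = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier ("s"),
                       short_integer_type_node);
  tree widened = build1 (NOP_EXPR, integer_type_node, s);
  gcc_checking_assert (get_unwidened (widened, NULL_TREE) == s);
}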
8824 /* Return OP or a simpler expression for a narrower value
8825 which can be sign-extended or zero-extended to give back OP.
8826 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8827 or 0 if the value should be sign-extended. */
8829 tree
8830 get_narrower (tree op, int *unsignedp_ptr)
8832 int uns = 0;
8833 int first = 1;
8834 tree win = op;
8835 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8837 while (TREE_CODE (op) == NOP_EXPR)
8839 int bitschange
8840 = (TYPE_PRECISION (TREE_TYPE (op))
8841 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8843 /* Truncations are many-one so cannot be removed. */
8844 if (bitschange < 0)
8845 break;
8847 /* See what's inside this conversion. If we decide to strip it,
8848 we will set WIN. */
8850 if (bitschange > 0)
8852 op = TREE_OPERAND (op, 0);
8853 /* An extension: the outermost one can be stripped,
8854 but remember whether it is zero or sign extension. */
8855 if (first)
8856 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8857 /* Otherwise, if a sign extension has been stripped,
8858 only sign extensions can now be stripped;
8859 if a zero extension has been stripped, only zero-extensions. */
8860 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8861 break;
8862 first = 0;
8864 else /* bitschange == 0 */
8866 /* A change in nominal type can always be stripped, but we must
8867 preserve the unsignedness. */
8868 if (first)
8869 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8870 first = 0;
8871 op = TREE_OPERAND (op, 0);
8872 /* Keep trying to narrow, but don't assign op to win if it
8873 would turn an integral type into something else. */
8874 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8875 continue;
8878 win = op;
8881 if (TREE_CODE (op) == COMPONENT_REF
8882 /* Since type_for_size always gives an integer type. */
8883 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8884 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8885 /* Ensure field is laid out already. */
8886 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8887 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8889 unsigned HOST_WIDE_INT innerprec
8890 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8891 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8892 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8893 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8895 /* We can get this structure field in a narrower type that fits it,
8896 but the resulting extension to its nominal type (a fullword type)
8897 must satisfy the same conditions as for other extensions.
8899 Do this only for fields that are aligned (not bit-fields),
8900 because when bit-field insns will be used there is no
8901 advantage in doing this. */
8903 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8904 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8905 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8906 && type != 0)
8908 if (first)
8909 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8910 win = fold_convert (type, op);
8914 *unsignedp_ptr = uns;
8915 return win;
8918 /* Return true if integer constant C has a value that is permissible
8919 for TYPE, an integral type. */
8921 bool
8922 int_fits_type_p (const_tree c, const_tree type)
8924 tree type_low_bound, type_high_bound;
8925 bool ok_for_low_bound, ok_for_high_bound;
8926 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8928 /* Non-standard boolean types can have arbitrary precision but various
8929 transformations assume that they can only take values 0 and +/-1. */
8930 if (TREE_CODE (type) == BOOLEAN_TYPE)
8931 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8933 retry:
8934 type_low_bound = TYPE_MIN_VALUE (type);
8935 type_high_bound = TYPE_MAX_VALUE (type);
8937 /* If at least one bound of the type is a constant integer, we can check
8938 ourselves and maybe make a decision. If no such decision is possible, but
8939 this type is a subtype, try checking against that. Otherwise, use
8940 fits_to_tree_p, which checks against the precision.
8942 Compute the status for each possibly constant bound, and return early if
8943 we see that one does not match. Use ok_for_xxx_bound for this purpose:
8944 true means the constant is known to satisfy that bound, false means that
8945 bound is not a constant, so no decision can be drawn from it alone. */
8947 /* Check if c >= type_low_bound. */
8948 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8950 if (tree_int_cst_lt (c, type_low_bound))
8951 return false;
8952 ok_for_low_bound = true;
8954 else
8955 ok_for_low_bound = false;
8957 /* Check if c <= type_high_bound. */
8958 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8960 if (tree_int_cst_lt (type_high_bound, c))
8961 return false;
8962 ok_for_high_bound = true;
8964 else
8965 ok_for_high_bound = false;
8967 /* If the constant fits both bounds, the result is known. */
8968 if (ok_for_low_bound && ok_for_high_bound)
8969 return true;
8971 /* Perform some generic filtering which may allow making a decision
8972 even if the bounds are not constant. First, negative integers
8973 never fit in unsigned types, */
8974 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8975 return false;
8977 /* Second, narrower types always fit in wider ones. */
8978 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8979 return true;
8981 /* Third, unsigned integers with top bit set never fit signed types. */
8982 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8984 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8985 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8987 /* When a tree_cst is converted to a wide-int, the precision
8988 is taken from the type. However, if the precision of the
8989 mode underneath the type is smaller than that, it is
8990 possible that the value will not fit. The test below
8991 fails if any bit is set between the sign bit of the
8992 underlying mode and the top bit of the type. */
8993 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8994 return false;
8996 else if (wi::neg_p (wi::to_wide (c)))
8997 return false;
9000 /* If we haven't been able to decide at this point, there is nothing more
9001 we can check ourselves here. Look at the base type if we have one and it
9002 has the same precision. */
9003 if (TREE_CODE (type) == INTEGER_TYPE
9004 && TREE_TYPE (type) != 0
9005 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9007 type = TREE_TYPE (type);
9008 goto retry;
9011 /* Or to fits_to_tree_p, if nothing else. */
9012 return wi::fits_to_tree_p (wi::to_wide (c), type);
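/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): the constant 300 fits in "int" but not in
   "signed char", whose range on typical targets is only -128..127.  The
   helper name is hypothetical.  */
static void
example_int_fits_type_p (void)
{
  tree c = build_int_cst (integer_type_node, 300);
  gcc_checking_assert (int_fits_type_p (c, integer_type_node)
                       && !int_fits_type_p (c, signed_char_type_node));
}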
9015 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9016 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9017 represented (assuming two's-complement arithmetic) within the bit
9018 precision of the type are returned instead. */
9020 void
9021 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9023 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9024 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9025 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9026 else
9028 if (TYPE_UNSIGNED (type))
9029 mpz_set_ui (min, 0);
9030 else
9032 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9033 wi::to_mpz (mn, min, SIGNED);
9037 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9038 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9039 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9040 else
9042 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9043 wi::to_mpz (mn, max, TYPE_SIGN (type));
9047 /* Return true if VAR is an automatic variable defined in function FN. */
9049 bool
9050 auto_var_in_fn_p (const_tree var, const_tree fn)
9052 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9053 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9054 || TREE_CODE (var) == PARM_DECL)
9055 && ! TREE_STATIC (var))
9056 || TREE_CODE (var) == LABEL_DECL
9057 || TREE_CODE (var) == RESULT_DECL));
9060 /* Subprogram of following function. Called by walk_tree.
9062 Return *TP if it is an automatic variable or parameter of the
9063 function passed in as DATA. */
9065 static tree
9066 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9068 tree fn = (tree) data;
9070 if (TYPE_P (*tp))
9071 *walk_subtrees = 0;
9073 else if (DECL_P (*tp)
9074 && auto_var_in_fn_p (*tp, fn))
9075 return *tp;
9077 return NULL_TREE;
9080 /* Returns true if TYPE is, contains, or refers to a type with variable
9081 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9082 arguments, but not the return type. If FN is nonzero, only return
9083 true if a modifier of the type or position of FN is a variable or
9084 parameter inside FN.
9086 This concept is more general than that of C99 'variably modified types':
9087 in C99, a struct type is never variably modified because a VLA may not
9088 appear as a structure member. However, in GNU C code like:
9090 struct S { int i[f()]; };
9092 is valid, and other languages may define similar constructs. */
9094 bool
9095 variably_modified_type_p (tree type, tree fn)
9097 tree t;
9099 /* Test if T is either variable (if FN is zero) or an expression containing
9100 a variable in FN. If TYPE isn't gimplified, return true also if
9101 gimplify_one_sizepos would gimplify the expression into a local
9102 variable. */
9103 #define RETURN_TRUE_IF_VAR(T) \
9104 do { tree _t = (T); \
9105 if (_t != NULL_TREE \
9106 && _t != error_mark_node \
9107 && !CONSTANT_CLASS_P (_t) \
9108 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9109 && (!fn \
9110 || (!TYPE_SIZES_GIMPLIFIED (type) \
9111 && (TREE_CODE (_t) != VAR_DECL \
9112 && !CONTAINS_PLACEHOLDER_P (_t))) \
9113 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9114 return true; } while (0)
9116 if (type == error_mark_node)
9117 return false;
9119 /* If TYPE itself has variable size, it is variably modified. */
9120 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9121 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9123 switch (TREE_CODE (type))
9125 case POINTER_TYPE:
9126 case REFERENCE_TYPE:
9127 case VECTOR_TYPE:
9128 /* Ada can have pointer types referring to themselves indirectly. */
9129 if (TREE_VISITED (type))
9130 return false;
9131 TREE_VISITED (type) = true;
9132 if (variably_modified_type_p (TREE_TYPE (type), fn))
9134 TREE_VISITED (type) = false;
9135 return true;
9137 TREE_VISITED (type) = false;
9138 break;
9140 case FUNCTION_TYPE:
9141 case METHOD_TYPE:
9142 /* If TYPE is a function type, it is variably modified if the
9143 return type is variably modified. */
9144 if (variably_modified_type_p (TREE_TYPE (type), fn))
9145 return true;
9146 break;
9148 case INTEGER_TYPE:
9149 case REAL_TYPE:
9150 case FIXED_POINT_TYPE:
9151 case ENUMERAL_TYPE:
9152 case BOOLEAN_TYPE:
9153 /* Scalar types are variably modified if their end points
9154 aren't constant. */
9155 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9156 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9157 break;
9159 case RECORD_TYPE:
9160 case UNION_TYPE:
9161 case QUAL_UNION_TYPE:
9162 /* We can't see if any of the fields are variably-modified by the
9163 definition we normally use, since that would produce infinite
9164 recursion via pointers. */
9165 /* This is variably modified if some field's type is. */
9166 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9167 if (TREE_CODE (t) == FIELD_DECL)
9169 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9170 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9171 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9173 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9174 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9176 break;
9178 case ARRAY_TYPE:
9179 /* Do not call ourselves to avoid infinite recursion. This is
9180 variably modified if the element type is. */
9181 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9182 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9183 break;
9185 default:
9186 break;
9189 /* The current language may have other cases to check, but in general,
9190 all other types are not variably modified. */
9191 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9193 #undef RETURN_TRUE_IF_VAR
9196 /* Given a DECL or TYPE, return the scope in which it was declared, or
9197 NULL_TREE if there is no containing scope. */
9199 tree
9200 get_containing_scope (const_tree t)
9202 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9205 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9207 const_tree
9208 get_ultimate_context (const_tree decl)
9210 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9212 if (TREE_CODE (decl) == BLOCK)
9213 decl = BLOCK_SUPERCONTEXT (decl);
9214 else
9215 decl = get_containing_scope (decl);
9217 return decl;
9220 /* Return the innermost context enclosing DECL that is
9221 a FUNCTION_DECL, or zero if none. */
9223 tree
9224 decl_function_context (const_tree decl)
9226 tree context;
9228 if (TREE_CODE (decl) == ERROR_MARK)
9229 return 0;
9231 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9232 where we look up the function at runtime. Such functions always take
9233 a first argument of type 'pointer to real context'.
9235 C++ should really be fixed to use DECL_CONTEXT for the real context,
9236 and use something else for the "virtual context". */
9237 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9238 context
9239 = TYPE_MAIN_VARIANT
9240 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9241 else
9242 context = DECL_CONTEXT (decl);
9244 while (context && TREE_CODE (context) != FUNCTION_DECL)
9246 if (TREE_CODE (context) == BLOCK)
9247 context = BLOCK_SUPERCONTEXT (context);
9248 else
9249 context = get_containing_scope (context);
9252 return context;
9255 /* Return the innermost context enclosing DECL that is
9256 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9257 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9259 tree
9260 decl_type_context (const_tree decl)
9262 tree context = DECL_CONTEXT (decl);
9264 while (context)
9265 switch (TREE_CODE (context))
9267 case NAMESPACE_DECL:
9268 case TRANSLATION_UNIT_DECL:
9269 return NULL_TREE;
9271 case RECORD_TYPE:
9272 case UNION_TYPE:
9273 case QUAL_UNION_TYPE:
9274 return context;
9276 case TYPE_DECL:
9277 case FUNCTION_DECL:
9278 context = DECL_CONTEXT (context);
9279 break;
9281 case BLOCK:
9282 context = BLOCK_SUPERCONTEXT (context);
9283 break;
9285 default:
9286 gcc_unreachable ();
9289 return NULL_TREE;
9292 /* CALL is a CALL_EXPR. Return the declaration for the function
9293 called, or NULL_TREE if the called function cannot be
9294 determined. */
9296 tree
9297 get_callee_fndecl (const_tree call)
9299 tree addr;
9301 if (call == error_mark_node)
9302 return error_mark_node;
9304 /* It's invalid to call this function with anything but a
9305 CALL_EXPR. */
9306 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9308 /* The first operand to the CALL is the address of the function
9309 called. */
9310 addr = CALL_EXPR_FN (call);
9312 /* If there is no function, return early. */
9313 if (addr == NULL_TREE)
9314 return NULL_TREE;
9316 STRIP_NOPS (addr);
9318 /* If this is a readonly function pointer, extract its initial value. */
9319 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9320 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9321 && DECL_INITIAL (addr))
9322 addr = DECL_INITIAL (addr);
9324 /* If the address is just `&f' for some function `f', then we know
9325 that `f' is being called. */
9326 if (TREE_CODE (addr) == ADDR_EXPR
9327 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9328 return TREE_OPERAND (addr, 0);
9330 /* We couldn't figure out what was being called. */
9331 return NULL_TREE;
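/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): a direct call built with build_call_expr carries an
   ADDR_EXPR of the FUNCTION_DECL as CALL_EXPR_FN, so the callee can be
   recovered.  The decl built here is hypothetical.  */
static void
example_get_callee_fndecl (void)
{
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  tree fndecl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                            get_identifier ("callee"), fntype);
  tree call = build_call_expr (fndecl, 0);
  gcc_checking_assert (get_callee_fndecl (call) == fndecl);
}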
9334 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9335 return the associated function code, otherwise return CFN_LAST. */
9337 combined_fn
9338 get_call_combined_fn (const_tree call)
9340 /* It's invalid to call this function with anything but a CALL_EXPR. */
9341 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9343 if (!CALL_EXPR_FN (call))
9344 return as_combined_fn (CALL_EXPR_IFN (call));
9346 tree fndecl = get_callee_fndecl (call);
9347 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9348 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9350 return CFN_LAST;
9353 /* Comparator of indices based on tree_node_counts. */
9355 static int
9356 tree_nodes_cmp (const void *p1, const void *p2)
9358 const unsigned *n1 = (const unsigned *)p1;
9359 const unsigned *n2 = (const unsigned *)p2;
9361 return tree_node_counts[*n1] - tree_node_counts[*n2];
9364 /* Comparator of indices based on tree_code_counts. */
9366 static int
9367 tree_codes_cmp (const void *p1, const void *p2)
9369 const unsigned *n1 = (const unsigned *)p1;
9370 const unsigned *n2 = (const unsigned *)p2;
9372 return tree_code_counts[*n1] - tree_code_counts[*n2];
9375 #define TREE_MEM_USAGE_SPACES 40
9377 /* Print debugging information about tree nodes generated during the compile,
9378 and any language-specific information. */
9380 void
9381 dump_tree_statistics (void)
9383 if (GATHER_STATISTICS)
9385 uint64_t total_nodes, total_bytes;
9386 fprintf (stderr, "\nKind Nodes Bytes\n");
9387 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9388 total_nodes = total_bytes = 0;
9391 auto_vec<unsigned> indices (all_kinds);
9392 for (unsigned i = 0; i < all_kinds; i++)
9393 indices.quick_push (i);
9394 indices.qsort (tree_nodes_cmp);
9396 for (unsigned i = 0; i < (unsigned) all_kinds; i++)
9398 unsigned j = indices[i];
9399 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9400 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9401 SIZE_AMOUNT (tree_node_sizes[j]));
9402 total_nodes += tree_node_counts[j];
9403 total_bytes += tree_node_sizes[j];
9405 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9406 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9407 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9408 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9412 fprintf (stderr, "Code Nodes\n");
9413 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9415 auto_vec<unsigned> indices (MAX_TREE_CODES);
9416 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9417 indices.quick_push (i);
9418 indices.qsort (tree_codes_cmp);
9420 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9422 unsigned j = indices[i];
9423 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9424 get_tree_code_name ((enum tree_code) j),
9425 SIZE_AMOUNT (tree_code_counts[j]));
9427 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9428 fprintf (stderr, "\n");
9429 ssanames_print_statistics ();
9430 fprintf (stderr, "\n");
9431 phinodes_print_statistics ();
9432 fprintf (stderr, "\n");
9435 else
9436 fprintf (stderr, "(No per-node statistics)\n");
9438 print_type_hash_statistics ();
9439 print_debug_expr_statistics ();
9440 print_value_expr_statistics ();
9441 lang_hooks.print_statistics ();
9444 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9446 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9448 unsigned
9449 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9451 /* This relies on the raw feedback's top 4 bits being zero. */
9452 #define FEEDBACK(X) ((X) * 0x04c11db7)
9453 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9454 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9455 static const unsigned syndromes[16] =
9457 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9458 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9459 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9460 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9462 #undef FEEDBACK
9463 #undef SYNDROME
9465 value <<= (32 - bytes * 8);
9466 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9468 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9470 chksum = (chksum << 4) ^ feedback;
9473 return chksum;
9476 /* Generate a crc32 of a string. */
9478 unsigned
9479 crc32_string (unsigned chksum, const char *string)
9482 chksum = crc32_byte (chksum, *string);
9483 while (*string++);
9484 return chksum;
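/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): checksums of this kind are chained, e.g. mixing a file
   name with a 32-bit value much as get_file_function_name below mixes a name
   with a random seed.  The helper name and constant are hypothetical.  */
static unsigned
example_crc32 (const char *name)
{
  unsigned chk = crc32_string (0, name);
  return crc32_unsigned_n (chk, 0x12345678, 4);
}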
9487 /* P is a string that will be used in a symbol. Mask out any characters
9488 that are not valid in that context. */
9490 void
9491 clean_symbol_name (char *p)
9493 for (; *p; p++)
9494 if (! (ISALNUM (*p)
9495 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9496 || *p == '$'
9497 #endif
9498 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9499 || *p == '.'
9500 #endif
9502 *p = '_';
9505 /* For anonymous aggregate types, we need some sort of name to
9506 hold on to. In practice, this should not appear, but it should
9507 not be harmful if it does. */
9508 bool
9509 anon_aggrname_p(const_tree id_node)
9511 #ifndef NO_DOT_IN_LABEL
9512 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9513 && IDENTIFIER_POINTER (id_node)[1] == '_');
9514 #else /* NO_DOT_IN_LABEL */
9515 #ifndef NO_DOLLAR_IN_LABEL
9516 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9517 && IDENTIFIER_POINTER (id_node)[1] == '_');
9518 #else /* NO_DOLLAR_IN_LABEL */
9519 #define ANON_AGGRNAME_PREFIX "__anon_"
9520 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9521 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9522 #endif /* NO_DOLLAR_IN_LABEL */
9523 #endif /* NO_DOT_IN_LABEL */
9526 /* Return a format for an anonymous aggregate name. */
9527 const char *
9528 anon_aggrname_format()
9530 #ifndef NO_DOT_IN_LABEL
9531 return "._%d";
9532 #else /* NO_DOT_IN_LABEL */
9533 #ifndef NO_DOLLAR_IN_LABEL
9534 return "$_%d";
9535 #else /* NO_DOLLAR_IN_LABEL */
9536 return "__anon_%d";
9537 #endif /* NO_DOLLAR_IN_LABEL */
9538 #endif /* NO_DOT_IN_LABEL */
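/* Illustrative sketch (editorial addition, not part of tree.c and not meant
   to be compiled in): a front end mints an anonymous-aggregate name from the
   format above and can later recognize it with anon_aggrname_p.  The helper
   name is hypothetical.  */
static void
example_anon_aggrname (void)
{
  char buf[32];
  snprintf (buf, sizeof (buf), anon_aggrname_format (), 0);
  gcc_checking_assert (anon_aggrname_p (get_identifier (buf)));
}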
9541 /* Generate a name for a special-purpose function.
9542 The generated name may need to be unique across the whole link.
9543 Changes to this function may also require corresponding changes to
9544 xstrdup_mask_random.
9545 TYPE is some string to identify the purpose of this function to the
9546 linker or collect2; it must start with an uppercase letter,
9547 one of:
9548 I - for constructors
9549 D - for destructors
9550 N - for C++ anonymous namespaces
9551 F - for DWARF unwind frame information. */
9553 tree
9554 get_file_function_name (const char *type)
9556 char *buf;
9557 const char *p;
9558 char *q;
9560 /* If we already have a name we know to be unique, just use that. */
9561 if (first_global_object_name)
9562 p = q = ASTRDUP (first_global_object_name);
9563 /* If the target is handling the constructors/destructors, they
9564 will be local to this file and the name is only necessary for
9565 debugging purposes.
9566 We also assign sub_I and sub_D suffixes to constructors called from
9567 the global static constructors. These are always local. */
9568 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9569 || (strncmp (type, "sub_", 4) == 0
9570 && (type[4] == 'I' || type[4] == 'D')))
9572 const char *file = main_input_filename;
9573 if (! file)
9574 file = LOCATION_FILE (input_location);
9575 /* Just use the file's basename, because the full pathname
9576 might be quite long. */
9577 p = q = ASTRDUP (lbasename (file));
9579 else
9581 /* Otherwise, the name must be unique across the entire link.
9582 We don't have anything that we know to be unique to this translation
9583 unit, so use what we do have and throw in some randomness. */
9584 unsigned len;
9585 const char *name = weak_global_object_name;
9586 const char *file = main_input_filename;
9588 if (! name)
9589 name = "";
9590 if (! file)
9591 file = LOCATION_FILE (input_location);
9593 len = strlen (file);
9594 q = (char *) alloca (9 + 19 + len + 1);
9595 memcpy (q, file, len + 1);
9597 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9598 crc32_string (0, name), get_random_seed (false));
9600 p = q;
9603 clean_symbol_name (q);
9604 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9605 + strlen (type));
9607 /* Set up the name of the file-level functions we may need.
9608 Use a global object (which is already required to be unique over
9609 the program) rather than the file name (which imposes extra
9610 constraints). */
9611 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9613 return get_identifier (buf);
9616 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9618 /* Complain that the tree code of NODE does not match the expected 0
9619 terminated list of trailing codes. The trailing code list can be
9620 empty, for a more vague error message. FILE, LINE, and FUNCTION
9621 are of the caller. */
9623 void
9624 tree_check_failed (const_tree node, const char *file,
9625 int line, const char *function, ...)
9627 va_list args;
9628 const char *buffer;
9629 unsigned length = 0;
9630 enum tree_code code;
9632 va_start (args, function);
9633 while ((code = (enum tree_code) va_arg (args, int)))
9634 length += 4 + strlen (get_tree_code_name (code));
9635 va_end (args);
9636 if (length)
9638 char *tmp;
9639 va_start (args, function);
9640 length += strlen ("expected ");
9641 buffer = tmp = (char *) alloca (length);
9642 length = 0;
9643 while ((code = (enum tree_code) va_arg (args, int)))
9645 const char *prefix = length ? " or " : "expected ";
9647 strcpy (tmp + length, prefix);
9648 length += strlen (prefix);
9649 strcpy (tmp + length, get_tree_code_name (code));
9650 length += strlen (get_tree_code_name (code));
9652 va_end (args);
9654 else
9655 buffer = "unexpected node";
9657 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9658 buffer, get_tree_code_name (TREE_CODE (node)),
9659 function, trim_filename (file), line);
9662 /* Complain that the tree code of NODE matches one of the codes in the
9663 0 terminated list of trailing codes, although it should not.
9664 FILE, LINE, and FUNCTION are of the caller. */
9666 void
9667 tree_not_check_failed (const_tree node, const char *file,
9668 int line, const char *function, ...)
9670 va_list args;
9671 char *buffer;
9672 unsigned length = 0;
9673 enum tree_code code;
9675 va_start (args, function);
9676 while ((code = (enum tree_code) va_arg (args, int)))
9677 length += 4 + strlen (get_tree_code_name (code));
9678 va_end (args);
9679 va_start (args, function);
9680 buffer = (char *) alloca (length);
9681 length = 0;
9682 while ((code = (enum tree_code) va_arg (args, int)))
9684 if (length)
9686 strcpy (buffer + length, " or ");
9687 length += 4;
9689 strcpy (buffer + length, get_tree_code_name (code));
9690 length += strlen (get_tree_code_name (code));
9692 va_end (args);
9694 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9695 buffer, get_tree_code_name (TREE_CODE (node)),
9696 function, trim_filename (file), line);
9699 /* Similar to tree_check_failed, except that we check for a class of tree
9700 code, given in CL. */
9702 void
9703 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9704 const char *file, int line, const char *function)
9706 internal_error
9707 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9708 TREE_CODE_CLASS_STRING (cl),
9709 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9710 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9713 /* Similar to tree_check_failed, except that instead of specifying a
9714 dozen codes, use the knowledge that they're all sequential. */
9716 void
9717 tree_range_check_failed (const_tree node, const char *file, int line,
9718 const char *function, enum tree_code c1,
9719 enum tree_code c2)
9721 char *buffer;
9722 unsigned length = 0;
9723 unsigned int c;
9725 for (c = c1; c <= c2; ++c)
9726 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9728 length += strlen ("expected ");
9729 buffer = (char *) alloca (length);
9730 length = 0;
9732 for (c = c1; c <= c2; ++c)
9734 const char *prefix = length ? " or " : "expected ";
9736 strcpy (buffer + length, prefix);
9737 length += strlen (prefix);
9738 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9739 length += strlen (get_tree_code_name ((enum tree_code) c));
9742 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9743 buffer, get_tree_code_name (TREE_CODE (node)),
9744 function, trim_filename (file), line);
9748 /* Similar to tree_check_failed, except that we check that a tree does
9749 not belong to the specified class of tree codes, given in CL. */
9751 void
9752 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9753 const char *file, int line, const char *function)
9755 internal_error
9756 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9757 TREE_CODE_CLASS_STRING (cl),
9758 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9759 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9763 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9765 void
9766 omp_clause_check_failed (const_tree node, const char *file, int line,
9767 const char *function, enum omp_clause_code code)
9769 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9770 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9771 function, trim_filename (file), line);
9775 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9777 void
9778 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9779 const char *function, enum omp_clause_code c1,
9780 enum omp_clause_code c2)
9782 char *buffer;
9783 unsigned length = 0;
9784 unsigned int c;
9786 for (c = c1; c <= c2; ++c)
9787 length += 4 + strlen (omp_clause_code_name[c]);
9789 length += strlen ("expected ");
9790 buffer = (char *) alloca (length);
9791 length = 0;
9793 for (c = c1; c <= c2; ++c)
9795 const char *prefix = length ? " or " : "expected ";
9797 strcpy (buffer + length, prefix);
9798 length += strlen (prefix);
9799 strcpy (buffer + length, omp_clause_code_name[c]);
9800 length += strlen (omp_clause_code_name[c]);
9803 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9804 buffer, omp_clause_code_name[TREE_CODE (node)],
9805 function, trim_filename (file), line);
9809 #undef DEFTREESTRUCT
9810 #define DEFTREESTRUCT(VAL, NAME) NAME,
9812 static const char *ts_enum_names[] = {
9813 #include "treestruct.def"
9815 #undef DEFTREESTRUCT
9817 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9819 /* Similar to tree_class_check_failed, except that we check for
9820 whether CODE contains the tree structure identified by EN. */
9822 void
9823 tree_contains_struct_check_failed (const_tree node,
9824 const enum tree_node_structure_enum en,
9825 const char *file, int line,
9826 const char *function)
9828 internal_error
9829 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9830 TS_ENUM_NAME (en),
9831 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9835 /* Similar to above, except that the check is for the bounds of a
9836 TREE_INT_CST's (dynamically sized) element vector. */
9838 void
9839 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9840 const char *function)
9842 internal_error
9843 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9844 idx + 1, len, function, trim_filename (file), line);
9847 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9848 (dynamically sized) vector. */
9850 void
9851 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9852 const char *function)
9854 internal_error
9855 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9856 idx + 1, len, function, trim_filename (file), line);
9859 /* Similar to above, except that the check is for the bounds of the operand
9860 vector of an expression node EXP. */
9862 void
9863 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9864 int line, const char *function)
9866 enum tree_code code = TREE_CODE (exp);
9867 internal_error
9868 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9869 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9870 function, trim_filename (file), line);
9873 /* Similar to above, except that the check is for the number of
9874 operands of an OMP_CLAUSE node. */
9876 void
9877 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9878 int line, const char *function)
9880 internal_error
9881 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9882 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9883 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9884 trim_filename (file), line);
9886 #endif /* ENABLE_TREE_CHECKING */
9888 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9889 and mapped to the machine mode MODE. Initialize its fields and build
9890 the information necessary for debugging output. */
9892 static tree
9893 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9895 tree t;
9896 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9898 t = make_node (VECTOR_TYPE);
9899 TREE_TYPE (t) = mv_innertype;
9900 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9901 SET_TYPE_MODE (t, mode);
9903 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9904 SET_TYPE_STRUCTURAL_EQUALITY (t);
9905 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9906 || mode != VOIDmode)
9907 && !VECTOR_BOOLEAN_TYPE_P (t))
9908 TYPE_CANONICAL (t)
9909 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9911 layout_type (t);
9913 hashval_t hash = type_hash_canon_hash (t);
9914 t = type_hash_canon (hash, t);
9916 /* We have built a main variant, based on the main variant of the
9917 inner type. Use it to build the variant we return. */
9918 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9919 && TREE_TYPE (t) != innertype)
9920 return build_type_attribute_qual_variant (t,
9921 TYPE_ATTRIBUTES (innertype),
9922 TYPE_QUALS (innertype));
9924 return t;
9927 static tree
9928 make_or_reuse_type (unsigned size, int unsignedp)
9930 int i;
9932 if (size == INT_TYPE_SIZE)
9933 return unsignedp ? unsigned_type_node : integer_type_node;
9934 if (size == CHAR_TYPE_SIZE)
9935 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9936 if (size == SHORT_TYPE_SIZE)
9937 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9938 if (size == LONG_TYPE_SIZE)
9939 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9940 if (size == LONG_LONG_TYPE_SIZE)
9941 return (unsignedp ? long_long_unsigned_type_node
9942 : long_long_integer_type_node);
9944 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9945 if (size == int_n_data[i].bitsize
9946 && int_n_enabled_p[i])
9947 return (unsignedp ? int_n_trees[i].unsigned_type
9948 : int_n_trees[i].signed_type);
9950 if (unsignedp)
9951 return make_unsigned_type (size);
9952 else
9953 return make_signed_type (size);
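/* Illustrative sketch (hypothetical example, guarded out of the build):
   requests whose size matches one of the standard C type sizes reuse the
   existing node instead of creating a new INTEGER_TYPE.  */
#if 0
static void
example_make_or_reuse_type (void)
{
  gcc_checking_assert (make_or_reuse_type (INT_TYPE_SIZE, 0)
		       == integer_type_node);
  gcc_checking_assert (make_or_reuse_type (INT_TYPE_SIZE, 1)
		       == unsigned_type_node);
}
#endif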
9956 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9958 static tree
9959 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9961 if (satp)
9963 if (size == SHORT_FRACT_TYPE_SIZE)
9964 return unsignedp ? sat_unsigned_short_fract_type_node
9965 : sat_short_fract_type_node;
9966 if (size == FRACT_TYPE_SIZE)
9967 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9968 if (size == LONG_FRACT_TYPE_SIZE)
9969 return unsignedp ? sat_unsigned_long_fract_type_node
9970 : sat_long_fract_type_node;
9971 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9972 return unsignedp ? sat_unsigned_long_long_fract_type_node
9973 : sat_long_long_fract_type_node;
9975 else
9977 if (size == SHORT_FRACT_TYPE_SIZE)
9978 return unsignedp ? unsigned_short_fract_type_node
9979 : short_fract_type_node;
9980 if (size == FRACT_TYPE_SIZE)
9981 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9982 if (size == LONG_FRACT_TYPE_SIZE)
9983 return unsignedp ? unsigned_long_fract_type_node
9984 : long_fract_type_node;
9985 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9986 return unsignedp ? unsigned_long_long_fract_type_node
9987 : long_long_fract_type_node;
9990 return make_fract_type (size, unsignedp, satp);
9993 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9995 static tree
9996 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9998 if (satp)
10000 if (size == SHORT_ACCUM_TYPE_SIZE)
10001 return unsignedp ? sat_unsigned_short_accum_type_node
10002 : sat_short_accum_type_node;
10003 if (size == ACCUM_TYPE_SIZE)
10004 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10005 if (size == LONG_ACCUM_TYPE_SIZE)
10006 return unsignedp ? sat_unsigned_long_accum_type_node
10007 : sat_long_accum_type_node;
10008 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10009 return unsignedp ? sat_unsigned_long_long_accum_type_node
10010 : sat_long_long_accum_type_node;
10012 else
10014 if (size == SHORT_ACCUM_TYPE_SIZE)
10015 return unsignedp ? unsigned_short_accum_type_node
10016 : short_accum_type_node;
10017 if (size == ACCUM_TYPE_SIZE)
10018 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10019 if (size == LONG_ACCUM_TYPE_SIZE)
10020 return unsignedp ? unsigned_long_accum_type_node
10021 : long_accum_type_node;
10022 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10023 return unsignedp ? unsigned_long_long_accum_type_node
10024 : long_long_accum_type_node;
10027 return make_accum_type (size, unsignedp, satp);
10031 /* Create an atomic variant node for TYPE. This routine is called
10032 during initialization of data types to create the 5 basic atomic
10033 types. The generic build_variant_type function requires these to
10034 already be set up in order to function properly, so cannot be
10035 called from there. If ALIGN is non-zero, then ensure alignment is
10036 overridden to this value. */
10038 static tree
10039 build_atomic_base (tree type, unsigned int align)
10041 tree t;
10043 /* Make sure it's not already registered. */
10044 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10045 return t;
10047 t = build_variant_type_copy (type);
10048 set_type_quals (t, TYPE_QUAL_ATOMIC);
10050 if (align)
10051 SET_TYPE_ALIGN (t, align);
10053 return t;
10056 /* Information about the _FloatN and _FloatNx types. This must be in
10057 the same order as the corresponding TI_* enum values. */
10058 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10060 { 16, false },
10061 { 32, false },
10062 { 64, false },
10063 { 128, false },
10064 { 32, true },
10065 { 64, true },
10066 { 128, true },
10070 /* Create nodes for all integer types (and error_mark_node) using the sizes
10071 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10073 void
10074 build_common_tree_nodes (bool signed_char)
10076 int i;
10078 error_mark_node = make_node (ERROR_MARK);
10079 TREE_TYPE (error_mark_node) = error_mark_node;
10081 initialize_sizetypes ();
10083 /* Define both `signed char' and `unsigned char'. */
10084 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10085 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10086 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10087 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10089 /* Define `char', which is like either `signed char' or `unsigned char'
10090 but not the same as either. */
10091 char_type_node
10092 = (signed_char
10093 ? make_signed_type (CHAR_TYPE_SIZE)
10094 : make_unsigned_type (CHAR_TYPE_SIZE));
10095 TYPE_STRING_FLAG (char_type_node) = 1;
10097 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10098 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10099 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10100 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10101 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10102 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10103 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10104 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10106 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10108 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10109 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10111 if (int_n_enabled_p[i])
10113 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10114 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10118 /* Define a boolean type. This type only represents boolean values but
10119 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10120 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10121 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10122 TYPE_PRECISION (boolean_type_node) = 1;
10123 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10125 /* Define what type to use for size_t. */
10126 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10127 size_type_node = unsigned_type_node;
10128 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10129 size_type_node = long_unsigned_type_node;
10130 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10131 size_type_node = long_long_unsigned_type_node;
10132 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10133 size_type_node = short_unsigned_type_node;
10134 else
10136 int i;
10138 size_type_node = NULL_TREE;
10139 for (i = 0; i < NUM_INT_N_ENTS; i++)
10140 if (int_n_enabled_p[i])
10142 char name[50];
10143 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10145 if (strcmp (name, SIZE_TYPE) == 0)
10147 size_type_node = int_n_trees[i].unsigned_type;
10150 if (size_type_node == NULL_TREE)
10151 gcc_unreachable ();
10154 /* Define what type to use for ptrdiff_t. */
10155 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10156 ptrdiff_type_node = integer_type_node;
10157 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10158 ptrdiff_type_node = long_integer_type_node;
10159 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10160 ptrdiff_type_node = long_long_integer_type_node;
10161 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10162 ptrdiff_type_node = short_integer_type_node;
10163 else
10165 ptrdiff_type_node = NULL_TREE;
10166 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10167 if (int_n_enabled_p[i])
10169 char name[50];
10170 sprintf (name, "__int%d", int_n_data[i].bitsize);
10171 if (strcmp (name, PTRDIFF_TYPE) == 0)
10172 ptrdiff_type_node = int_n_trees[i].signed_type;
10174 if (ptrdiff_type_node == NULL_TREE)
10175 gcc_unreachable ();
10178 /* Fill in the rest of the sized types. Reuse existing type nodes
10179 when possible. */
10180 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10181 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10182 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10183 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10184 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10186 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10187 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10188 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10189 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10190 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10192 /* Don't call build_qualified_type for atomics. That routine does
10193 special processing for atomics, and until they are initialized
10194 it's better not to make that call.
10196 Check to see if there is a target override for atomic types. */
10198 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10199 targetm.atomic_align_for_mode (QImode));
10200 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10201 targetm.atomic_align_for_mode (HImode));
10202 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10203 targetm.atomic_align_for_mode (SImode));
10204 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10205 targetm.atomic_align_for_mode (DImode));
10206 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10207 targetm.atomic_align_for_mode (TImode));
10209 access_public_node = get_identifier ("public");
10210 access_protected_node = get_identifier ("protected");
10211 access_private_node = get_identifier ("private");
10213 /* Define these next since types below may use them. */
10214 integer_zero_node = build_int_cst (integer_type_node, 0);
10215 integer_one_node = build_int_cst (integer_type_node, 1);
10216 integer_three_node = build_int_cst (integer_type_node, 3);
10217 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10219 size_zero_node = size_int (0);
10220 size_one_node = size_int (1);
10221 bitsize_zero_node = bitsize_int (0);
10222 bitsize_one_node = bitsize_int (1);
10223 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10225 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10226 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10228 void_type_node = make_node (VOID_TYPE);
10229 layout_type (void_type_node);
10231 /* We are not going to have real types in C with less than byte alignment,
10232 so we might as well not have any types that claim to have it. */
10233 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10234 TYPE_USER_ALIGN (void_type_node) = 0;
10236 void_node = make_node (VOID_CST);
10237 TREE_TYPE (void_node) = void_type_node;
10239 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10240 layout_type (TREE_TYPE (null_pointer_node));
10242 ptr_type_node = build_pointer_type (void_type_node);
10243 const_ptr_type_node
10244 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10245 for (unsigned i = 0;
10246 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10247 ++i)
10248 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10250 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10252 float_type_node = make_node (REAL_TYPE);
10253 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10254 layout_type (float_type_node);
10256 double_type_node = make_node (REAL_TYPE);
10257 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10258 layout_type (double_type_node);
10260 long_double_type_node = make_node (REAL_TYPE);
10261 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10262 layout_type (long_double_type_node);
10264 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10266 int n = floatn_nx_types[i].n;
10267 bool extended = floatn_nx_types[i].extended;
10268 scalar_float_mode mode;
10269 if (!targetm.floatn_mode (n, extended).exists (&mode))
10270 continue;
10271 int precision = GET_MODE_PRECISION (mode);
10272 /* Work around the rs6000 KFmode having precision 113 not
10273 128. */
10274 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10275 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10276 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10277 if (!extended)
10278 gcc_assert (min_precision == n);
10279 if (precision < min_precision)
10280 precision = min_precision;
10281 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10282 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10283 layout_type (FLOATN_NX_TYPE_NODE (i));
10284 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10287 float_ptr_type_node = build_pointer_type (float_type_node);
10288 double_ptr_type_node = build_pointer_type (double_type_node);
10289 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10290 integer_ptr_type_node = build_pointer_type (integer_type_node);
10292 /* Fixed size integer types. */
10293 uint16_type_node = make_or_reuse_type (16, 1);
10294 uint32_type_node = make_or_reuse_type (32, 1);
10295 uint64_type_node = make_or_reuse_type (64, 1);
10297 /* Decimal float types. */
10298 dfloat32_type_node = make_node (REAL_TYPE);
10299 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10300 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10301 layout_type (dfloat32_type_node);
10302 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10304 dfloat64_type_node = make_node (REAL_TYPE);
10305 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10306 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10307 layout_type (dfloat64_type_node);
10308 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10310 dfloat128_type_node = make_node (REAL_TYPE);
10311 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10312 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10313 layout_type (dfloat128_type_node);
10314 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10316 complex_integer_type_node = build_complex_type (integer_type_node, true);
10317 complex_float_type_node = build_complex_type (float_type_node, true);
10318 complex_double_type_node = build_complex_type (double_type_node, true);
10319 complex_long_double_type_node = build_complex_type (long_double_type_node,
10320 true);
10322 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10324 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10325 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10326 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10329 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10330 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10331 sat_ ## KIND ## _type_node = \
10332 make_sat_signed_ ## KIND ## _type (SIZE); \
10333 sat_unsigned_ ## KIND ## _type_node = \
10334 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10335 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10336 unsigned_ ## KIND ## _type_node = \
10337 make_unsigned_ ## KIND ## _type (SIZE);
10339 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10340 sat_ ## WIDTH ## KIND ## _type_node = \
10341 make_sat_signed_ ## KIND ## _type (SIZE); \
10342 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10343 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10344 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10345 unsigned_ ## WIDTH ## KIND ## _type_node = \
10346 make_unsigned_ ## KIND ## _type (SIZE);
10348 /* Make fixed-point type nodes based on four different widths. */
10349 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10350 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10351 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10352 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10353 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10355 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10356 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10357 NAME ## _type_node = \
10358 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10359 u ## NAME ## _type_node = \
10360 make_or_reuse_unsigned_ ## KIND ## _type \
10361 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10362 sat_ ## NAME ## _type_node = \
10363 make_or_reuse_sat_signed_ ## KIND ## _type \
10364 (GET_MODE_BITSIZE (MODE ## mode)); \
10365 sat_u ## NAME ## _type_node = \
10366 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10367 (GET_MODE_BITSIZE (U ## MODE ## mode));
10369 /* Fixed-point type and mode nodes. */
10370 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10371 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10372 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10373 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10374 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10375 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10376 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10377 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10378 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10379 MAKE_FIXED_MODE_NODE (accum, da, DA)
10380 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10383 tree t = targetm.build_builtin_va_list ();
10385 /* Many back-ends define record types without setting TYPE_NAME.
10386 If we copied the record type here, we'd keep the original
10387 record type without a name. This breaks name mangling. So,
10388 don't copy record types and let c_common_nodes_and_builtins()
10389 declare the type to be __builtin_va_list. */
10390 if (TREE_CODE (t) != RECORD_TYPE)
10391 t = build_variant_type_copy (t);
10393 va_list_type_node = t;
10397 /* Modify DECL for given flags.
10398 TM_PURE attribute is set only on types, so the function will modify
10399 DECL's type when ECF_TM_PURE is used. */
10401 void
10402 set_call_expr_flags (tree decl, int flags)
10404 if (flags & ECF_NOTHROW)
10405 TREE_NOTHROW (decl) = 1;
10406 if (flags & ECF_CONST)
10407 TREE_READONLY (decl) = 1;
10408 if (flags & ECF_PURE)
10409 DECL_PURE_P (decl) = 1;
10410 if (flags & ECF_LOOPING_CONST_OR_PURE)
10411 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10412 if (flags & ECF_NOVOPS)
10413 DECL_IS_NOVOPS (decl) = 1;
10414 if (flags & ECF_NORETURN)
10415 TREE_THIS_VOLATILE (decl) = 1;
10416 if (flags & ECF_MALLOC)
10417 DECL_IS_MALLOC (decl) = 1;
10418 if (flags & ECF_RETURNS_TWICE)
10419 DECL_IS_RETURNS_TWICE (decl) = 1;
10420 if (flags & ECF_LEAF)
10421 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10422 NULL, DECL_ATTRIBUTES (decl));
10423 if (flags & ECF_COLD)
10424 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10425 NULL, DECL_ATTRIBUTES (decl));
10426 if (flags & ECF_RET1)
10427 DECL_ATTRIBUTES (decl)
10428 = tree_cons (get_identifier ("fn spec"),
10429 build_tree_list (NULL_TREE, build_string (1, "1")),
10430 DECL_ATTRIBUTES (decl));
10431 if ((flags & ECF_TM_PURE) && flag_tm)
10432 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10433 /* Looping const or pure is implied by noreturn.
10434 There is currently no way to declare looping const or looping pure alone. */
10435 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10436 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
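/* Illustrative sketch (hypothetical FNDECL, guarded out of the build):
   ECF_NOTHROW and ECF_MALLOC map to decl bits, while ECF_LEAF is
   recorded as a "leaf" attribute, as implemented above.  */
#if 0
static void
example_set_call_expr_flags (tree fndecl)
{
  set_call_expr_flags (fndecl, ECF_NOTHROW | ECF_LEAF | ECF_MALLOC);
  gcc_checking_assert (TREE_NOTHROW (fndecl)
		       && DECL_IS_MALLOC (fndecl)
		       && lookup_attribute ("leaf", DECL_ATTRIBUTES (fndecl)));
}
#endif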
10440 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10442 static void
10443 local_define_builtin (const char *name, tree type, enum built_in_function code,
10444 const char *library_name, int ecf_flags)
10446 tree decl;
10448 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10449 library_name, NULL_TREE);
10450 set_call_expr_flags (decl, ecf_flags);
10452 set_builtin_decl (code, decl, true);
10455 /* Call this function after instantiating all builtins that the language
10456 front end cares about. This will build the rest of the builtins
10457 and internal functions that are relied upon by the tree optimizers and
10458 the middle-end. */
10460 void
10461 build_common_builtin_nodes (void)
10463 tree tmp, ftype;
10464 int ecf_flags;
10466 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10467 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10469 ftype = build_function_type (void_type_node, void_list_node);
10470 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10471 local_define_builtin ("__builtin_unreachable", ftype,
10472 BUILT_IN_UNREACHABLE,
10473 "__builtin_unreachable",
10474 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10475 | ECF_CONST | ECF_COLD);
10476 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10477 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10478 "abort",
10479 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10482 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10483 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10485 ftype = build_function_type_list (ptr_type_node,
10486 ptr_type_node, const_ptr_type_node,
10487 size_type_node, NULL_TREE);
10489 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10490 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10491 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10492 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10493 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10494 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10497 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10499 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10500 const_ptr_type_node, size_type_node,
10501 NULL_TREE);
10502 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10503 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10506 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10508 ftype = build_function_type_list (ptr_type_node,
10509 ptr_type_node, integer_type_node,
10510 size_type_node, NULL_TREE);
10511 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10512 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10515 /* If we're checking the stack, `alloca' can throw. */
10516 const int alloca_flags
10517 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10519 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10521 ftype = build_function_type_list (ptr_type_node,
10522 size_type_node, NULL_TREE);
10523 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10524 "alloca", alloca_flags);
10527 ftype = build_function_type_list (ptr_type_node, size_type_node,
10528 size_type_node, NULL_TREE);
10529 local_define_builtin ("__builtin_alloca_with_align", ftype,
10530 BUILT_IN_ALLOCA_WITH_ALIGN,
10531 "__builtin_alloca_with_align",
10532 alloca_flags);
10534 ftype = build_function_type_list (ptr_type_node, size_type_node,
10535 size_type_node, size_type_node, NULL_TREE);
10536 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10537 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10538 "__builtin_alloca_with_align_and_max",
10539 alloca_flags);
10541 ftype = build_function_type_list (void_type_node,
10542 ptr_type_node, ptr_type_node,
10543 ptr_type_node, NULL_TREE);
10544 local_define_builtin ("__builtin_init_trampoline", ftype,
10545 BUILT_IN_INIT_TRAMPOLINE,
10546 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10547 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10548 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10549 "__builtin_init_heap_trampoline",
10550 ECF_NOTHROW | ECF_LEAF);
10551 local_define_builtin ("__builtin_init_descriptor", ftype,
10552 BUILT_IN_INIT_DESCRIPTOR,
10553 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10555 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10556 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10557 BUILT_IN_ADJUST_TRAMPOLINE,
10558 "__builtin_adjust_trampoline",
10559 ECF_CONST | ECF_NOTHROW);
10560 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10561 BUILT_IN_ADJUST_DESCRIPTOR,
10562 "__builtin_adjust_descriptor",
10563 ECF_CONST | ECF_NOTHROW);
10565 ftype = build_function_type_list (void_type_node,
10566 ptr_type_node, ptr_type_node, NULL_TREE);
10567 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10568 BUILT_IN_NONLOCAL_GOTO,
10569 "__builtin_nonlocal_goto",
10570 ECF_NORETURN | ECF_NOTHROW);
10572 ftype = build_function_type_list (void_type_node,
10573 ptr_type_node, ptr_type_node, NULL_TREE);
10574 local_define_builtin ("__builtin_setjmp_setup", ftype,
10575 BUILT_IN_SETJMP_SETUP,
10576 "__builtin_setjmp_setup", ECF_NOTHROW);
10578 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10579 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10580 BUILT_IN_SETJMP_RECEIVER,
10581 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10583 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10584 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10585 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10587 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10588 local_define_builtin ("__builtin_stack_restore", ftype,
10589 BUILT_IN_STACK_RESTORE,
10590 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10592 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10593 const_ptr_type_node, size_type_node,
10594 NULL_TREE);
10595 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10596 "__builtin_memcmp_eq",
10597 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10599 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10600 "__builtin_strncmp_eq",
10601 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10603 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10604 "__builtin_strcmp_eq",
10605 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10607 /* If there's a possibility that we might use the ARM EABI, build the
10608 alternate __cxa_end_cleanup node used to resume from C++. */
10609 if (targetm.arm_eabi_unwinder)
10611 ftype = build_function_type_list (void_type_node, NULL_TREE);
10612 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10613 BUILT_IN_CXA_END_CLEANUP,
10614 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10617 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10618 local_define_builtin ("__builtin_unwind_resume", ftype,
10619 BUILT_IN_UNWIND_RESUME,
10620 ((targetm_common.except_unwind_info (&global_options)
10621 == UI_SJLJ)
10622 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10623 ECF_NORETURN);
10625 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10627 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10628 NULL_TREE);
10629 local_define_builtin ("__builtin_return_address", ftype,
10630 BUILT_IN_RETURN_ADDRESS,
10631 "__builtin_return_address",
10632 ECF_NOTHROW);
10635 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10636 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10638 ftype = build_function_type_list (void_type_node, ptr_type_node,
10639 ptr_type_node, NULL_TREE);
10640 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10641 local_define_builtin ("__cyg_profile_func_enter", ftype,
10642 BUILT_IN_PROFILE_FUNC_ENTER,
10643 "__cyg_profile_func_enter", 0);
10644 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10645 local_define_builtin ("__cyg_profile_func_exit", ftype,
10646 BUILT_IN_PROFILE_FUNC_EXIT,
10647 "__cyg_profile_func_exit", 0);
10650 /* The exception object and filter values from the runtime. The argument
10651 must be zero before exception lowering, i.e. from the front end. After
10652 exception lowering, it will be the region number for the exception
10653 landing pad. These functions are PURE instead of CONST to prevent
10654 them from being hoisted past the exception edge that will initialize
10655 its value in the landing pad. */
10656 ftype = build_function_type_list (ptr_type_node,
10657 integer_type_node, NULL_TREE);
10658 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10659 /* Only use TM_PURE if we have TM language support. */
10660 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10661 ecf_flags |= ECF_TM_PURE;
10662 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10663 "__builtin_eh_pointer", ecf_flags);
10665 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10666 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10667 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10668 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10670 ftype = build_function_type_list (void_type_node,
10671 integer_type_node, integer_type_node,
10672 NULL_TREE);
10673 local_define_builtin ("__builtin_eh_copy_values", ftype,
10674 BUILT_IN_EH_COPY_VALUES,
10675 "__builtin_eh_copy_values", ECF_NOTHROW);
10677 /* Complex multiplication and division. These are handled as builtins
10678 rather than optabs because emit_library_call_value doesn't support
10679 complex. Further, we can do slightly better with folding these
10680 beasties if the real and complex parts of the arguments are separate. */
10682 int mode;
10684 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10686 char mode_name_buf[4], *q;
10687 const char *p;
10688 enum built_in_function mcode, dcode;
10689 tree type, inner_type;
10690 const char *prefix = "__";
10692 if (targetm.libfunc_gnu_prefix)
10693 prefix = "__gnu_";
10695 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10696 if (type == NULL)
10697 continue;
10698 inner_type = TREE_TYPE (type);
10700 ftype = build_function_type_list (type, inner_type, inner_type,
10701 inner_type, inner_type, NULL_TREE);
10703 mcode = ((enum built_in_function)
10704 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10705 dcode = ((enum built_in_function)
10706 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10708 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10709 *q = TOLOWER (*p);
10710 *q = '\0';
10712 /* For -ftrapping-math these should throw from a former
10713 -fnon-call-exception stmt. */
10714 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10715 NULL);
10716 local_define_builtin (built_in_names[mcode], ftype, mcode,
10717 built_in_names[mcode],
10718 ECF_CONST | ECF_LEAF);
10720 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10721 NULL);
10722 local_define_builtin (built_in_names[dcode], ftype, dcode,
10723 built_in_names[dcode],
10724 ECF_CONST | ECF_LEAF);
10728 init_internal_fns ();
10731 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10732 better way.
10734 If we requested a pointer to a vector, build up the pointers that
10735 we stripped off while looking for the inner type. Similarly for
10736 return values from functions.
10738 The argument TYPE is the top of the chain, and BOTTOM is the
10739 new type which we will point to. */
10741 tree
10742 reconstruct_complex_type (tree type, tree bottom)
10744 tree inner, outer;
10746 if (TREE_CODE (type) == POINTER_TYPE)
10748 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10749 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10750 TYPE_REF_CAN_ALIAS_ALL (type));
10752 else if (TREE_CODE (type) == REFERENCE_TYPE)
10754 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10755 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10756 TYPE_REF_CAN_ALIAS_ALL (type));
10758 else if (TREE_CODE (type) == ARRAY_TYPE)
10760 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10761 outer = build_array_type (inner, TYPE_DOMAIN (type));
10763 else if (TREE_CODE (type) == FUNCTION_TYPE)
10765 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10766 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10768 else if (TREE_CODE (type) == METHOD_TYPE)
10770 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10771 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10772 so we must compensate by getting rid of it. */
10773 outer
10774 = build_method_type_directly
10775 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10776 inner,
10777 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10779 else if (TREE_CODE (type) == OFFSET_TYPE)
10781 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10782 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10784 else
10785 return bottom;
10787 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10788 TYPE_QUALS (type));
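/* Illustrative sketch (hypothetical types, guarded out of the build):
   with TYPE == `float **' and BOTTOM == a 4-element vector of float,
   the recursion above rebuilds the pointer chain so the result is a
   pointer to a pointer to the vector type.  */
#if 0
static tree
example_reconstruct_complex_type (void)
{
  tree vec = build_vector_type (float_type_node, 4);
  tree fpp = build_pointer_type (build_pointer_type (float_type_node));
  return reconstruct_complex_type (fpp, vec);
}
#endif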
10791 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10792 the inner type. */
10793 tree
10794 build_vector_type_for_mode (tree innertype, machine_mode mode)
10796 poly_int64 nunits;
10797 unsigned int bitsize;
10799 switch (GET_MODE_CLASS (mode))
10801 case MODE_VECTOR_BOOL:
10802 case MODE_VECTOR_INT:
10803 case MODE_VECTOR_FLOAT:
10804 case MODE_VECTOR_FRACT:
10805 case MODE_VECTOR_UFRACT:
10806 case MODE_VECTOR_ACCUM:
10807 case MODE_VECTOR_UACCUM:
10808 nunits = GET_MODE_NUNITS (mode);
10809 break;
10811 case MODE_INT:
10812 /* Check that there are no leftover bits. */
10813 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10814 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10815 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10816 break;
10818 default:
10819 gcc_unreachable ();
10822 return make_vector_type (innertype, nunits, mode);
10825 /* Similarly, but takes the inner type and number of units, which must be
10826 a power of two. */
10828 tree
10829 build_vector_type (tree innertype, poly_int64 nunits)
10831 return make_vector_type (innertype, nunits, VOIDmode);
10834 /* Build truth vector with specified length and number of units. */
10836 tree
10837 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
10839 machine_mode mask_mode
10840 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10842 poly_uint64 vsize;
10843 if (mask_mode == BLKmode)
10844 vsize = vector_size * BITS_PER_UNIT;
10845 else
10846 vsize = GET_MODE_BITSIZE (mask_mode);
10848 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10850 tree bool_type = build_nonstandard_boolean_type (esize);
10852 return make_vector_type (bool_type, nunits, mask_mode);
10855 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10857 tree
10858 build_same_sized_truth_vector_type (tree vectype)
10860 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10861 return vectype;
10863 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
10865 if (known_eq (size, 0U))
10866 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10868 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10871 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10873 tree
10874 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10876 tree t = make_vector_type (innertype, nunits, VOIDmode);
10877 tree cand;
10878 /* We always build the non-opaque variant before the opaque one,
10879 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10880 cand = TYPE_NEXT_VARIANT (t);
10881 if (cand
10882 && TYPE_VECTOR_OPAQUE (cand)
10883 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10884 return cand;
10885 /* Otherwise build a variant type and make sure to queue it after
10886 the non-opaque type. */
10887 cand = build_distinct_type_copy (t);
10888 TYPE_VECTOR_OPAQUE (cand) = true;
10889 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10890 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10891 TYPE_NEXT_VARIANT (t) = cand;
10892 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10893 return cand;
10896 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10898 wide_int
10899 vector_cst_int_elt (const_tree t, unsigned int i)
10901 /* First handle elements that are directly encoded. */
10902 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10903 if (i < encoded_nelts)
10904 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
10906 /* Identify the pattern that contains element I and work out the index of
10907 the last encoded element for that pattern. */
10908 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10909 unsigned int pattern = i % npatterns;
10910 unsigned int count = i / npatterns;
10911 unsigned int final_i = encoded_nelts - npatterns + pattern;
10913 /* If there are no steps, the final encoded value is the right one. */
10914 if (!VECTOR_CST_STEPPED_P (t))
10915 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10917 /* Otherwise work out the value from the last two encoded elements. */
10918 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10919 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10920 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
10921 return wi::to_wide (v2) + (count - 2) * diff;
10924 /* Return the value of element I of VECTOR_CST T. */
10926 tree
10927 vector_cst_elt (const_tree t, unsigned int i)
10929 /* First handle elements that are directly encoded. */
10930 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10931 if (i < encoded_nelts)
10932 return VECTOR_CST_ENCODED_ELT (t, i);
10934 /* If there are no steps, the final encoded value is the right one. */
10935 if (!VECTOR_CST_STEPPED_P (t))
10937 /* Identify the pattern that contains element I and work out the index of
10938 the last encoded element for that pattern. */
10939 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10940 unsigned int pattern = i % npatterns;
10941 unsigned int final_i = encoded_nelts - npatterns + pattern;
10942 return VECTOR_CST_ENCODED_ELT (t, final_i);
10945 /* Otherwise work out the value from the last two encoded elements. */
10946 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10947 vector_cst_int_elt (t, i));
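/* Illustrative sketch (hypothetical constant, guarded out of the build):
   a stepped VECTOR_CST for { 1, 2, 3, 4, ... } encodes a single pattern
   as three elements e0, e1, e2; vector_cst_int_elt then extrapolates
   element I beyond the encoded elements as e2 + (I - 2) * (e2 - e1).  */
#if 0
static void
example_vector_cst_encoding (void)
{
  tree type = build_vector_type (integer_type_node, 8);
  tree_vector_builder builder (type, 1, 3);
  builder.quick_push (build_int_cst (integer_type_node, 1));
  builder.quick_push (build_int_cst (integer_type_node, 2));
  builder.quick_push (build_int_cst (integer_type_node, 3));
  tree v = builder.build ();
  /* Element 5 is 3 + (5 - 2) * (3 - 2) == 6.  */
  gcc_checking_assert (tree_to_shwi (vector_cst_elt (v, 5)) == 6);
}
#endif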
10950 /* Given an initializer INIT, return TRUE if INIT is zero or some
10951 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10952 null, set *NONZERO if and only if INIT is known not to be all
10953 zeros. The combination of return value of false and *NONZERO
10954 false implies that INIT may but need not be all zeros. Other
10955 combinations indicate definitive answers. */
10957 bool
10958 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10960 bool dummy;
10961 if (!nonzero)
10962 nonzero = &dummy;
10964 /* Conservatively clear NONZERO and set it only if INIT is definitely
10965 not all zero. */
10966 *nonzero = false;
10968 STRIP_NOPS (init);
10970 unsigned HOST_WIDE_INT off = 0;
10972 switch (TREE_CODE (init))
10974 case INTEGER_CST:
10975 if (integer_zerop (init))
10976 return true;
10978 *nonzero = true;
10979 return false;
10981 case REAL_CST:
10982 /* ??? Note that this is not correct for C4X float formats. There,
10983 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10984 negative exponent. */
10985 if (real_zerop (init)
10986 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10987 return true;
10989 *nonzero = true;
10990 return false;
10992 case FIXED_CST:
10993 if (fixed_zerop (init))
10994 return true;
10996 *nonzero = true;
10997 return false;
10999 case COMPLEX_CST:
11000 if (integer_zerop (init)
11001 || (real_zerop (init)
11002 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11003 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11004 return true;
11006 *nonzero = true;
11007 return false;
11009 case VECTOR_CST:
11010 if (VECTOR_CST_NPATTERNS (init) == 1
11011 && VECTOR_CST_DUPLICATE_P (init)
11012 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11013 return true;
11015 *nonzero = true;
11016 return false;
11018 case CONSTRUCTOR:
11020 if (TREE_CLOBBER_P (init))
11021 return false;
11023 unsigned HOST_WIDE_INT idx;
11024 tree elt;
11026 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11027 if (!initializer_zerop (elt, nonzero))
11028 return false;
11030 return true;
11033 case MEM_REF:
11035 tree arg = TREE_OPERAND (init, 0);
11036 if (TREE_CODE (arg) != ADDR_EXPR)
11037 return false;
11038 tree offset = TREE_OPERAND (init, 1);
11039 if (TREE_CODE (offset) != INTEGER_CST
11040 || !tree_fits_uhwi_p (offset))
11041 return false;
11042 off = tree_to_uhwi (offset);
11043 if (INT_MAX < off)
11044 return false;
11045 arg = TREE_OPERAND (arg, 0);
11046 if (TREE_CODE (arg) != STRING_CST)
11047 return false;
11048 init = arg;
11050 /* Fall through. */
11052 case STRING_CST:
11054 gcc_assert (off <= INT_MAX);
11056 int i = off;
11057 int n = TREE_STRING_LENGTH (init);
11058 if (n <= i)
11059 return false;
11061 /* We need to loop through all elements to handle cases like
11062 "\0" and "\0foobar". */
11063 for (i = 0; i < n; ++i)
11064 if (TREE_STRING_POINTER (init)[i] != '\0')
11066 *nonzero = true;
11067 return false;
11070 return true;
11073 default:
11074 return false;
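/* Illustrative sketch (guarded out of the build): a zero INTEGER_CST
   yields true with *NONZERO left false; a nonzero one yields false and
   sets *NONZERO, i.e. "definitely not all zeros" rather than "unknown".  */
#if 0
static void
example_initializer_zerop (void)
{
  bool nonzero;
  gcc_checking_assert (initializer_zerop (integer_zero_node, &nonzero)
		       && !nonzero);
  gcc_checking_assert (!initializer_zerop (integer_one_node, &nonzero)
		       && nonzero);
}
#endif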
11078 /* Check if vector VEC consists of all equal elements and
11079 that the number of elements corresponds to the type of VEC.
11080 The function returns the first element of the vector
11081 or NULL_TREE if the vector is not uniform. */
11082 tree
11083 uniform_vector_p (const_tree vec)
11085 tree first, t;
11086 unsigned HOST_WIDE_INT i, nelts;
11088 if (vec == NULL_TREE)
11089 return NULL_TREE;
11091 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11093 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11094 return TREE_OPERAND (vec, 0);
11096 else if (TREE_CODE (vec) == VECTOR_CST)
11098 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11099 return VECTOR_CST_ENCODED_ELT (vec, 0);
11100 return NULL_TREE;
11103 else if (TREE_CODE (vec) == CONSTRUCTOR
11104 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11106 first = error_mark_node;
11108 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11110 if (i == 0)
11112 first = t;
11113 continue;
11115 if (!operand_equal_p (first, t, 0))
11116 return NULL_TREE;
11118 if (i != nelts)
11119 return NULL_TREE;
11121 return first;
11124 return NULL_TREE;
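/* Illustrative sketch (hypothetical vector, guarded out of the build):
   a VECTOR_CST built by duplicating one element is uniform, so the
   element itself is returned.  */
#if 0
static void
example_uniform_vector_p (void)
{
  tree type = build_vector_type (integer_type_node, 4);
  tree elt = uniform_vector_p (build_vector_from_val (type, integer_one_node));
  gcc_checking_assert (elt && integer_onep (elt));
}
#endif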
11127 /* Build an empty statement at location LOC. */
11129 tree
11130 build_empty_stmt (location_t loc)
11132 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11133 SET_EXPR_LOCATION (t, loc);
11134 return t;
11138 /* Build an OpenMP clause with code CODE. LOC is the location of the
11139 clause. */
11141 tree
11142 build_omp_clause (location_t loc, enum omp_clause_code code)
11144 tree t;
11145 int size, length;
11147 length = omp_clause_num_ops[code];
11148 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11150 record_node_allocation_statistics (OMP_CLAUSE, size);
11152 t = (tree) ggc_internal_alloc (size);
11153 memset (t, 0, size);
11154 TREE_SET_CODE (t, OMP_CLAUSE);
11155 OMP_CLAUSE_SET_CODE (t, code);
11156 OMP_CLAUSE_LOCATION (t) = loc;
11158 return t;
11161 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11162 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11163 Except for the CODE and operand count field, other storage for the
11164 object is initialized to zeros. */
11166 tree
11167 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11169 tree t;
11170 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11172 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11173 gcc_assert (len >= 1);
11175 record_node_allocation_statistics (code, length);
11177 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11179 TREE_SET_CODE (t, code);
11181 /* Can't use TREE_OPERAND to store the length because if checking is
11182 enabled, it will try to check the length before we store it. :-P */
11183 t->exp.operands[0] = build_int_cst (sizetype, len);
11185 return t;
11188 /* Helper function for build_call_* functions; build a CALL_EXPR with
11189 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11190 the argument slots. */
11192 static tree
11193 build_call_1 (tree return_type, tree fn, int nargs)
11195 tree t;
11197 t = build_vl_exp (CALL_EXPR, nargs + 3);
11198 TREE_TYPE (t) = return_type;
11199 CALL_EXPR_FN (t) = fn;
11200 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11202 return t;
11205 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11206 FN and a null static chain slot. NARGS is the number of call arguments
11207 which are specified as "..." arguments. */
11209 tree
11210 build_call_nary (tree return_type, tree fn, int nargs, ...)
11212 tree ret;
11213 va_list args;
11214 va_start (args, nargs);
11215 ret = build_call_valist (return_type, fn, nargs, args);
11216 va_end (args);
11217 return ret;
11220 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11221 FN and a null static chain slot. NARGS is the number of call arguments
11222 which are specified as a va_list ARGS. */
11224 tree
11225 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11227 tree t;
11228 int i;
11230 t = build_call_1 (return_type, fn, nargs);
11231 for (i = 0; i < nargs; i++)
11232 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11233 process_call_operands (t);
11234 return t;
11237 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11238 FN and a null static chain slot. NARGS is the number of call arguments
11239 which are specified as a tree array ARGS. */
11241 tree
11242 build_call_array_loc (location_t loc, tree return_type, tree fn,
11243 int nargs, const tree *args)
11245 tree t;
11246 int i;
11248 t = build_call_1 (return_type, fn, nargs);
11249 for (i = 0; i < nargs; i++)
11250 CALL_EXPR_ARG (t, i) = args[i];
11251 process_call_operands (t);
11252 SET_EXPR_LOCATION (t, loc);
11253 return t;
11256 /* Like build_call_array, but takes a vec. */
11258 tree
11259 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11261 tree ret, t;
11262 unsigned int ix;
11264 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11265 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11266 CALL_EXPR_ARG (ret, ix) = t;
11267 process_call_operands (ret);
11268 return ret;
11271 /* Conveniently construct a function call expression. FNDECL names the
11272 function to be called and N arguments are passed in the array
11273 ARGARRAY. */
11275 tree
11276 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11278 tree fntype = TREE_TYPE (fndecl);
11279 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11281 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11284 /* Conveniently construct a function call expression. FNDECL names the
11285 function to be called and the arguments are passed in the vector
11286 VEC. */
11288 tree
11289 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11291 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11292 vec_safe_address (vec));
11296 /* Conveniently construct a function call expression. FNDECL names the
11297 function to be called, N is the number of arguments, and the "..."
11298 parameters are the argument expressions. */
11300 tree
11301 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11303 va_list ap;
11304 tree *argarray = XALLOCAVEC (tree, n);
11305 int i;
11307 va_start (ap, n);
11308 for (i = 0; i < n; i++)
11309 argarray[i] = va_arg (ap, tree);
11310 va_end (ap);
11311 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11314 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11315 varargs macros aren't supported by all bootstrap compilers. */
11317 tree
11318 build_call_expr (tree fndecl, int n, ...)
11320 va_list ap;
11321 tree *argarray = XALLOCAVEC (tree, n);
11322 int i;
11324 va_start (ap, n);
11325 for (i = 0; i < n; i++)
11326 argarray[i] = va_arg (ap, tree);
11327 va_end (ap);
11328 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
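/* Illustrative sketch (hypothetical arguments, guarded out of the build):
   building a call to the memcpy builtin through the varargs wrapper;
   DST, SRC and LEN are assumed to be already-built trees of suitable
   pointer and size types.  */
#if 0
static tree
example_build_call_expr (tree dst, tree src, tree len)
{
  tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
  return build_call_expr (fndecl, 3, dst, src, len);
}
#endif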
11331 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11332 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11333 It will get gimplified later into an ordinary internal function. */
11335 tree
11336 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11337 tree type, int n, const tree *args)
11339 tree t = build_call_1 (type, NULL_TREE, n);
11340 for (int i = 0; i < n; ++i)
11341 CALL_EXPR_ARG (t, i) = args[i];
11342 SET_EXPR_LOCATION (t, loc);
11343 CALL_EXPR_IFN (t) = ifn;
11344 return t;
11347 /* Build internal call expression. This is just like CALL_EXPR, except
11348 its CALL_EXPR_FN is NULL. It will get gimplified later into an
11349 ordinary internal function. */
11351 tree
11352 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11353 tree type, int n, ...)
11355 va_list ap;
11356 tree *argarray = XALLOCAVEC (tree, n);
11357 int i;
11359 va_start (ap, n);
11360 for (i = 0; i < n; i++)
11361 argarray[i] = va_arg (ap, tree);
11362 va_end (ap);
11363 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11366 /* Return a function call to FN, if the target is guaranteed to support it,
11367 or null otherwise.
11369 N is the number of arguments, passed in the "...", and TYPE is the
11370 type of the return value. */
11372 tree
11373 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11374 int n, ...)
11376 va_list ap;
11377 tree *argarray = XALLOCAVEC (tree, n);
11378 int i;
11380 va_start (ap, n);
11381 for (i = 0; i < n; i++)
11382 argarray[i] = va_arg (ap, tree);
11383 va_end (ap);
11384 if (internal_fn_p (fn))
11386 internal_fn ifn = as_internal_fn (fn);
11387 if (direct_internal_fn_p (ifn))
11389 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11390 if (!direct_internal_fn_supported_p (ifn, types,
11391 OPTIMIZE_FOR_BOTH))
11392 return NULL_TREE;
11394 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11396 else
11398 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11399 if (!fndecl)
11400 return NULL_TREE;
11401 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11405 /* Return a function call to the appropriate builtin alloca variant.
11407 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11408 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11409 bound for SIZE in case it is not a fixed value. */
11411 tree
11412 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11414 if (max_size >= 0)
11416 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11417 return
11418 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11420 else if (align > 0)
11422 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11423 return build_call_expr (t, 2, size, size_int (align));
11425 else
11427 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11428 return build_call_expr (t, 1, size);
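/* Illustrative sketch (hypothetical request, guarded out of the build):
   a 64-byte allocation with 128-bit alignment and no known upper bound
   selects __builtin_alloca_with_align; a non-negative MAX_SIZE would
   select the _and_max variant, and ALIGN == 0 the plain builtin.  */
#if 0
static tree
example_build_alloca_call_expr (void)
{
  return build_alloca_call_expr (size_int (64), 128, -1);
}
#endif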
11432 /* Create a new constant string literal and return a char* pointer to it.
11433 The STRING_CST value is the LEN characters at STR. */
11434 tree
11435 build_string_literal (int len, const char *str)
11437 tree t, elem, index, type;
11439 t = build_string (len, str);
11440 elem = build_type_variant (char_type_node, 1, 0);
11441 index = build_index_type (size_int (len - 1));
11442 type = build_array_type (elem, index);
11443 TREE_TYPE (t) = type;
11444 TREE_CONSTANT (t) = 1;
11445 TREE_READONLY (t) = 1;
11446 TREE_STATIC (t) = 1;
11448 type = build_pointer_type (elem);
11449 t = build1 (ADDR_EXPR, type,
11450 build4 (ARRAY_REF, elem,
11451 t, integer_zero_node, NULL_TREE, NULL_TREE));
11452 return t;
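/* Illustrative sketch (hypothetical literal, guarded out of the build):
   the result is an ADDR_EXPR of element zero of a constant array of
   const char, directly usable as a `char *' call argument; the length
   passed here includes the terminating NUL.  */
#if 0
static tree
example_build_string_literal (void)
{
  static const char msg[] = "hello";
  return build_string_literal ((int) sizeof msg, msg);
}
#endif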
11457 /* Return true if T (assumed to be a DECL) must be assigned a memory
11458 location. */
11460 bool
11461 needs_to_live_in_memory (const_tree t)
11463 return (TREE_ADDRESSABLE (t)
11464 || is_global_var (t)
11465 || (TREE_CODE (t) == RESULT_DECL
11466 && !DECL_BY_REFERENCE (t)
11467 && aggregate_value_p (t, current_function_decl)));
11470 /* Return the value of the constant X, sign-extended from the precision of its type. */
11472 HOST_WIDE_INT
11473 int_cst_value (const_tree x)
11475 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11476 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11478 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11479 gcc_assert (cst_and_fits_in_hwi (x));
11481 if (bits < HOST_BITS_PER_WIDE_INT)
11483 bool negative = ((val >> (bits - 1)) & 1) != 0;
11484 if (negative)
11485 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11486 else
11487 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11490 return val;
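/* Illustrative sketch (guarded out of the build, assuming the usual
   8-bit char): the value is sign-extended from the precision of X's
   type regardless of its signedness, so an 8-bit constant with the high
   bit set comes back negative.  */
#if 0
static void
example_int_cst_value (void)
{
  tree c = build_int_cst (unsigned_char_type_node, 255);
  gcc_checking_assert (int_cst_value (c) == -1);
}
#endif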
11493 /* If TYPE is an integral or pointer type, return an integer type with
11494 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11495 if TYPE is already an integer type of signedness UNSIGNEDP. */
11497 tree
11498 signed_or_unsigned_type_for (int unsignedp, tree type)
11500 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11501 return type;
11503 if (TREE_CODE (type) == VECTOR_TYPE)
11505 tree inner = TREE_TYPE (type);
11506 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11507 if (!inner2)
11508 return NULL_TREE;
11509 if (inner == inner2)
11510 return type;
11511 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11514 if (TREE_CODE (type) == COMPLEX_TYPE)
11516 tree inner = TREE_TYPE (type);
11517 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11518 if (!inner2)
11519 return NULL_TREE;
11520 if (inner == inner2)
11521 return type;
11522 return build_complex_type (inner2);
11525 if (!INTEGRAL_TYPE_P (type)
11526 && !POINTER_TYPE_P (type)
11527 && TREE_CODE (type) != OFFSET_TYPE)
11528 return NULL_TREE;
11530 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11533 /* If TYPE is an integral or pointer type, return an integer type with
11534 the same precision which is unsigned, or itself if TYPE is already an
11535 unsigned integer type. */
11537 tree
11538 unsigned_type_for (tree type)
11540 return signed_or_unsigned_type_for (1, type);
11543 /* If TYPE is an integral or pointer type, return an integer type with
11544 the same precision which is signed, or itself if TYPE is already a
11545 signed integer type. */
11547 tree
11548 signed_type_for (tree type)
11550 return signed_or_unsigned_type_for (0, type);
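/* Illustrative sketch (guarded out of the build): the signedness helpers
   preserve precision while flipping TYPE_UNSIGNED; for vector and
   complex types they rebuild the composite around the converted inner
   type.  */
#if 0
static void
example_signedness_helpers (void)
{
  tree u = unsigned_type_for (integer_type_node);
  gcc_checking_assert (TYPE_UNSIGNED (u)
		       && TYPE_PRECISION (u)
			  == TYPE_PRECISION (integer_type_node));
  gcc_checking_assert (!TYPE_UNSIGNED (signed_type_for (u)));
}
#endif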
11553 /* If TYPE is a vector type, return a boolean vector type with the same
11554 number of subparts. Otherwise return boolean_type_node. */
11556 tree
11557 truth_type_for (tree type)
11559 if (TREE_CODE (type) == VECTOR_TYPE)
11561 if (VECTOR_BOOLEAN_TYPE_P (type))
11562 return type;
11563 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11564 GET_MODE_SIZE (TYPE_MODE (type)));
11566 else
11567 return boolean_type_node;
11570 /* Returns the largest value obtainable by casting something in INNER type to
11571 OUTER type. */
11573 tree
11574 upper_bound_in_type (tree outer, tree inner)
11576 unsigned int det = 0;
11577 unsigned oprec = TYPE_PRECISION (outer);
11578 unsigned iprec = TYPE_PRECISION (inner);
11579 unsigned prec;
11581 /* Compute a unique number for every combination. */
11582 det |= (oprec > iprec) ? 4 : 0;
11583 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11584 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11586 /* Determine the exponent to use. */
11587 switch (det)
11589 case 0:
11590 case 1:
11591 /* oprec <= iprec, outer: signed, inner: don't care. */
11592 prec = oprec - 1;
11593 break;
11594 case 2:
11595 case 3:
11596 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11597 prec = oprec;
11598 break;
11599 case 4:
11600 /* oprec > iprec, outer: signed, inner: signed. */
11601 prec = iprec - 1;
11602 break;
11603 case 5:
11604 /* oprec > iprec, outer: signed, inner: unsigned. */
11605 prec = iprec;
11606 break;
11607 case 6:
11608 /* oprec > iprec, outer: unsigned, inner: signed. */
11609 prec = oprec;
11610 break;
11611 case 7:
11612 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11613 prec = iprec;
11614 break;
11615 default:
11616 gcc_unreachable ();
11619 return wide_int_to_tree (outer,
11620 wi::mask (prec, false, TYPE_PRECISION (outer)));
11623 /* Returns the smallest value obtainable by casting something in INNER type to
11624 OUTER type. */
11626 tree
11627 lower_bound_in_type (tree outer, tree inner)
11629 unsigned oprec = TYPE_PRECISION (outer);
11630 unsigned iprec = TYPE_PRECISION (inner);
11632 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11633 and obtain 0. */
11634 if (TYPE_UNSIGNED (outer)
11635 /* If we are widening something of an unsigned type, OUTER type
11636 contains all values of INNER type. In particular, both INNER
11637 and OUTER types have zero in common. */
11638 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11639 return build_int_cst (outer, 0);
11640 else
11642 /* If we are widening a signed type to another signed type, we
11643 want to obtain -2^(iprec-1). If we are keeping the
11644 precision or narrowing to a signed type, we want to obtain
11645 -2^(oprec-1). */
11646 unsigned prec = oprec > iprec ? iprec : oprec;
11647 return wide_int_to_tree (outer,
11648 wi::mask (prec - 1, true,
11649 TYPE_PRECISION (outer)));
11653 /* Return nonzero if two operands that are suitable for PHI nodes are
11654 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11655 SSA_NAME or invariant. Note that this is strictly an optimization.
11656 That is, callers of this function can directly call operand_equal_p
11657 and get the same result, only slower. */
11660 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11662 if (arg0 == arg1)
11663 return 1;
11664 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11665 return 0;
11666 return operand_equal_p (arg0, arg1, 0);
11669 /* Returns the number of zeros at the end of the binary representation of X. */
11671 tree
11672 num_ending_zeros (const_tree x)
11674 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
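/* Editorial example: for X = 40 (binary 101000) the result is the constant
   3 in TREE_TYPE (X), since wi::ctz counts the trailing zero bits.  */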
11678 #define WALK_SUBTREE(NODE) \
11679 do \
11681 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11682 if (result) \
11683 return result; \
11685 while (0)
11687 /* This is a subroutine of walk_tree that walks the fields of TYPE that
11688 are to be walked whenever a type is seen in the tree. The rest of the
11689 operands and the return value are as for walk_tree. */
11691 static tree
11692 walk_type_fields (tree type, walk_tree_fn func, void *data,
11693 hash_set<tree> *pset, walk_tree_lh lh)
11695 tree result = NULL_TREE;
11697 switch (TREE_CODE (type))
11699 case POINTER_TYPE:
11700 case REFERENCE_TYPE:
11701 case VECTOR_TYPE:
11702 /* We have to worry about mutually recursive pointers. These can't
11703 be written in C. They can in Ada. It's pathological, but
11704 there's an ACATS test (c38102a) that checks it. Deal with this
11705 by checking if we're pointing to another pointer, that one
11706 points to another pointer, that one does too, and we have no htab.
11707 If so, get a hash table. We check three levels deep to avoid
11708 the cost of the hash table if we don't need one. */
11709 if (POINTER_TYPE_P (TREE_TYPE (type))
11710 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11711 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11712 && !pset)
11714 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11715 func, data);
11716 if (result)
11717 return result;
11719 break;
11722 /* fall through */
11724 case COMPLEX_TYPE:
11725 WALK_SUBTREE (TREE_TYPE (type));
11726 break;
11728 case METHOD_TYPE:
11729 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11731 /* Fall through. */
11733 case FUNCTION_TYPE:
11734 WALK_SUBTREE (TREE_TYPE (type));
11736 tree arg;
11738 /* We never want to walk into default arguments. */
11739 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11740 WALK_SUBTREE (TREE_VALUE (arg));
11742 break;
11744 case ARRAY_TYPE:
11745 /* Don't follow this node's type if it is a pointer, for fear that
11746 we'll have infinite recursion. If we have a PSET, then we
11747 need not fear. */
11748 if (pset
11749 || (!POINTER_TYPE_P (TREE_TYPE (type))
11750 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11751 WALK_SUBTREE (TREE_TYPE (type));
11752 WALK_SUBTREE (TYPE_DOMAIN (type));
11753 break;
11755 case OFFSET_TYPE:
11756 WALK_SUBTREE (TREE_TYPE (type));
11757 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11758 break;
11760 default:
11761 break;
11764 return NULL_TREE;
11767 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11768 called with the DATA and the address of each sub-tree. If FUNC returns a
11769 non-NULL value, the traversal is stopped, and the value returned by FUNC
11770 is returned. If PSET is non-NULL it is used to record the nodes visited,
11771 and to avoid visiting a node more than once. */
11773 tree
11774 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11775 hash_set<tree> *pset, walk_tree_lh lh)
11777 enum tree_code code;
11778 int walk_subtrees;
11779 tree result;
11781 #define WALK_SUBTREE_TAIL(NODE) \
11782 do \
11784 tp = & (NODE); \
11785 goto tail_recurse; \
11787 while (0)
11789 tail_recurse:
11790 /* Skip empty subtrees. */
11791 if (!*tp)
11792 return NULL_TREE;
11794 /* Don't walk the same tree twice, if the user has requested
11795 that we avoid doing so. */
11796 if (pset && pset->add (*tp))
11797 return NULL_TREE;
11799 /* Call the function. */
11800 walk_subtrees = 1;
11801 result = (*func) (tp, &walk_subtrees, data);
11803 /* If we found something, return it. */
11804 if (result)
11805 return result;
11807 code = TREE_CODE (*tp);
11809 /* Even if we didn't, FUNC may have decided that there was nothing
11810 interesting below this point in the tree. */
11811 if (!walk_subtrees)
11813 /* But we still need to check our siblings. */
11814 if (code == TREE_LIST)
11815 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11816 else if (code == OMP_CLAUSE)
11817 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11818 else
11819 return NULL_TREE;
11822 if (lh)
11824 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11825 if (result || !walk_subtrees)
11826 return result;
11829 switch (code)
11831 case ERROR_MARK:
11832 case IDENTIFIER_NODE:
11833 case INTEGER_CST:
11834 case REAL_CST:
11835 case FIXED_CST:
11836 case VECTOR_CST:
11837 case STRING_CST:
11838 case BLOCK:
11839 case PLACEHOLDER_EXPR:
11840 case SSA_NAME:
11841 case FIELD_DECL:
11842 case RESULT_DECL:
11843 /* None of these have subtrees other than those already walked
11844 above. */
11845 break;
11847 case TREE_LIST:
11848 WALK_SUBTREE (TREE_VALUE (*tp));
11849 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11850 break;
11852 case TREE_VEC:
11854 int len = TREE_VEC_LENGTH (*tp);
11856 if (len == 0)
11857 break;
11859 /* Walk all elements but the first. */
11860 while (--len)
11861 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11863 /* Now walk the first one as a tail call. */
11864 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11867 case COMPLEX_CST:
11868 WALK_SUBTREE (TREE_REALPART (*tp));
11869 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11871 case CONSTRUCTOR:
11873 unsigned HOST_WIDE_INT idx;
11874 constructor_elt *ce;
11876 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11877 idx++)
11878 WALK_SUBTREE (ce->value);
11880 break;
11882 case SAVE_EXPR:
11883 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11885 case BIND_EXPR:
11887 tree decl;
11888 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11890 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11891 into declarations that are just mentioned, rather than
11892 declared; they don't really belong to this part of the tree.
11893 And, we can see cycles: the initializer for a declaration
11894 can refer to the declaration itself. */
11895 WALK_SUBTREE (DECL_INITIAL (decl));
11896 WALK_SUBTREE (DECL_SIZE (decl));
11897 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11899 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11902 case STATEMENT_LIST:
11904 tree_stmt_iterator i;
11905 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11906 WALK_SUBTREE (*tsi_stmt_ptr (i));
11908 break;
11910 case OMP_CLAUSE:
11911 switch (OMP_CLAUSE_CODE (*tp))
11913 case OMP_CLAUSE_GANG:
11914 case OMP_CLAUSE__GRIDDIM_:
11915 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11916 /* FALLTHRU */
11918 case OMP_CLAUSE_ASYNC:
11919 case OMP_CLAUSE_WAIT:
11920 case OMP_CLAUSE_WORKER:
11921 case OMP_CLAUSE_VECTOR:
11922 case OMP_CLAUSE_NUM_GANGS:
11923 case OMP_CLAUSE_NUM_WORKERS:
11924 case OMP_CLAUSE_VECTOR_LENGTH:
11925 case OMP_CLAUSE_PRIVATE:
11926 case OMP_CLAUSE_SHARED:
11927 case OMP_CLAUSE_FIRSTPRIVATE:
11928 case OMP_CLAUSE_COPYIN:
11929 case OMP_CLAUSE_COPYPRIVATE:
11930 case OMP_CLAUSE_FINAL:
11931 case OMP_CLAUSE_IF:
11932 case OMP_CLAUSE_NUM_THREADS:
11933 case OMP_CLAUSE_SCHEDULE:
11934 case OMP_CLAUSE_UNIFORM:
11935 case OMP_CLAUSE_DEPEND:
11936 case OMP_CLAUSE_NONTEMPORAL:
11937 case OMP_CLAUSE_NUM_TEAMS:
11938 case OMP_CLAUSE_THREAD_LIMIT:
11939 case OMP_CLAUSE_DEVICE:
11940 case OMP_CLAUSE_DIST_SCHEDULE:
11941 case OMP_CLAUSE_SAFELEN:
11942 case OMP_CLAUSE_SIMDLEN:
11943 case OMP_CLAUSE_ORDERED:
11944 case OMP_CLAUSE_PRIORITY:
11945 case OMP_CLAUSE_GRAINSIZE:
11946 case OMP_CLAUSE_NUM_TASKS:
11947 case OMP_CLAUSE_HINT:
11948 case OMP_CLAUSE_TO_DECLARE:
11949 case OMP_CLAUSE_LINK:
11950 case OMP_CLAUSE_USE_DEVICE_PTR:
11951 case OMP_CLAUSE_IS_DEVICE_PTR:
11952 case OMP_CLAUSE__LOOPTEMP_:
11953 case OMP_CLAUSE__REDUCTEMP_:
11954 case OMP_CLAUSE__SIMDUID_:
11955 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11956 /* FALLTHRU */
11958 case OMP_CLAUSE_INDEPENDENT:
11959 case OMP_CLAUSE_NOWAIT:
11960 case OMP_CLAUSE_DEFAULT:
11961 case OMP_CLAUSE_UNTIED:
11962 case OMP_CLAUSE_MERGEABLE:
11963 case OMP_CLAUSE_PROC_BIND:
11964 case OMP_CLAUSE_INBRANCH:
11965 case OMP_CLAUSE_NOTINBRANCH:
11966 case OMP_CLAUSE_FOR:
11967 case OMP_CLAUSE_PARALLEL:
11968 case OMP_CLAUSE_SECTIONS:
11969 case OMP_CLAUSE_TASKGROUP:
11970 case OMP_CLAUSE_NOGROUP:
11971 case OMP_CLAUSE_THREADS:
11972 case OMP_CLAUSE_SIMD:
11973 case OMP_CLAUSE_DEFAULTMAP:
11974 case OMP_CLAUSE_AUTO:
11975 case OMP_CLAUSE_SEQ:
11976 case OMP_CLAUSE_TILE:
11977 case OMP_CLAUSE__SIMT_:
11978 case OMP_CLAUSE_IF_PRESENT:
11979 case OMP_CLAUSE_FINALIZE:
11980 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11982 case OMP_CLAUSE_LASTPRIVATE:
11983 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11984 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11985 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11987 case OMP_CLAUSE_COLLAPSE:
11989 int i;
11990 for (i = 0; i < 3; i++)
11991 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11992 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11995 case OMP_CLAUSE_LINEAR:
11996 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11997 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11998 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11999 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12001 case OMP_CLAUSE_ALIGNED:
12002 case OMP_CLAUSE_FROM:
12003 case OMP_CLAUSE_TO:
12004 case OMP_CLAUSE_MAP:
12005 case OMP_CLAUSE__CACHE_:
12006 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12007 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12008 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12010 case OMP_CLAUSE_REDUCTION:
12011 case OMP_CLAUSE_TASK_REDUCTION:
12012 case OMP_CLAUSE_IN_REDUCTION:
12014 int i;
12015 for (i = 0; i < 5; i++)
12016 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12017 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12020 default:
12021 gcc_unreachable ();
12023 break;
12025 case TARGET_EXPR:
12027 int i, len;
12029 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12030 But, we only want to walk once. */
12031 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12032 for (i = 0; i < len; ++i)
12033 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12034 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12037 case DECL_EXPR:
12038 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12039 defining. We only want to walk into these fields of a type in this
12040 case and not in the general case of a mere reference to the type.
12042 The criterion is as follows: if the field can be an expression, it
12043 must be walked only here. This should be in keeping with the fields
12044 that are directly gimplified in gimplify_type_sizes in order for the
12045 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12046 variable-sized types.
12048 Note that DECLs get walked as part of processing the BIND_EXPR. */
12049 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12051 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12052 if (TREE_CODE (*type_p) == ERROR_MARK)
12053 return NULL_TREE;
12055 /* Call the function for the type. See if it returns anything or
12056 doesn't want us to continue. If we are to continue, walk both
12057 the normal fields and those for the declaration case. */
12058 result = (*func) (type_p, &walk_subtrees, data);
12059 if (result || !walk_subtrees)
12060 return result;
12062 /* But do not walk a pointed-to type since it may itself need to
12063 be walked in the declaration case if it isn't anonymous. */
12064 if (!POINTER_TYPE_P (*type_p))
12066 result = walk_type_fields (*type_p, func, data, pset, lh);
12067 if (result)
12068 return result;
12071 /* If this is a record type, also walk the fields. */
12072 if (RECORD_OR_UNION_TYPE_P (*type_p))
12074 tree field;
12076 for (field = TYPE_FIELDS (*type_p); field;
12077 field = DECL_CHAIN (field))
12079 /* We'd like to look at the type of the field, but we can
12080 easily get infinite recursion. So assume it's pointed
12081 to elsewhere in the tree. Also, ignore things that
12082 aren't fields. */
12083 if (TREE_CODE (field) != FIELD_DECL)
12084 continue;
12086 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12087 WALK_SUBTREE (DECL_SIZE (field));
12088 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12089 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12090 WALK_SUBTREE (DECL_QUALIFIER (field));
12094 /* Same for scalar types. */
12095 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12096 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12097 || TREE_CODE (*type_p) == INTEGER_TYPE
12098 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12099 || TREE_CODE (*type_p) == REAL_TYPE)
12101 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12102 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12105 WALK_SUBTREE (TYPE_SIZE (*type_p));
12106 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12108 /* FALLTHRU */
12110 default:
12111 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12113 int i, len;
12115 /* Walk over all the sub-trees of this operand. */
12116 len = TREE_OPERAND_LENGTH (*tp);
12118 /* Go through the subtrees. We need to do this in forward order so
12119 that the scope of a FOR_EXPR is handled properly. */
12120 if (len)
12122 for (i = 0; i < len - 1; ++i)
12123 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12124 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12127 /* If this is a type, walk the needed fields in the type. */
12128 else if (TYPE_P (*tp))
12129 return walk_type_fields (*tp, func, data, pset, lh);
12130 break;
12133 /* We didn't find what we were looking for. */
12134 return NULL_TREE;
12136 #undef WALK_SUBTREE_TAIL
12138 #undef WALK_SUBTREE
12140 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12142 tree
12143 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12144 walk_tree_lh lh)
12146 tree result;
12148 hash_set<tree> pset;
12149 result = walk_tree_1 (tp, func, data, &pset, lh);
12150 return result;
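/* Editorial usage sketch for the walkers above.  The callback name and its
   counting purpose are hypothetical; the calling convention is the one
   documented for walk_tree_1:

     static tree
     count_ssa_names_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
         ++*(unsigned *) data;
       return NULL_TREE;   // NULL_TREE means "keep walking".
     }

     unsigned count = 0;
     walk_tree_without_duplicates (&expr, count_ssa_names_r, &count);

   Returning a non-NULL tree from the callback stops the walk and propagates
   that value back to the caller; setting *walk_subtrees to 0 skips the
   operands of the current node while still visiting its siblings.  */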
12154 tree
12155 tree_block (tree t)
12157 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12159 if (IS_EXPR_CODE_CLASS (c))
12160 return LOCATION_BLOCK (t->exp.locus);
12161 gcc_unreachable ();
12162 return NULL;
12165 void
12166 tree_set_block (tree t, tree b)
12168 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12170 if (IS_EXPR_CODE_CLASS (c))
12172 t->exp.locus = set_block (t->exp.locus, b);
12174 else
12175 gcc_unreachable ();
12178 /* Create a nameless artificial label and put it in the current
12179 function context. The label has a location of LOC. Returns the
12180 newly created label. */
12182 tree
12183 create_artificial_label (location_t loc)
12185 tree lab = build_decl (loc,
12186 LABEL_DECL, NULL_TREE, void_type_node);
12188 DECL_ARTIFICIAL (lab) = 1;
12189 DECL_IGNORED_P (lab) = 1;
12190 DECL_CONTEXT (lab) = current_function_decl;
12191 return lab;
12194 /* Given a tree, try to return a useful variable name that we can use
12195 to prefix a temporary that is being assigned the value of the tree.
12196 I.E. given <temp> = &A, return A. */
12198 const char *
12199 get_name (tree t)
12201 tree stripped_decl;
12203 stripped_decl = t;
12204 STRIP_NOPS (stripped_decl);
12205 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12206 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12207 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12209 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12210 if (!name)
12211 return NULL;
12212 return IDENTIFIER_POINTER (name);
12214 else
12216 switch (TREE_CODE (stripped_decl))
12218 case ADDR_EXPR:
12219 return get_name (TREE_OPERAND (stripped_decl, 0));
12220 default:
12221 return NULL;
12228 /* Return true if FNTYPE has a variable argument list. */
12228 bool
12229 stdarg_p (const_tree fntype)
12231 function_args_iterator args_iter;
12232 tree n = NULL_TREE, t;
12234 if (!fntype)
12235 return false;
12237 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12239 n = t;
12242 return n != NULL_TREE && n != void_type_node;
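/* Editorial note: the test above relies on TYPE_ARG_TYPES of a prototyped,
   non-variadic function type being terminated by void_type_node.  So for
   "int f (const char *, ...)" the last listed type is the const char * and
   stdarg_p returns true; for "int g (int)" the iteration ends on
   void_type_node and it returns false; for an unprototyped declaration the
   list is empty, N stays NULL_TREE, and the result is again false.  */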
12245 /* Return true if FNTYPE has a prototype. */
12247 bool
12248 prototype_p (const_tree fntype)
12250 tree t;
12252 gcc_assert (fntype != NULL_TREE);
12254 t = TYPE_ARG_TYPES (fntype);
12255 return (t != NULL_TREE);
12258 /* If BLOCK is inlined from an __attribute__((__artificial__))
12259 routine, return a pointer to the location from where it has been
12260 called. */
12261 location_t *
12262 block_nonartificial_location (tree block)
12264 location_t *ret = NULL;
12266 while (block && TREE_CODE (block) == BLOCK
12267 && BLOCK_ABSTRACT_ORIGIN (block))
12269 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12270 if (TREE_CODE (ao) == FUNCTION_DECL)
12272 /* If AO is an artificial inline, point RET to the
12273 call site locus at which it has been inlined and continue
12274 the loop, in case AO's caller is also an artificial
12275 inline. */
12276 if (DECL_DECLARED_INLINE_P (ao)
12277 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12278 ret = &BLOCK_SOURCE_LOCATION (block);
12279 else
12280 break;
12282 else if (TREE_CODE (ao) != BLOCK)
12283 break;
12285 block = BLOCK_SUPERCONTEXT (block);
12287 return ret;
12291 /* If EXP is inlined from an __attribute__((__artificial__))
12292 function, return the location of the original call expression. */
12294 location_t
12295 tree_nonartificial_location (tree exp)
12297 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12299 if (loc)
12300 return *loc;
12301 else
12302 return EXPR_LOCATION (exp);
12306 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12307 nodes. */
12309 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12311 hashval_t
12312 cl_option_hasher::hash (tree x)
12314 const_tree const t = x;
12315 const char *p;
12316 size_t i;
12317 size_t len = 0;
12318 hashval_t hash = 0;
12320 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12322 p = (const char *)TREE_OPTIMIZATION (t);
12323 len = sizeof (struct cl_optimization);
12326 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12327 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12329 else
12330 gcc_unreachable ();
12332 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12333 something else. */
12334 for (i = 0; i < len; i++)
12335 if (p[i])
12336 hash = (hash << 4) ^ ((i << 2) | p[i]);
12338 return hash;
12341 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12342 TARGET_OPTION tree node) is the same as that given by *Y, which is a
12343 node of the same kind. */
12345 bool
12346 cl_option_hasher::equal (tree x, tree y)
12348 const_tree const xt = x;
12349 const_tree const yt = y;
12351 if (TREE_CODE (xt) != TREE_CODE (yt))
12352 return 0;
12354 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12355 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12356 TREE_OPTIMIZATION (yt));
12357 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12358 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12359 TREE_TARGET_OPTION (yt));
12360 else
12361 gcc_unreachable ();
12364 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12366 tree
12367 build_optimization_node (struct gcc_options *opts)
12369 tree t;
12371 /* Use the cache of optimization nodes. */
12373 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12374 opts);
12376 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12377 t = *slot;
12378 if (!t)
12380 /* Insert this one into the hash table. */
12381 t = cl_optimization_node;
12382 *slot = t;
12384 /* Make a new node for next time round. */
12385 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12388 return t;
12391 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12393 tree
12394 build_target_option_node (struct gcc_options *opts)
12396 tree t;
12398 /* Use the cache of target option nodes. */
12400 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12401 opts);
12403 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12404 t = *slot;
12405 if (!t)
12407 /* Insert this one into the hash table. */
12408 t = cl_target_option_node;
12409 *slot = t;
12411 /* Make a new node for next time round. */
12412 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12415 return t;
12418 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12419 so that they aren't saved during PCH writing. */
12421 void
12422 prepare_target_option_nodes_for_pch (void)
12424 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12425 for (; iter != cl_option_hash_table->end (); ++iter)
12426 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12427 TREE_TARGET_GLOBALS (*iter) = NULL;
12430 /* Determine the "ultimate origin" of a block. */
12432 tree
12433 block_ultimate_origin (const_tree block)
12435 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12437 if (origin == NULL_TREE)
12438 return NULL_TREE;
12439 else
12441 gcc_checking_assert ((DECL_P (origin)
12442 && DECL_ORIGIN (origin) == origin)
12443 || BLOCK_ORIGIN (origin) == origin);
12444 return origin;
12448 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12449 no instruction. */
12451 bool
12452 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12454 /* Do not strip casts into or out of differing address spaces. */
12455 if (POINTER_TYPE_P (outer_type)
12456 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12458 if (!POINTER_TYPE_P (inner_type)
12459 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12460 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12461 return false;
12463 else if (POINTER_TYPE_P (inner_type)
12464 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12466 /* We already know that outer_type is not a pointer with
12467 a non-generic address space. */
12468 return false;
12471 /* Use precision rather than machine mode when we can, which gives
12472 the correct answer even for submode (bit-field) types. */
12473 if ((INTEGRAL_TYPE_P (outer_type)
12474 || POINTER_TYPE_P (outer_type)
12475 || TREE_CODE (outer_type) == OFFSET_TYPE)
12476 && (INTEGRAL_TYPE_P (inner_type)
12477 || POINTER_TYPE_P (inner_type)
12478 || TREE_CODE (inner_type) == OFFSET_TYPE))
12479 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12481 /* Otherwise fall back on comparing machine modes (e.g. for
12482 aggregate types, floats). */
12483 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
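/* Editorial examples of the precision-based rule above: a cast between
   "int" and "unsigned int" is a nop (equal TYPE_PRECISION), as is a cast
   between a pointer and an integer of pointer width; a cast from a 32-bit
   "int" to a 64-bit "long" is not, since the precisions differ.  Casts
   involving a pointer into a non-generic address space are only nops when
   both sides are pointers into that same address space.  */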
12486 /* Return true iff conversion in EXP generates no instruction. Mark
12487 it inline so that we fully inline into the stripping functions even
12488 though we have two uses of this function. */
12490 static inline bool
12491 tree_nop_conversion (const_tree exp)
12493 tree outer_type, inner_type;
12495 if (location_wrapper_p (exp))
12496 return true;
12497 if (!CONVERT_EXPR_P (exp)
12498 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12499 return false;
12500 if (TREE_OPERAND (exp, 0) == error_mark_node)
12501 return false;
12503 outer_type = TREE_TYPE (exp);
12504 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12506 if (!inner_type)
12507 return false;
12509 return tree_nop_conversion_p (outer_type, inner_type);
12512 /* Return true iff conversion in EXP generates no instruction. Don't
12513 consider conversions changing the signedness. */
12515 static bool
12516 tree_sign_nop_conversion (const_tree exp)
12518 tree outer_type, inner_type;
12520 if (!tree_nop_conversion (exp))
12521 return false;
12523 outer_type = TREE_TYPE (exp);
12524 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12526 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12527 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12530 /* Strip conversions from EXP according to tree_nop_conversion and
12531 return the resulting expression. */
12533 tree
12534 tree_strip_nop_conversions (tree exp)
12536 while (tree_nop_conversion (exp))
12537 exp = TREE_OPERAND (exp, 0);
12538 return exp;
12541 /* Strip conversions from EXP according to tree_sign_nop_conversion
12542 and return the resulting expression. */
12544 tree
12545 tree_strip_sign_nop_conversions (tree exp)
12547 while (tree_sign_nop_conversion (exp))
12548 exp = TREE_OPERAND (exp, 0);
12549 return exp;
12552 /* Avoid any floating point extensions from EXP. */
12553 tree
12554 strip_float_extensions (tree exp)
12556 tree sub, expt, subt;
12558 /* For floating point constant look up the narrowest type that can hold
12559 it properly and handle it like (type)(narrowest_type)constant.
12560 This way we can optimize for instance a=a*2.0 where "a" is float
12561 but 2.0 is a double constant. */
12562 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12564 REAL_VALUE_TYPE orig;
12565 tree type = NULL;
12567 orig = TREE_REAL_CST (exp);
12568 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12569 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12570 type = float_type_node;
12571 else if (TYPE_PRECISION (TREE_TYPE (exp))
12572 > TYPE_PRECISION (double_type_node)
12573 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12574 type = double_type_node;
12575 if (type)
12576 return build_real_truncate (type, orig);
12579 if (!CONVERT_EXPR_P (exp))
12580 return exp;
12582 sub = TREE_OPERAND (exp, 0);
12583 subt = TREE_TYPE (sub);
12584 expt = TREE_TYPE (exp);
12586 if (!FLOAT_TYPE_P (subt))
12587 return exp;
12589 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12590 return exp;
12592 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12593 return exp;
12595 return strip_float_extensions (sub);
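/* Editorial example, restating the comment above: in "a = a * 2.0" with
   "a" of type float, the double constant 2.0 truncates exactly to float,
   so it is rebuilt as a float constant; similarly a (double) cast applied
   to a float operand is peeled off because the operand's precision does
   not exceed the target's.  Decimal and binary floating-point types are
   never mixed by this stripping.  */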
12598 /* Strip out all handled components that produce invariant
12599 offsets. */
12601 const_tree
12602 strip_invariant_refs (const_tree op)
12604 while (handled_component_p (op))
12606 switch (TREE_CODE (op))
12608 case ARRAY_REF:
12609 case ARRAY_RANGE_REF:
12610 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12611 || TREE_OPERAND (op, 2) != NULL_TREE
12612 || TREE_OPERAND (op, 3) != NULL_TREE)
12613 return NULL;
12614 break;
12616 case COMPONENT_REF:
12617 if (TREE_OPERAND (op, 2) != NULL_TREE)
12618 return NULL;
12619 break;
12621 default:;
12623 op = TREE_OPERAND (op, 0);
12626 return op;
12629 static GTY(()) tree gcc_eh_personality_decl;
12631 /* Return the GCC personality function decl. */
12633 tree
12634 lhd_gcc_personality (void)
12636 if (!gcc_eh_personality_decl)
12637 gcc_eh_personality_decl = build_personality_function ("gcc");
12638 return gcc_eh_personality_decl;
12641 /* TARGET is a call target of a GIMPLE call statement
12642 (obtained by gimple_call_fn). Return true if it is
12643 an OBJ_TYPE_REF representing a virtual call to a C++ method.
12644 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12645 through a cast, where the middle-end devirtualization machinery
12646 can't apply.) */
12648 bool
12649 virtual_method_call_p (const_tree target)
12651 if (TREE_CODE (target) != OBJ_TYPE_REF)
12652 return false;
12653 tree t = TREE_TYPE (target);
12654 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12655 t = TREE_TYPE (t);
12656 if (TREE_CODE (t) == FUNCTION_TYPE)
12657 return false;
12658 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12659 /* If we do not have BINFO associated, it means that type was built
12660 without devirtualization enabled. Do not consider this a virtual
12661 call. */
12662 if (!TYPE_BINFO (obj_type_ref_class (target)))
12663 return false;
12664 return true;
12667 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12669 tree
12670 obj_type_ref_class (const_tree ref)
12672 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12673 ref = TREE_TYPE (ref);
12674 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12675 ref = TREE_TYPE (ref);
12676 /* We look for the type THIS points to. ObjC also builds
12677 OBJ_TYPE_REF with non-method calls; their first parameter
12678 ID, however, also corresponds to the class type. */
12679 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12680 || TREE_CODE (ref) == FUNCTION_TYPE);
12681 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12682 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12683 return TREE_TYPE (ref);
12686 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12688 static tree
12689 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12691 unsigned int i;
12692 tree base_binfo, b;
12694 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12695 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12696 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12697 return base_binfo;
12698 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12699 return b;
12700 return NULL;
12703 /* Try to find a base info of BINFO that would have its field decl at offset
12704 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12705 found, return it, otherwise return NULL_TREE. */
12707 tree
12708 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12710 tree type = BINFO_TYPE (binfo);
12712 while (true)
12714 HOST_WIDE_INT pos, size;
12715 tree fld;
12716 int i;
12718 if (types_same_for_odr (type, expected_type))
12719 return binfo;
12720 if (maybe_lt (offset, 0))
12721 return NULL_TREE;
12723 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12725 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12726 continue;
12728 pos = int_bit_position (fld);
12729 size = tree_to_uhwi (DECL_SIZE (fld));
12730 if (known_in_range_p (offset, pos, size))
12731 break;
12733 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12734 return NULL_TREE;
12736 /* Offset 0 indicates the primary base, whose vtable contents are
12737 represented in the binfo for the derived class. */
12738 else if (maybe_ne (offset, 0))
12740 tree found_binfo = NULL, base_binfo;
12741 /* Offsets in BINFO are in bytes relative to the whole structure
12742 while POS is in bits relative to the containing field. */
12743 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12744 / BITS_PER_UNIT);
12746 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12747 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12748 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12750 found_binfo = base_binfo;
12751 break;
12753 if (found_binfo)
12754 binfo = found_binfo;
12755 else
12756 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12757 binfo_offset);
12760 type = TREE_TYPE (fld);
12761 offset -= pos;
12765 /* Returns true if X is a typedef decl. */
12767 bool
12768 is_typedef_decl (const_tree x)
12770 return (x && TREE_CODE (x) == TYPE_DECL
12771 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12774 /* Returns true iff TYPE is a type variant created for a typedef. */
12776 bool
12777 typedef_variant_p (const_tree type)
12779 return is_typedef_decl (TYPE_NAME (type));
12782 /* A class to handle converting a string that might contain
12783 control characters (e.g. newline, form-feed, etc.) into one
12784 which contains escape sequences instead. */
12786 class escaped_string
12788 public:
12789 escaped_string () { m_owned = false; m_str = NULL; };
12790 ~escaped_string () { if (m_owned) free (m_str); }
12791 operator const char *() const { return (const char *) m_str; }
12792 void escape (const char *);
12793 private:
12794 char *m_str;
12795 bool m_owned;
12798 /* PR 84195: Replace control characters in "unescaped" with their
12799 escaped equivalents. Allow newlines if -fmessage-length has
12800 been set to a non-zero value. This is done here, rather than
12801 where the attribute is recorded, as the message length can
12802 change between these two locations. */
12804 void
12805 escaped_string::escape (const char *unescaped)
12807 char *escaped;
12808 size_t i, new_i, len;
12810 if (m_owned)
12811 free (m_str);
12813 m_str = const_cast<char *> (unescaped);
12814 m_owned = false;
12816 if (unescaped == NULL || *unescaped == 0)
12817 return;
12819 len = strlen (unescaped);
12820 escaped = NULL;
12821 new_i = 0;
12823 for (i = 0; i < len; i++)
12825 char c = unescaped[i];
12827 if (!ISCNTRL (c))
12829 if (escaped)
12830 escaped[new_i++] = c;
12831 continue;
12834 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12836 if (escaped == NULL)
12838 /* We only allocate space for a new string if we
12839 actually encounter a control character that
12840 needs replacing. */
12841 escaped = (char *) xmalloc (len * 2 + 1);
12842 strncpy (escaped, unescaped, i);
12843 new_i = i;
12846 escaped[new_i++] = '\\';
12848 switch (c)
12850 case '\a': escaped[new_i++] = 'a'; break;
12851 case '\b': escaped[new_i++] = 'b'; break;
12852 case '\f': escaped[new_i++] = 'f'; break;
12853 case '\n': escaped[new_i++] = 'n'; break;
12854 case '\r': escaped[new_i++] = 'r'; break;
12855 case '\t': escaped[new_i++] = 't'; break;
12856 case '\v': escaped[new_i++] = 'v'; break;
12857 default: escaped[new_i++] = '?'; break;
12860 else if (escaped)
12861 escaped[new_i++] = c;
12864 if (escaped)
12866 escaped[new_i] = 0;
12867 m_str = escaped;
12868 m_owned = true;
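/* Editorial usage sketch for the class above (the message text is
   hypothetical):

     escaped_string msg;
     msg.escape ("bad\tvalue\n");
     // (const char *) msg now reads "bad\\tvalue\\n"; the newline is kept
     // literally instead when -fmessage-length lets the pretty-printer wrap
     // lines.  The buffer is reallocated only if a control character is
     // actually found, and the destructor frees it.  */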
12872 /* Warn about a use of an identifier which was marked deprecated. Returns
12873 whether a warning was given. */
12875 bool
12876 warn_deprecated_use (tree node, tree attr)
12878 escaped_string msg;
12880 if (node == 0 || !warn_deprecated_decl)
12881 return false;
12883 if (!attr)
12885 if (DECL_P (node))
12886 attr = DECL_ATTRIBUTES (node);
12887 else if (TYPE_P (node))
12889 tree decl = TYPE_STUB_DECL (node);
12890 if (decl)
12891 attr = lookup_attribute ("deprecated",
12892 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12896 if (attr)
12897 attr = lookup_attribute ("deprecated", attr);
12899 if (attr)
12900 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12902 bool w = false;
12903 if (DECL_P (node))
12905 auto_diagnostic_group d;
12906 if (msg)
12907 w = warning (OPT_Wdeprecated_declarations,
12908 "%qD is deprecated: %s", node, (const char *) msg);
12909 else
12910 w = warning (OPT_Wdeprecated_declarations,
12911 "%qD is deprecated", node);
12912 if (w)
12913 inform (DECL_SOURCE_LOCATION (node), "declared here");
12915 else if (TYPE_P (node))
12917 tree what = NULL_TREE;
12918 tree decl = TYPE_STUB_DECL (node);
12920 if (TYPE_NAME (node))
12922 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12923 what = TYPE_NAME (node);
12924 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12925 && DECL_NAME (TYPE_NAME (node)))
12926 what = DECL_NAME (TYPE_NAME (node));
12929 auto_diagnostic_group d;
12930 if (what)
12932 if (msg)
12933 w = warning (OPT_Wdeprecated_declarations,
12934 "%qE is deprecated: %s", what, (const char *) msg);
12935 else
12936 w = warning (OPT_Wdeprecated_declarations,
12937 "%qE is deprecated", what);
12939 else
12941 if (msg)
12942 w = warning (OPT_Wdeprecated_declarations,
12943 "type is deprecated: %s", (const char *) msg);
12944 else
12945 w = warning (OPT_Wdeprecated_declarations,
12946 "type is deprecated");
12949 if (w && decl)
12950 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12953 return w;
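/* Editorial example of the diagnostics produced above.  Given the
   (hypothetical) declaration

     int old_api (void) __attribute__ ((deprecated ("use new_api instead")));

   a use of old_api emits, under -Wdeprecated-declarations,
   "'old_api' is deprecated: use new_api instead" plus a "declared here"
   note at the declaration; without the message argument only the plain
   "'old_api' is deprecated" form is given.  */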
12956 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12957 somewhere in it. */
12959 bool
12960 contains_bitfld_component_ref_p (const_tree ref)
12962 while (handled_component_p (ref))
12964 if (TREE_CODE (ref) == COMPONENT_REF
12965 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12966 return true;
12967 ref = TREE_OPERAND (ref, 0);
12970 return false;
12973 /* Try to determine whether a TRY_CATCH expression can fall through.
12974 This is a subroutine of block_may_fallthru. */
12976 static bool
12977 try_catch_may_fallthru (const_tree stmt)
12979 tree_stmt_iterator i;
12981 /* If the TRY block can fall through, the whole TRY_CATCH can
12982 fall through. */
12983 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12984 return true;
12986 i = tsi_start (TREE_OPERAND (stmt, 1));
12987 switch (TREE_CODE (tsi_stmt (i)))
12989 case CATCH_EXPR:
12990 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12991 catch expression and a body. The whole TRY_CATCH may fall
12992 through iff any of the catch bodies falls through. */
12993 for (; !tsi_end_p (i); tsi_next (&i))
12995 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12996 return true;
12998 return false;
13000 case EH_FILTER_EXPR:
13001 /* The exception filter expression only matters if there is an
13002 exception. If the exception does not match EH_FILTER_TYPES,
13003 we will execute EH_FILTER_FAILURE, and we will fall through
13004 if that falls through. If the exception does match
13005 EH_FILTER_TYPES, the stack unwinder will continue up the
13006 stack, so we will not fall through. We don't know whether we
13007 will throw an exception which matches EH_FILTER_TYPES or not,
13008 so we just ignore EH_FILTER_TYPES and assume that we might
13009 throw an exception which doesn't match. */
13010 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13012 default:
13013 /* This case represents statements to be executed when an
13014 exception occurs. Those statements are implicitly followed
13015 by a RESX statement to resume execution after the exception.
13016 So in this case the TRY_CATCH never falls through. */
13017 return false;
13021 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13022 need not be 100% accurate; simply be conservative and return true if we
13023 don't know. This is used only to avoid stupidly generating extra code.
13024 If we're wrong, we'll just delete the extra code later. */
13026 bool
13027 block_may_fallthru (const_tree block)
13029 /* This CONST_CAST is okay because expr_last returns its argument
13030 unmodified and we assign it to a const_tree. */
13031 const_tree stmt = expr_last (CONST_CAST_TREE (block));
13033 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13035 case GOTO_EXPR:
13036 case RETURN_EXPR:
13037 /* Easy cases. If the last statement of the block implies
13038 control transfer, then we can't fall through. */
13039 return false;
13041 case SWITCH_EXPR:
13042 /* If there is a default: label or case labels cover all possible
13043 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13044 to some case label in all cases and all we care is whether the
13045 SWITCH_BODY falls through. */
13046 if (SWITCH_ALL_CASES_P (stmt))
13047 return block_may_fallthru (SWITCH_BODY (stmt));
13048 return true;
13050 case COND_EXPR:
13051 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13052 return true;
13053 return block_may_fallthru (COND_EXPR_ELSE (stmt));
13055 case BIND_EXPR:
13056 return block_may_fallthru (BIND_EXPR_BODY (stmt));
13058 case TRY_CATCH_EXPR:
13059 return try_catch_may_fallthru (stmt);
13061 case TRY_FINALLY_EXPR:
13062 /* The finally clause is always executed after the try clause,
13063 so if it does not fall through, then the try-finally will not
13064 fall through. Otherwise, if the try clause does not fall
13065 through, then when the finally clause falls through it will
13066 resume execution wherever the try clause was going. So the
13067 whole try-finally will only fall through if both the try
13068 clause and the finally clause fall through. */
13069 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13070 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13072 case MODIFY_EXPR:
13073 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13074 stmt = TREE_OPERAND (stmt, 1);
13075 else
13076 return true;
13077 /* FALLTHRU */
13079 case CALL_EXPR:
13080 /* Functions that do not return do not fall through. */
13081 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13083 case CLEANUP_POINT_EXPR:
13084 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13086 case TARGET_EXPR:
13087 return block_may_fallthru (TREE_OPERAND (stmt, 1));
13089 case ERROR_MARK:
13090 return true;
13092 default:
13093 return lang_hooks.block_may_fallthru (stmt);
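/* Editorial worked example for the predicate above: in
   "{ if (c) return 1; else x = 2; }" the final COND_EXPR may fall through
   because its else arm (a MODIFY_EXPR that is not a noreturn call) falls
   through, so the whole block may fall through; a block ending in a plain
   RETURN_EXPR or GOTO_EXPR cannot.  As the leading comment notes, the
   answer only needs to be conservative, so "true" is always safe.  */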
13097 /* True if we are using EH to handle cleanups. */
13098 static bool using_eh_for_cleanups_flag = false;
13100 /* This routine is called from front ends to indicate that EH should be used for
13101 cleanups. */
13102 void
13103 using_eh_for_cleanups (void)
13105 using_eh_for_cleanups_flag = true;
13108 /* Query whether EH is used for cleanups. */
13109 bool
13110 using_eh_for_cleanups_p (void)
13112 return using_eh_for_cleanups_flag;
13115 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
13116 const char *
13117 get_tree_code_name (enum tree_code code)
13119 const char *invalid = "<invalid tree code>";
13121 if (code >= MAX_TREE_CODES)
13122 return invalid;
13124 return tree_code_name[code];
13127 /* Drops the TREE_OVERFLOW flag from T. */
13129 tree
13130 drop_tree_overflow (tree t)
13132 gcc_checking_assert (TREE_OVERFLOW (t));
13134 /* For tree codes with a sharing machinery re-build the result. */
13135 if (poly_int_tree_p (t))
13136 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13138 /* For VECTOR_CST, remove the overflow bits from the encoded elements
13139 and canonicalize the result. */
13140 if (TREE_CODE (t) == VECTOR_CST)
13142 tree_vector_builder builder;
13143 builder.new_unary_operation (TREE_TYPE (t), t, true);
13144 unsigned int count = builder.encoded_nelts ();
13145 for (unsigned int i = 0; i < count; ++i)
13147 tree elt = VECTOR_CST_ELT (t, i);
13148 if (TREE_OVERFLOW (elt))
13149 elt = drop_tree_overflow (elt);
13150 builder.quick_push (elt);
13152 return builder.build ();
13155 /* Otherwise, as all tcc_constants are possibly shared, copy the node
13156 and drop the flag. */
13157 t = copy_node (t);
13158 TREE_OVERFLOW (t) = 0;
13160 /* For constants that contain nested constants, drop the flag
13161 from those as well. */
13162 if (TREE_CODE (t) == COMPLEX_CST)
13164 if (TREE_OVERFLOW (TREE_REALPART (t)))
13165 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13166 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13167 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13170 return t;
13173 /* Given a memory reference expression T, return its base address.
13174 The base address of a memory reference expression is the main
13175 object being referenced. For instance, the base address for
13176 'array[i].fld[j]' is 'array'. You can think of this as stripping
13177 away the offset part from a memory address.
13179 This function calls handled_component_p to strip away all the inner
13180 parts of the memory reference until it reaches the base object. */
13182 tree
13183 get_base_address (tree t)
13185 while (handled_component_p (t))
13186 t = TREE_OPERAND (t, 0);
13188 if ((TREE_CODE (t) == MEM_REF
13189 || TREE_CODE (t) == TARGET_MEM_REF)
13190 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13191 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13193 /* ??? Either the alias oracle or all callers need to properly deal
13194 with WITH_SIZE_EXPRs before we can look through those. */
13195 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13196 return NULL_TREE;
13198 return t;
13201 /* Return a tree of sizetype representing the size, in bytes, of the element
13202 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13204 tree
13205 array_ref_element_size (tree exp)
13207 tree aligned_size = TREE_OPERAND (exp, 3);
13208 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13209 location_t loc = EXPR_LOCATION (exp);
13211 /* If a size was specified in the ARRAY_REF, it's the size measured
13212 in alignment units of the element type. So multiply by that value. */
13213 if (aligned_size)
13215 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13216 sizetype from another type of the same width and signedness. */
13217 if (TREE_TYPE (aligned_size) != sizetype)
13218 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13219 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13220 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13223 /* Otherwise, take the size from that of the element type. Substitute
13224 any PLACEHOLDER_EXPR that we have. */
13225 else
13226 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13229 /* Return a tree representing the lower bound of the array mentioned in
13230 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13232 tree
13233 array_ref_low_bound (tree exp)
13235 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13237 /* If a lower bound is specified in EXP, use it. */
13238 if (TREE_OPERAND (exp, 2))
13239 return TREE_OPERAND (exp, 2);
13241 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13242 substituting for a PLACEHOLDER_EXPR as needed. */
13243 if (domain_type && TYPE_MIN_VALUE (domain_type))
13244 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13246 /* Otherwise, return a zero of the appropriate type. */
13247 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13250 /* Return a tree representing the upper bound of the array mentioned in
13251 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13253 tree
13254 array_ref_up_bound (tree exp)
13256 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13258 /* If there is a domain type and it has an upper bound, use it, substituting
13259 for a PLACEHOLDER_EXPR as needed. */
13260 if (domain_type && TYPE_MAX_VALUE (domain_type))
13261 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13263 /* Otherwise fail. */
13264 return NULL_TREE;
13267 /* Returns true if REF is an array reference or a component reference
13268 to an array at the end of a structure.
13269 If this is the case, the array may be allocated larger
13270 than its upper bound implies. */
13272 bool
13273 array_at_struct_end_p (tree ref)
13275 tree atype;
13277 if (TREE_CODE (ref) == ARRAY_REF
13278 || TREE_CODE (ref) == ARRAY_RANGE_REF)
13280 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13281 ref = TREE_OPERAND (ref, 0);
13283 else if (TREE_CODE (ref) == COMPONENT_REF
13284 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13285 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13286 else
13287 return false;
13289 if (TREE_CODE (ref) == STRING_CST)
13290 return false;
13292 tree ref_to_array = ref;
13293 while (handled_component_p (ref))
13295 /* If the reference chain contains a component reference to a
13296 non-union type and another field follows, the reference
13297 is not at the end of a structure. */
13298 if (TREE_CODE (ref) == COMPONENT_REF)
13300 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13302 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13303 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13304 nextf = DECL_CHAIN (nextf);
13305 if (nextf)
13306 return false;
13309 /* If we have a multi-dimensional array we do not consider
13310 a non-innermost dimension as a flex array even if the whole
13311 multi-dimensional array is at struct end.
13312 Same for an array of aggregates with a trailing array
13313 member. */
13314 else if (TREE_CODE (ref) == ARRAY_REF)
13315 return false;
13316 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13318 /* If we view an underlying object as something else, then what we
13319 gathered up to now is what we have to rely on. */
13320 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13321 break;
13322 else
13323 gcc_unreachable ();
13325 ref = TREE_OPERAND (ref, 0);
13328 /* The array is now at struct end. Treat flexible arrays as
13329 always subject to extension, even into just padding constrained by
13330 an underlying decl. */
13331 if (! TYPE_SIZE (atype)
13332 || ! TYPE_DOMAIN (atype)
13333 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13334 return true;
13336 if (TREE_CODE (ref) == MEM_REF
13337 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13338 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13340 /* If the reference is based on a declared entity, the size of the array
13341 is constrained by its given domain. (Do not trust commons PR/69368). */
13342 if (DECL_P (ref)
13343 && !(flag_unconstrained_commons
13344 && VAR_P (ref) && DECL_COMMON (ref))
13345 && DECL_SIZE_UNIT (ref)
13346 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13348 /* Check whether the array domain covers all of the available
13349 padding. */
13350 poly_int64 offset;
13351 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13352 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13353 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13354 return true;
13355 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13356 return true;
13358 /* If at least one extra element fits it is a flexarray. */
13359 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13360 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13361 + 2)
13362 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13363 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13364 return true;
13366 return false;
13369 return true;
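/* Editorial examples for the predicate above (the struct names are
   hypothetical): for "struct S { int n; int tail[1]; } *p;" the reference
   p->tail[i] is treated as an array at struct end, since no field follows
   "tail" and the base is not a declared object whose DECL_SIZE_UNIT could
   rule out extra room; for "struct T { int a[4]; int n; } t;" the reference
   t.a[i] is not, because another field follows "a" in the record.  */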
13372 /* Return a tree representing the offset, in bytes, of the field referenced
13373 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13375 tree
13376 component_ref_field_offset (tree exp)
13378 tree aligned_offset = TREE_OPERAND (exp, 2);
13379 tree field = TREE_OPERAND (exp, 1);
13380 location_t loc = EXPR_LOCATION (exp);
13382 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13383 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13384 value. */
13385 if (aligned_offset)
13387 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13388 sizetype from another type of the same width and signedness. */
13389 if (TREE_TYPE (aligned_offset) != sizetype)
13390 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13391 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13392 size_int (DECL_OFFSET_ALIGN (field)
13393 / BITS_PER_UNIT));
13396 /* Otherwise, take the offset from that of the field. Substitute
13397 any PLACEHOLDER_EXPR that we have. */
13398 else
13399 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13402 /* Return the machine mode of T. For vectors, returns the mode of the
13403 inner type. The main use case is to feed the result to HONOR_NANS,
13404 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13406 machine_mode
13407 element_mode (const_tree t)
13409 if (!TYPE_P (t))
13410 t = TREE_TYPE (t);
13411 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13412 t = TREE_TYPE (t);
13413 return TYPE_MODE (t);
13416 /* Vector types need to re-check the target flags each time we report
13417 the machine mode. We need to do this because attribute target can
13418 change the result of vector_mode_supported_p and have_regs_of_mode
13419 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13420 change on a per-function basis. */
13421 /* ??? Possibly a better solution is to run through all the types
13422 referenced by a function and re-compute the TYPE_MODE once, rather
13423 than make the TYPE_MODE macro call a function. */
13425 machine_mode
13426 vector_type_mode (const_tree t)
13428 machine_mode mode;
13430 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13432 mode = t->type_common.mode;
13433 if (VECTOR_MODE_P (mode)
13434 && (!targetm.vector_mode_supported_p (mode)
13435 || !have_regs_of_mode[mode]))
13437 scalar_int_mode innermode;
13439 /* For integers, try mapping it to a same-sized scalar mode. */
13440 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13442 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13443 * GET_MODE_BITSIZE (innermode));
13444 scalar_int_mode mode;
13445 if (int_mode_for_size (size, 0).exists (&mode)
13446 && have_regs_of_mode[mode])
13447 return mode;
13450 return BLKmode;
13453 return mode;
13456 /* Verify that basic properties of T match TV and thus T can be a variant of
13457 TV. TV should be the more specified variant (i.e. the main variant). */
13459 static bool
13460 verify_type_variant (const_tree t, tree tv)
13462 /* Type variant can differ by:
13464 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13465 ENCODE_QUAL_ADDR_SPACE.
13466 - the main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13467 in this case some values may not be set in the variant types
13468 (see TYPE_COMPLETE_P checks).
13469 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13470 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13471 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13472 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13473 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13474 this is necessary to make it possible to merge types from different TUs
13475 - arrays, pointers and references may have TREE_TYPE that is a variant
13476 of TREE_TYPE of their main variants.
13477 - aggregates may have a new TYPE_FIELDS list that lists variants of
13478 the main variant TYPE_FIELDS.
13479 - vector types may differ by TYPE_VECTOR_OPAQUE
13482 /* Convenience macro for matching individual fields. */
13483 #define verify_variant_match(flag) \
13484 do { \
13485 if (flag (tv) != flag (t)) \
13487 error ("type variant differs by %s", #flag); \
13488 debug_tree (tv); \
13489 return false; \
13491 } while (false)
13493 /* tree_base checks. */
13495 verify_variant_match (TREE_CODE);
13496 /* FIXME: Ada builds non-artificial variants of artificial types. */
13497 if (TYPE_ARTIFICIAL (tv) && 0)
13498 verify_variant_match (TYPE_ARTIFICIAL);
13499 if (POINTER_TYPE_P (tv))
13500 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13501 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for the Ada build. */
13502 verify_variant_match (TYPE_UNSIGNED);
13503 verify_variant_match (TYPE_PACKED);
13504 if (TREE_CODE (t) == REFERENCE_TYPE)
13505 verify_variant_match (TYPE_REF_IS_RVALUE);
13506 if (AGGREGATE_TYPE_P (t))
13507 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13508 else
13509 verify_variant_match (TYPE_SATURATING);
13510 /* FIXME: This check triggers during the libstdc++ build. */
13511 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13512 verify_variant_match (TYPE_FINAL_P);
13514 /* tree_type_common checks. */
13516 if (COMPLETE_TYPE_P (t))
13518 verify_variant_match (TYPE_MODE);
13519 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13520 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13521 verify_variant_match (TYPE_SIZE);
13522 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13523 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13524 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13526 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13527 TYPE_SIZE_UNIT (tv), 0));
13528 error ("type variant has different TYPE_SIZE_UNIT");
13529 debug_tree (tv);
13530 error ("type variant's TYPE_SIZE_UNIT");
13531 debug_tree (TYPE_SIZE_UNIT (tv));
13532 error ("type's TYPE_SIZE_UNIT");
13533 debug_tree (TYPE_SIZE_UNIT (t));
13534 return false;
13537 verify_variant_match (TYPE_PRECISION);
13538 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13539 if (RECORD_OR_UNION_TYPE_P (t))
13540 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13541 else if (TREE_CODE (t) == ARRAY_TYPE)
13542 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13543 /* During LTO we merge variant lists from different translation units
13544 that may differ by TYPE_CONTEXT, which in turn may point
13545 to TRANSLATION_UNIT_DECL.
13546 Ada also builds variants of types with different TYPE_CONTEXT. */
13547 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13548 verify_variant_match (TYPE_CONTEXT);
13549 verify_variant_match (TYPE_STRING_FLAG);
13550 if (TYPE_ALIAS_SET_KNOWN_P (t))
13552 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13553 debug_tree (tv);
13554 return false;
13557 /* tree_type_non_common checks. */
13559 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13560 and dangles the pointer from time to time. */
13561 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13562 && (in_lto_p || !TYPE_VFIELD (tv)
13563 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13565 error ("type variant has different TYPE_VFIELD");
13566 debug_tree (tv);
13567 return false;
13569 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13570 || TREE_CODE (t) == INTEGER_TYPE
13571 || TREE_CODE (t) == BOOLEAN_TYPE
13572 || TREE_CODE (t) == REAL_TYPE
13573 || TREE_CODE (t) == FIXED_POINT_TYPE)
13575 verify_variant_match (TYPE_MAX_VALUE);
13576 verify_variant_match (TYPE_MIN_VALUE);
13578 if (TREE_CODE (t) == METHOD_TYPE)
13579 verify_variant_match (TYPE_METHOD_BASETYPE);
13580 if (TREE_CODE (t) == OFFSET_TYPE)
13581 verify_variant_match (TYPE_OFFSET_BASETYPE);
13582 if (TREE_CODE (t) == ARRAY_TYPE)
13583 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13584 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13585 or even in the type's main variant. This is needed to make bootstrap pass
13586 and the bug seems new in GCC 5.
13587 The C++ FE should be updated to make this consistent and we should check
13588 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise it
13589 matches the main variant's TYPE_BINFO.
13591 Also disable the check for Java for now because of a parser hack that builds
13592 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
13593 of the copies. */
13594 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13595 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13596 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13597 Since there is no cheap way to tell a C++ type from a Java one w/o LTO,
13598 do the checking at LTO time only. */
13599 && (in_lto_p && odr_type_p (t)))
13601 error ("type variant has different TYPE_BINFO");
13602 debug_tree (tv);
13603 error ("type variant's TYPE_BINFO");
13604 debug_tree (TYPE_BINFO (tv));
13605 error ("type's TYPE_BINFO");
13606 debug_tree (TYPE_BINFO (t));
13607 return false;
13610 /* Check various uses of TYPE_VALUES_RAW. */
13611 if (TREE_CODE (t) == ENUMERAL_TYPE
13612 && TYPE_VALUES (t))
13613 verify_variant_match (TYPE_VALUES);
13614 else if (TREE_CODE (t) == ARRAY_TYPE)
13615 verify_variant_match (TYPE_DOMAIN);
13616 /* Permit incomplete variants of complete type. While FEs may complete
13617 all variants, this does not happen for C++ templates in all cases. */
13618 else if (RECORD_OR_UNION_TYPE_P (t)
13619 && COMPLETE_TYPE_P (t)
13620 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13622 tree f1, f2;
13624 /* Fortran builds qualified variants as new records with items of
13625 qualified type. Verify that they look the same. */
13626 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13627 f1 && f2;
13628 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13629 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13630 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13631 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13632 /* FIXME: gfc_nonrestricted_type builds all types as variants
13633 with the exception of pointer types. It deeply copies the type,
13634 which means that we may end up with a variant type
13635 referring to a non-variant pointer. We may change it to
13636 produce these types as variants, too, like
13637 objc_get_protocol_qualified_type does. */
13638 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13639 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13640 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13641 break;
13642 if (f1 || f2)
13644 error ("type variant has different TYPE_FIELDS");
13645 debug_tree (tv);
13646 error ("first mismatch is field");
13647 debug_tree (f1);
13648 error ("and field");
13649 debug_tree (f2);
13650 return false;
13653 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13654 verify_variant_match (TYPE_ARG_TYPES);
13655 /* For C++ the qualified variant of an array type is really an array type
13656 of the qualified TREE_TYPE.
13657 ObjC builds variants of pointer types where the pointed-to type is a
13658 variant, too, in objc_get_protocol_qualified_type. */
13659 if (TREE_TYPE (t) != TREE_TYPE (tv)
13660 && ((TREE_CODE (t) != ARRAY_TYPE
13661 && !POINTER_TYPE_P (t))
13662 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13663 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13665 error ("type variant has different TREE_TYPE");
13666 debug_tree (tv);
13667 error ("type variant's TREE_TYPE");
13668 debug_tree (TREE_TYPE (tv));
13669 error ("type's TREE_TYPE");
13670 debug_tree (TREE_TYPE (t));
13671 return false;
13673 if (type_with_alias_set_p (t)
13674 && !gimple_canonical_types_compatible_p (t, tv, false))
13676 error ("type is not compatible with its variant");
13677 debug_tree (tv);
13678 error ("type variant's TREE_TYPE");
13679 debug_tree (TREE_TYPE (tv));
13680 error ("type's TREE_TYPE");
13681 debug_tree (TREE_TYPE (t));
13682 return false;
13684 return true;
13685 #undef verify_variant_match
13689 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13690 the middle-end types_compatible_p function. It needs to avoid
13691 claiming types are different for types that should be treated
13692 the same with respect to TBAA. Canonical types are also used
13693 for IL consistency checks via the useless_type_conversion_p
13694 predicate which does not handle all type kinds itself but falls
13695 back to pointer-comparison of TYPE_CANONICAL for aggregates
13696 for example. */
13698 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13699 type calculation because we need to allow inter-operability between signed
13700 and unsigned variants. */
13702 bool
13703 type_with_interoperable_signedness (const_tree type)
13705 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13706 signed char and unsigned char. Similarly the Fortran FE builds
13707 C_SIZE_T as a signed type, while C defines it as unsigned. */
13709 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13710 == INTEGER_TYPE
13711 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13712 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
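/* Illustrative expectations (a sketch using the standard global type
   nodes; the integer_type_node case assumes a target where size_t is
   wider than int):

     type_with_interoperable_signedness (signed_char_type_node)  -> true
     type_with_interoperable_signedness (size_type_node)         -> true
     type_with_interoperable_signedness (integer_type_node)      -> false
     type_with_interoperable_signedness (float_type_node)        -> false  */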
13715 /* Return true iff T1 and T2 are structurally identical as far as
13716 TBAA is concerned.
13717 This function is used both by lto.c canonical type merging and by the
13718 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of types
13719 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13720 only for LTO because only in that case TYPE_CANONICAL equivalence
13721 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13723 bool
13724 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13725 bool trust_type_canonical)
13727 /* Type variants should be the same as the main variant. When not doing
13728 sanity checking to verify this fact, go to main variants and save some work. */
13729 if (trust_type_canonical)
13731 t1 = TYPE_MAIN_VARIANT (t1);
13732 t2 = TYPE_MAIN_VARIANT (t2);
13735 /* Check first for the obvious case of pointer identity. */
13736 if (t1 == t2)
13737 return true;
13739 /* Check that we have two types to compare. */
13740 if (t1 == NULL_TREE || t2 == NULL_TREE)
13741 return false;
13743 /* We consider complete types always compatible with incomplete types.
13744 This does not make sense for canonical type calculation and thus we
13745 need to ensure that we are never called on incomplete types here.
13747 FIXME: For more correctness the function probably should have three modes
13748 1) a mode assuming that types are complete and matching their structure
13749 2) a mode allowing incomplete types but producing equivalence classes
13750 and thus ignoring all info from complete types
13751 3) a mode allowing incomplete types to match complete ones but checking
13752 compatibility between complete types.
13754 1 and 2 can be used for canonical type calculation. 3 is the real
13755 definition of type compatibility that can be used e.g. for warnings during
13756 declaration merging. */
13758 gcc_assert (!trust_type_canonical
13759 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13760 /* If the types have been previously registered and found equal
13761 they still are. */
13763 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13764 && trust_type_canonical)
13766 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13767 it is always NULL, but it is set to non-NULL for types
13768 constructed by build_pointer_type and variants. In this case
13769 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13770 all pointers are considered equal). Be sure not to return false
13771 negatives. */
13772 gcc_checking_assert (canonical_type_used_p (t1)
13773 && canonical_type_used_p (t2));
13774 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13777 /* Can't be the same type if the types don't have the same code. */
13778 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13779 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13780 return false;
13782 /* Qualifiers do not matter for canonical type comparison purposes. */
13784 /* Void types and nullptr types are always the same. */
13785 if (TREE_CODE (t1) == VOID_TYPE
13786 || TREE_CODE (t1) == NULLPTR_TYPE)
13787 return true;
13789 /* Can't be the same type if they have different modes. */
13790 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13791 return false;
13793 /* Non-aggregate types can be handled cheaply. */
13794 if (INTEGRAL_TYPE_P (t1)
13795 || SCALAR_FLOAT_TYPE_P (t1)
13796 || FIXED_POINT_TYPE_P (t1)
13797 || TREE_CODE (t1) == VECTOR_TYPE
13798 || TREE_CODE (t1) == COMPLEX_TYPE
13799 || TREE_CODE (t1) == OFFSET_TYPE
13800 || POINTER_TYPE_P (t1))
13802 /* Can't be the same type if they have different precision. */
13803 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13804 return false;
13806 /* In some cases the signed and unsigned types are required to be
13807 inter-operable. */
13808 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13809 && !type_with_interoperable_signedness (t1))
13810 return false;
13812 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13813 interoperable with "signed char". Unless all frontends are revisited
13814 to agree on these types, we must ignore the flag completely. */
13816 /* The Fortran standard defines the C_PTR type, which is compatible with every
13817 C pointer. For this reason we need to glob all pointers into one.
13818 Still, pointers in different address spaces are not compatible. */
13819 if (POINTER_TYPE_P (t1))
13821 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13822 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13823 return false;
13826 /* Tail-recurse to components. */
13827 if (TREE_CODE (t1) == VECTOR_TYPE
13828 || TREE_CODE (t1) == COMPLEX_TYPE)
13829 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13830 TREE_TYPE (t2),
13831 trust_type_canonical);
13833 return true;
13836 /* Do type-specific comparisons. */
13837 switch (TREE_CODE (t1))
13839 case ARRAY_TYPE:
13840 /* Array types are the same if the element types are the same and
13841 the number of elements is the same. */
13842 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13843 trust_type_canonical)
13844 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13845 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13846 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13847 return false;
13848 else
13850 tree i1 = TYPE_DOMAIN (t1);
13851 tree i2 = TYPE_DOMAIN (t2);
13853 /* For an incomplete external array, the type domain can be
13854 NULL_TREE. Check this condition also. */
13855 if (i1 == NULL_TREE && i2 == NULL_TREE)
13856 return true;
13857 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13858 return false;
13859 else
13861 tree min1 = TYPE_MIN_VALUE (i1);
13862 tree min2 = TYPE_MIN_VALUE (i2);
13863 tree max1 = TYPE_MAX_VALUE (i1);
13864 tree max2 = TYPE_MAX_VALUE (i2);
13866 /* The minimum/maximum values have to be the same. */
13867 if ((min1 == min2
13868 || (min1 && min2
13869 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13870 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13871 || operand_equal_p (min1, min2, 0))))
13872 && (max1 == max2
13873 || (max1 && max2
13874 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13875 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13876 || operand_equal_p (max1, max2, 0)))))
13877 return true;
13878 else
13879 return false;
13883 case METHOD_TYPE:
13884 case FUNCTION_TYPE:
13885 /* Function types are the same if the return type and argument types
13886 are the same. */
13887 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13888 trust_type_canonical))
13889 return false;
13891 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13892 return true;
13893 else
13895 tree parms1, parms2;
13897 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13898 parms1 && parms2;
13899 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13901 if (!gimple_canonical_types_compatible_p
13902 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13903 trust_type_canonical))
13904 return false;
13907 if (parms1 || parms2)
13908 return false;
13910 return true;
13913 case RECORD_TYPE:
13914 case UNION_TYPE:
13915 case QUAL_UNION_TYPE:
13917 tree f1, f2;
13919 /* Don't try to compare variants of an incomplete type, before
13920 TYPE_FIELDS has been copied around. */
13921 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13922 return true;
13925 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13926 return false;
13928 /* For aggregate types, all the fields must be the same. */
13929 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13930 f1 || f2;
13931 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13933 /* Skip non-fields and zero-sized fields. */
13934 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13935 || (DECL_SIZE (f1)
13936 && integer_zerop (DECL_SIZE (f1)))))
13937 f1 = TREE_CHAIN (f1);
13938 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13939 || (DECL_SIZE (f2)
13940 && integer_zerop (DECL_SIZE (f2)))))
13941 f2 = TREE_CHAIN (f2);
13942 if (!f1 || !f2)
13943 break;
13944 /* The fields must have the same addressability, offset and type. */
13945 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13946 || !gimple_compare_field_offset (f1, f2)
13947 || !gimple_canonical_types_compatible_p
13948 (TREE_TYPE (f1), TREE_TYPE (f2),
13949 trust_type_canonical))
13950 return false;
13953 /* If one aggregate has more fields than the other, they
13954 are not the same. */
13955 if (f1 || f2)
13956 return false;
13958 return true;
13961 default:
13962 /* Consider all types with language specific trees in them mutually
13963 compatible. This is executed only from verify_type and false
13964 positives can be tolerated. */
13965 gcc_assert (!in_lto_p);
13966 return true;
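/* A minimal sketch of the structural mode used by verify_type below:

     if (TYPE_CANONICAL (t)
         && !gimple_canonical_types_compatible_p (t, TYPE_CANONICAL (t),
                                                  false))
       error ("TYPE_CANONICAL is not compatible");

   Passing false for TRUST_TYPE_CANONICAL forces a full structural walk
   instead of short-circuiting through already computed TYPE_CANONICAL
   pointers.  */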
13970 /* Verify type T. */
13972 void
13973 verify_type (const_tree t)
13975 bool error_found = false;
13976 tree mv = TYPE_MAIN_VARIANT (t);
13977 if (!mv)
13979 error ("Main variant is not defined");
13980 error_found = true;
13982 else if (mv != TYPE_MAIN_VARIANT (mv))
13984 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13985 debug_tree (mv);
13986 error_found = true;
13988 else if (t != mv && !verify_type_variant (t, mv))
13989 error_found = true;
13991 tree ct = TYPE_CANONICAL (t);
13992 if (!ct)
13994 else if (TYPE_CANONICAL (t) != ct)
13996 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13997 debug_tree (ct);
13998 error_found = true;
14000 /* Method and function types cannot be used to address memory and thus
14001 TYPE_CANONICAL really matters only for determining useless conversions.
14003 FIXME: The C++ FE produces declarations of builtin functions that are not
14004 compatible with main variants. */
14005 else if (TREE_CODE (t) == FUNCTION_TYPE)
14007 else if (t != ct
14008 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14009 with variably sized arrays because their sizes are possibly
14010 gimplified to different variables. */
14011 && !variably_modified_type_p (ct, NULL)
14012 && !gimple_canonical_types_compatible_p (t, ct, false)
14013 && COMPLETE_TYPE_P (t))
14015 error ("TYPE_CANONICAL is not compatible");
14016 debug_tree (ct);
14017 error_found = true;
14020 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14021 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14023 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
14024 debug_tree (ct);
14025 error_found = true;
14027 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14029 error ("TYPE_CANONICAL of main variant is not main variant");
14030 debug_tree (ct);
14031 debug_tree (TYPE_MAIN_VARIANT (ct));
14032 error_found = true;
14036 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14037 if (RECORD_OR_UNION_TYPE_P (t))
14039 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14040 and dangles the pointer from time to time. */
14041 if (TYPE_VFIELD (t)
14042 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14043 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14045 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
14046 debug_tree (TYPE_VFIELD (t));
14047 error_found = true;
14050 else if (TREE_CODE (t) == POINTER_TYPE)
14052 if (TYPE_NEXT_PTR_TO (t)
14053 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14055 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
14056 debug_tree (TYPE_NEXT_PTR_TO (t));
14057 error_found = true;
14060 else if (TREE_CODE (t) == REFERENCE_TYPE)
14062 if (TYPE_NEXT_REF_TO (t)
14063 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14065 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
14066 debug_tree (TYPE_NEXT_REF_TO (t));
14067 error_found = true;
14070 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14071 || TREE_CODE (t) == FIXED_POINT_TYPE)
14073 /* FIXME: The following check should pass:
14074 useless_type_conversion_p (const_cast <tree> (t),
14075 TREE_TYPE (TYPE_MIN_VALUE (t)))
14076 but does not for C sizetypes in LTO. */
14079 /* Check various uses of TYPE_MAX_VALUE_RAW. */
14080 if (RECORD_OR_UNION_TYPE_P (t))
14082 if (!TYPE_BINFO (t))
14084 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14086 error ("TYPE_BINFO is not TREE_BINFO");
14087 debug_tree (TYPE_BINFO (t));
14088 error_found = true;
14090 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14092 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
14093 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14094 error_found = true;
14097 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14099 if (TYPE_METHOD_BASETYPE (t)
14100 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14101 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14103 error ("TYPE_METHOD_BASETYPE is not record nor union");
14104 debug_tree (TYPE_METHOD_BASETYPE (t));
14105 error_found = true;
14108 else if (TREE_CODE (t) == OFFSET_TYPE)
14110 if (TYPE_OFFSET_BASETYPE (t)
14111 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14112 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14114 error ("TYPE_OFFSET_BASETYPE is not record nor union");
14115 debug_tree (TYPE_OFFSET_BASETYPE (t));
14116 error_found = true;
14119 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14120 || TREE_CODE (t) == FIXED_POINT_TYPE)
14122 /* FIXME: The following check should pass:
14123 useless_type_conversion_p (const_cast <tree> (t),
14124 TREE_TYPE (TYPE_MAX_VALUE (t)))
14125 but does not for C sizetypes in LTO. */
14127 else if (TREE_CODE (t) == ARRAY_TYPE)
14129 if (TYPE_ARRAY_MAX_SIZE (t)
14130 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14132 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
14133 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14134 error_found = true;
14137 else if (TYPE_MAX_VALUE_RAW (t))
14139 error ("TYPE_MAX_VALUE_RAW non-NULL");
14140 debug_tree (TYPE_MAX_VALUE_RAW (t));
14141 error_found = true;
14144 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14146 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
14147 debug_tree (TYPE_LANG_SLOT_1 (t));
14148 error_found = true;
14151 /* Check various uses of TYPE_VALUES_RAW. */
14152 if (TREE_CODE (t) == ENUMERAL_TYPE)
14153 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14155 tree value = TREE_VALUE (l);
14156 tree name = TREE_PURPOSE (l);
14158 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14159 a CONST_DECL of ENUMERAL_TYPE. */
14160 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14162 error ("Enum value is not CONST_DECL or INTEGER_CST");
14163 debug_tree (value);
14164 debug_tree (name);
14165 error_found = true;
14167 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14168 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14170 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
14171 debug_tree (value);
14172 debug_tree (name);
14173 error_found = true;
14175 if (TREE_CODE (name) != IDENTIFIER_NODE)
14177 error ("Enum value name is not IDENTIFIER_NODE");
14178 debug_tree (value);
14179 debug_tree (name);
14180 error_found = true;
14183 else if (TREE_CODE (t) == ARRAY_TYPE)
14185 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14187 error ("Array TYPE_DOMAIN is not integer type");
14188 debug_tree (TYPE_DOMAIN (t));
14189 error_found = true;
14192 else if (RECORD_OR_UNION_TYPE_P (t))
14194 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14196 error ("TYPE_FIELDS defined in incomplete type");
14197 error_found = true;
14199 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14201 /* TODO: verify properties of decls. */
14202 if (TREE_CODE (fld) == FIELD_DECL)
14204 else if (TREE_CODE (fld) == TYPE_DECL)
14206 else if (TREE_CODE (fld) == CONST_DECL)
14208 else if (VAR_P (fld))
14210 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14212 else if (TREE_CODE (fld) == USING_DECL)
14214 else if (TREE_CODE (fld) == FUNCTION_DECL)
14216 else
14218 error ("Wrong tree in TYPE_FIELDS list");
14219 debug_tree (fld);
14220 error_found = true;
14224 else if (TREE_CODE (t) == INTEGER_TYPE
14225 || TREE_CODE (t) == BOOLEAN_TYPE
14226 || TREE_CODE (t) == OFFSET_TYPE
14227 || TREE_CODE (t) == REFERENCE_TYPE
14228 || TREE_CODE (t) == NULLPTR_TYPE
14229 || TREE_CODE (t) == POINTER_TYPE)
14231 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14233 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
14234 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14235 error_found = true;
14237 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14239 error ("TYPE_CACHED_VALUES is not TREE_VEC");
14240 debug_tree (TYPE_CACHED_VALUES (t));
14241 error_found = true;
14243 /* Verify just enough of the cache to ensure that no one copied it to a new
14244 type. All copying should go through copy_node, which should clear it. */
14245 else if (TYPE_CACHED_VALUES_P (t))
14247 int i;
14248 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14249 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14250 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14252 error ("wrong TYPE_CACHED_VALUES entry");
14253 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14254 error_found = true;
14255 break;
14259 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14260 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14262 /* C++ FE uses TREE_PURPOSE to store initial values. */
14263 if (TREE_PURPOSE (l) && in_lto_p)
14265 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
14266 debug_tree (l);
14267 error_found = true;
14269 if (!TYPE_P (TREE_VALUE (l)))
14271 error ("Wrong entry in TYPE_ARG_TYPES list");
14272 debug_tree (l);
14273 error_found = true;
14276 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14278 error ("TYPE_VALUES_RAW field is non-NULL");
14279 debug_tree (TYPE_VALUES_RAW (t));
14280 error_found = true;
14282 if (TREE_CODE (t) != INTEGER_TYPE
14283 && TREE_CODE (t) != BOOLEAN_TYPE
14284 && TREE_CODE (t) != OFFSET_TYPE
14285 && TREE_CODE (t) != REFERENCE_TYPE
14286 && TREE_CODE (t) != NULLPTR_TYPE
14287 && TREE_CODE (t) != POINTER_TYPE
14288 && TYPE_CACHED_VALUES_P (t))
14290 error ("TYPE_CACHED_VALUES_P is set while it should not");
14291 error_found = true;
14293 if (TYPE_STRING_FLAG (t)
14294 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
14296 error ("TYPE_STRING_FLAG is set on wrong type code");
14297 error_found = true;
14300 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14301 the TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14302 of a type. */
14303 if (TREE_CODE (t) == METHOD_TYPE
14304 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14306 error ("TYPE_METHOD_BASETYPE is not main variant");
14307 error_found = true;
14310 if (error_found)
14312 debug_tree (const_cast <tree> (t));
14313 internal_error ("verify_type failed");
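/* A sketch of a typical call site (hypothetical; CV is just an example
   of a freshly built variant type):

     tree cv = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     if (flag_checking)
       verify_type (cv);

   On failure the routine dumps the offending tree and aborts through
   internal_error, so it is meant for checking builds only.  */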
14318 /* Return 1 if ARG interpreted as signed in its precision is known to be
14319 always positive, 2 if ARG is known to be always negative, or 3 if
14320 ARG may be positive or negative. */
14323 get_range_pos_neg (tree arg)
14325 if (arg == error_mark_node)
14326 return 3;
14328 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14329 int cnt = 0;
14330 if (TREE_CODE (arg) == INTEGER_CST)
14332 wide_int w = wi::sext (wi::to_wide (arg), prec);
14333 if (wi::neg_p (w))
14334 return 2;
14335 else
14336 return 1;
14338 while (CONVERT_EXPR_P (arg)
14339 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14340 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14342 arg = TREE_OPERAND (arg, 0);
14343 /* A narrower value zero-extended into a wider type
14344 will always result in positive values. */
14345 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14346 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14347 return 1;
14348 prec = TYPE_PRECISION (TREE_TYPE (arg));
14349 if (++cnt > 30)
14350 return 3;
14353 if (TREE_CODE (arg) != SSA_NAME)
14354 return 3;
14355 wide_int arg_min, arg_max;
14356 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14358 gimple *g = SSA_NAME_DEF_STMT (arg);
14359 if (is_gimple_assign (g)
14360 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14362 tree t = gimple_assign_rhs1 (g);
14363 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14364 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14366 if (TYPE_UNSIGNED (TREE_TYPE (t))
14367 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14368 return 1;
14369 prec = TYPE_PRECISION (TREE_TYPE (t));
14370 arg = t;
14371 if (++cnt > 30)
14372 return 3;
14373 continue;
14376 return 3;
14378 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14380 /* For unsigned values, the "positive" range comes
14381 below the "negative" range. */
14382 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14383 return 1;
14384 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14385 return 2;
14387 else
14389 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14390 return 1;
14391 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14392 return 2;
14394 return 3;
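/* Illustrative results (a sketch using build_int_cst on the standard
   integer type node):

     get_range_pos_neg (build_int_cst (integer_type_node, 42))  == 1
     get_range_pos_neg (build_int_cst (integer_type_node, -7))  == 2
     get_range_pos_neg (error_mark_node)                        == 3

   For SSA names the answer additionally consults get_range_info, so 3
   simply means that both signs remain possible (or nothing is known).  */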
14400 /* Return true if ARG is marked with the nonnull attribute in the
14401 current function signature. */
14403 bool
14404 nonnull_arg_p (const_tree arg)
14406 tree t, attrs, fntype;
14407 unsigned HOST_WIDE_INT arg_num;
14409 gcc_assert (TREE_CODE (arg) == PARM_DECL
14410 && (POINTER_TYPE_P (TREE_TYPE (arg))
14411 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14413 /* The static chain decl is always non-null. */
14414 if (arg == cfun->static_chain_decl)
14415 return true;
14417 /* The THIS argument of a method is always non-NULL. */
14418 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14419 && arg == DECL_ARGUMENTS (cfun->decl)
14420 && flag_delete_null_pointer_checks)
14421 return true;
14423 /* Values passed by reference are always non-NULL. */
14424 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14425 && flag_delete_null_pointer_checks)
14426 return true;
14428 fntype = TREE_TYPE (cfun->decl);
14429 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14431 attrs = lookup_attribute ("nonnull", attrs);
14433 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14434 if (attrs == NULL_TREE)
14435 return false;
14437 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14438 if (TREE_VALUE (attrs) == NULL_TREE)
14439 return true;
14441 /* Get the position number for ARG in the function signature. */
14442 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14444 t = DECL_CHAIN (t), arg_num++)
14446 if (t == arg)
14447 break;
14450 gcc_assert (t == arg);
14452 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14453 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14455 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14456 return true;
14460 return false;
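/* A usage sketch (hypothetical function COPY; only argument 1 is listed
   in the attribute):

     void copy (char *dst, const char *src) __attribute__ ((nonnull (1)));

   While compiling the body of COPY, nonnull_arg_p returns true for the
   PARM_DECL of DST (argument 1) and false for SRC, because SRC's position
   is not mentioned in the nonnull list.  */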
14463 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14464 information. */
14466 location_t
14467 set_block (location_t loc, tree block)
14469 location_t pure_loc = get_pure_location (loc);
14470 source_range src_range = get_range_from_loc (line_table, loc);
14471 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14474 location_t
14475 set_source_range (tree expr, location_t start, location_t finish)
14477 source_range src_range;
14478 src_range.m_start = start;
14479 src_range.m_finish = finish;
14480 return set_source_range (expr, src_range);
14483 location_t
14484 set_source_range (tree expr, source_range src_range)
14486 if (!EXPR_P (expr))
14487 return UNKNOWN_LOCATION;
14489 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14490 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14491 pure_loc,
14492 src_range,
14493 NULL);
14494 SET_EXPR_LOCATION (expr, adhoc);
14495 return adhoc;
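/* A minimal sketch (hypothetical locals; LHS/RHS are existing trees and
   START/FINISH ordinary location_t values):

     tree sum = build2 (PLUS_EXPR, integer_type_node, lhs, rhs);
     location_t combined = set_source_range (sum, start, finish);

   The returned ad-hoc location keeps the caret of the expression's
   original location and attaches the [START, FINISH] range; for
   non-expressions UNKNOWN_LOCATION is returned and nothing is changed.  */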
14498 /* Return EXPR, potentially wrapped with an expression node carrying location LOC,
14499 if !CAN_HAVE_LOCATION_P (expr).
14501 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14502 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14504 Wrapper nodes can be identified using location_wrapper_p. */
14506 tree
14507 maybe_wrap_with_location (tree expr, location_t loc)
14509 if (expr == NULL)
14510 return NULL;
14511 if (loc == UNKNOWN_LOCATION)
14512 return expr;
14513 if (CAN_HAVE_LOCATION_P (expr))
14514 return expr;
14515 /* We should only be adding wrappers for constants and for decls,
14516 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14517 gcc_assert (CONSTANT_CLASS_P (expr)
14518 || DECL_P (expr)
14519 || EXCEPTIONAL_CLASS_P (expr));
14521 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14522 any impact of the wrapper nodes. */
14523 if (EXCEPTIONAL_CLASS_P (expr))
14524 return expr;
14526 tree_code code
14527 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14528 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14529 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14530 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14531 /* Mark this node as being a wrapper. */
14532 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14533 return wrapper;
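/* A short sketch mirroring the selftests at the end of this file:

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, input_location);

   If input_location is a real location, WRAPPED is a NON_LVALUE_EXPR with
   EXPR_LOCATION_WRAPPER_P set and tree_strip_any_location_wrapper (wrapped)
   yields CST again; with UNKNOWN_LOCATION the constant is returned
   unchanged.  */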
14536 /* Return the name of combined function FN, for debugging purposes. */
14538 const char *
14539 combined_fn_name (combined_fn fn)
14541 if (builtin_fn_p (fn))
14543 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14544 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14546 else
14547 return internal_fn_name (as_internal_fn (fn));
14550 /* Return a bitmap with a bit set corresponding to each argument in
14551 a function call type FNTYPE declared with attribute nonnull,
14552 or null if none of the function's arguments are nonnull. The caller
14553 must free the bitmap. */
14555 bitmap
14556 get_nonnull_args (const_tree fntype)
14558 if (fntype == NULL_TREE)
14559 return NULL;
14561 tree attrs = TYPE_ATTRIBUTES (fntype);
14562 if (!attrs)
14563 return NULL;
14565 bitmap argmap = NULL;
14567 /* A function declaration can specify multiple attribute nonnull,
14568 each with zero or more arguments. The loop below creates a bitmap
14569 representing a union of all the arguments. An empty (but non-null)
14570 bitmap means that all arguments have been declared nonnull. */
14571 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14573 attrs = lookup_attribute ("nonnull", attrs);
14574 if (!attrs)
14575 break;
14577 if (!argmap)
14578 argmap = BITMAP_ALLOC (NULL);
14580 if (!TREE_VALUE (attrs))
14582 /* Clear the bitmap in case a previous attribute nonnull
14583 set it and this one overrides it for all arguments. */
14584 bitmap_clear (argmap);
14585 return argmap;
14588 /* Iterate over the indices of the format arguments declared nonnull
14589 and set a bit for each. */
14590 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14592 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14593 bitmap_set_bit (argmap, val);
14597 return argmap;
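/* A consumer sketch (hypothetical; FNTYPE is the TREE_TYPE of some
   FUNCTION_DECL and ARGNO a zero-based argument index):

     bitmap nonnull = get_nonnull_args (fntype);
     if (nonnull)
       {
         bool all_nonnull = bitmap_empty_p (nonnull);
         bool arg_nonnull = all_nonnull || bitmap_bit_p (nonnull, argno);
         BITMAP_FREE (nonnull);
       }

   An empty (but non-null) bitmap means every argument was declared
   nonnull; otherwise the set bits are the zero-based positions listed in
   the attribute.  */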
14600 /* Return true if TYPE is an empty type: a structure or union whose non-padding
14601 fields are recursively empty, or an array of empty elements or of zero/unknown length. */
14603 static bool
14604 default_is_empty_type (tree type)
14606 if (RECORD_OR_UNION_TYPE_P (type))
14608 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14609 if (TREE_CODE (field) == FIELD_DECL
14610 && !DECL_PADDING_P (field)
14611 && !default_is_empty_type (TREE_TYPE (field)))
14612 return false;
14613 return true;
14615 else if (TREE_CODE (type) == ARRAY_TYPE)
14616 return (integer_minus_onep (array_type_nelts (type))
14617 || TYPE_DOMAIN (type) == NULL_TREE
14618 || default_is_empty_type (TREE_TYPE (type)));
14619 return false;
14622 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14623 that shouldn't be passed via stack. */
14625 bool
14626 default_is_empty_record (const_tree type)
14628 if (!abi_version_at_least (12))
14629 return false;
14631 if (type == error_mark_node)
14632 return false;
14634 if (TREE_ADDRESSABLE (type))
14635 return false;
14637 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
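/* Roughly (a sketch of the intended classification, assuming the C++ FE
   marks the dummy field of an empty class as DECL_PADDING_P):

     struct empty { };
     struct still_empty { empty e; empty arr[4]; };

   both count as empty records here once -fabi-version is 12 or later,
   while any type that is TREE_ADDRESSABLE (must be passed by reference)
   or that contains a real scalar member does not.  */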
14640 /* Like int_size_in_bytes, but handle empty records specially. */
14642 HOST_WIDE_INT
14643 arg_int_size_in_bytes (const_tree type)
14645 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14648 /* Like size_in_bytes, but handle empty records specially. */
14650 tree
14651 arg_size_in_bytes (const_tree type)
14653 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14656 /* Return true if an expression with CODE has to have the same result type as
14657 its first operand. */
14659 bool
14660 expr_type_first_operand_type_p (tree_code code)
14662 switch (code)
14664 case NEGATE_EXPR:
14665 case ABS_EXPR:
14666 case BIT_NOT_EXPR:
14667 case PAREN_EXPR:
14668 case CONJ_EXPR:
14670 case PLUS_EXPR:
14671 case MINUS_EXPR:
14672 case MULT_EXPR:
14673 case TRUNC_DIV_EXPR:
14674 case CEIL_DIV_EXPR:
14675 case FLOOR_DIV_EXPR:
14676 case ROUND_DIV_EXPR:
14677 case TRUNC_MOD_EXPR:
14678 case CEIL_MOD_EXPR:
14679 case FLOOR_MOD_EXPR:
14680 case ROUND_MOD_EXPR:
14681 case RDIV_EXPR:
14682 case EXACT_DIV_EXPR:
14683 case MIN_EXPR:
14684 case MAX_EXPR:
14685 case BIT_IOR_EXPR:
14686 case BIT_XOR_EXPR:
14687 case BIT_AND_EXPR:
14689 case LSHIFT_EXPR:
14690 case RSHIFT_EXPR:
14691 case LROTATE_EXPR:
14692 case RROTATE_EXPR:
14693 return true;
14695 default:
14696 return false;
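/* For instance (a sketch):

     expr_type_first_operand_type_p (PLUS_EXPR)   -> true
     expr_type_first_operand_type_p (LSHIFT_EXPR) -> true
     expr_type_first_operand_type_p (EQ_EXPR)     -> false

   Comparisons such as EQ_EXPR produce a boolean-like result type rather
   than the type of their first operand, so they fall into the default
   case.  */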
14700 /* Return a typenode for the "standard" C type with a given name. */
14701 tree
14702 get_typenode_from_name (const char *name)
14704 if (name == NULL || *name == '\0')
14705 return NULL_TREE;
14707 if (strcmp (name, "char") == 0)
14708 return char_type_node;
14709 if (strcmp (name, "unsigned char") == 0)
14710 return unsigned_char_type_node;
14711 if (strcmp (name, "signed char") == 0)
14712 return signed_char_type_node;
14714 if (strcmp (name, "short int") == 0)
14715 return short_integer_type_node;
14716 if (strcmp (name, "short unsigned int") == 0)
14717 return short_unsigned_type_node;
14719 if (strcmp (name, "int") == 0)
14720 return integer_type_node;
14721 if (strcmp (name, "unsigned int") == 0)
14722 return unsigned_type_node;
14724 if (strcmp (name, "long int") == 0)
14725 return long_integer_type_node;
14726 if (strcmp (name, "long unsigned int") == 0)
14727 return long_unsigned_type_node;
14729 if (strcmp (name, "long long int") == 0)
14730 return long_long_integer_type_node;
14731 if (strcmp (name, "long long unsigned int") == 0)
14732 return long_long_unsigned_type_node;
14734 gcc_unreachable ();
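/* Example behavior (a sketch; any name outside the table above is a
   programming error and reaches gcc_unreachable):

     get_typenode_from_name ("unsigned int") == unsigned_type_node
     get_typenode_from_name (NULL)           == NULL_TREE
     get_typenode_from_name ("")             == NULL_TREE  */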
14737 /* List of pointer types used to declare builtins before we have seen their
14738 real declaration.
14740 Keep the size up to date in tree.h ! */
14741 const builtin_structptr_type builtin_structptr_types[6] =
14743 { fileptr_type_node, ptr_type_node, "FILE" },
14744 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14745 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14746 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14747 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14748 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14751 #if CHECKING_P
14753 namespace selftest {
14755 /* Selftests for tree. */
14757 /* Verify that integer constants are sane. */
14759 static void
14760 test_integer_constants ()
14762 ASSERT_TRUE (integer_type_node != NULL);
14763 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14765 tree type = integer_type_node;
14767 tree zero = build_zero_cst (type);
14768 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14769 ASSERT_EQ (type, TREE_TYPE (zero));
14771 tree one = build_int_cst (type, 1);
14772 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14773 ASSERT_EQ (type, TREE_TYPE (zero));
14776 /* Verify identifiers. */
14778 static void
14779 test_identifiers ()
14781 tree identifier = get_identifier ("foo");
14782 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14783 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14786 /* Verify LABEL_DECL. */
14788 static void
14789 test_labels ()
14791 tree identifier = get_identifier ("err");
14792 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14793 identifier, void_type_node);
14794 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14795 ASSERT_FALSE (FORCED_LABEL (label_decl));
14798 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14799 are given by VALS. */
14801 static tree
14802 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
14804 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14805 tree_vector_builder builder (type, vals.length (), 1);
14806 builder.splice (vals);
14807 return builder.build ();
14810 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14812 static void
14813 check_vector_cst (vec<tree> expected, tree actual)
14815 ASSERT_KNOWN_EQ (expected.length (),
14816 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14817 for (unsigned int i = 0; i < expected.length (); ++i)
14818 ASSERT_EQ (wi::to_wide (expected[i]),
14819 wi::to_wide (vector_cst_elt (actual, i)));
14822 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14823 and that its elements match EXPECTED. */
14825 static void
14826 check_vector_cst_duplicate (vec<tree> expected, tree actual,
14827 unsigned int npatterns)
14829 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14830 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14831 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14832 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14833 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14834 check_vector_cst (expected, actual);
14837 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14838 and NPATTERNS background elements, and that its elements match
14839 EXPECTED. */
14841 static void
14842 check_vector_cst_fill (vec<tree> expected, tree actual,
14843 unsigned int npatterns)
14845 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14846 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14847 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14848 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14849 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14850 check_vector_cst (expected, actual);
14853 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14854 and that its elements match EXPECTED. */
14856 static void
14857 check_vector_cst_stepped (vec<tree> expected, tree actual,
14858 unsigned int npatterns)
14860 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14861 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14862 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14863 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14864 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14865 check_vector_cst (expected, actual);
14868 /* Test the creation of VECTOR_CSTs. */
14870 static void
14871 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
14873 auto_vec<tree, 8> elements (8);
14874 elements.quick_grow (8);
14875 tree element_type = build_nonstandard_integer_type (16, true);
14876 tree vector_type = build_vector_type (element_type, 8);
14878 /* Test a simple linear series with a base of 0 and a step of 1:
14879 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14880 for (unsigned int i = 0; i < 8; ++i)
14881 elements[i] = build_int_cst (element_type, i);
14882 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
14883 check_vector_cst_stepped (elements, vector, 1);
14885 /* Try the same with the first element replaced by 100:
14886 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14887 elements[0] = build_int_cst (element_type, 100);
14888 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14889 check_vector_cst_stepped (elements, vector, 1);
14891 /* Try a series that wraps around.
14892 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14893 for (unsigned int i = 1; i < 8; ++i)
14894 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14895 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14896 check_vector_cst_stepped (elements, vector, 1);
14898 /* Try a downward series:
14899 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14900 for (unsigned int i = 1; i < 8; ++i)
14901 elements[i] = build_int_cst (element_type, 80 - i);
14902 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14903 check_vector_cst_stepped (elements, vector, 1);
14905 /* Try two interleaved series with different bases and steps:
14906 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14907 elements[1] = build_int_cst (element_type, 53);
14908 for (unsigned int i = 2; i < 8; i += 2)
14910 elements[i] = build_int_cst (element_type, 70 - i * 2);
14911 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14913 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14914 check_vector_cst_stepped (elements, vector, 2);
14916 /* Try a duplicated value:
14917 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14918 for (unsigned int i = 1; i < 8; ++i)
14919 elements[i] = elements[0];
14920 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14921 check_vector_cst_duplicate (elements, vector, 1);
14923 /* Try an interleaved duplicated value:
14924 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14925 elements[1] = build_int_cst (element_type, 55);
14926 for (unsigned int i = 2; i < 8; ++i)
14927 elements[i] = elements[i - 2];
14928 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14929 check_vector_cst_duplicate (elements, vector, 2);
14931 /* Try a duplicated value with 2 exceptions
14932 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14933 elements[0] = build_int_cst (element_type, 41);
14934 elements[1] = build_int_cst (element_type, 97);
14935 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14936 check_vector_cst_fill (elements, vector, 2);
14938 /* Try with and without a step
14939 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14940 for (unsigned int i = 3; i < 8; i += 2)
14941 elements[i] = build_int_cst (element_type, i * 7);
14942 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14943 check_vector_cst_stepped (elements, vector, 2);
14945 /* Try a fully-general constant:
14946 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14947 elements[5] = build_int_cst (element_type, 9990);
14948 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14949 check_vector_cst_fill (elements, vector, 4);
14952 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14953 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14954 modifying its argument in-place. */
14956 static void
14957 check_strip_nops (tree node, tree expected)
14959 STRIP_NOPS (node);
14960 ASSERT_EQ (expected, node);
14963 /* Verify location wrappers. */
14965 static void
14966 test_location_wrappers ()
14968 location_t loc = BUILTINS_LOCATION;
14970 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
14972 /* Wrapping a constant. */
14973 tree int_cst = build_int_cst (integer_type_node, 42);
14974 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
14975 ASSERT_FALSE (location_wrapper_p (int_cst));
14977 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
14978 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
14979 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
14980 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
14982 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14983 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
14985 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14986 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
14987 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
14988 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
14990 /* Wrapping a STRING_CST. */
14991 tree string_cst = build_string (4, "foo");
14992 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
14993 ASSERT_FALSE (location_wrapper_p (string_cst));
14995 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
14996 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
14997 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
14998 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
14999 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15002 /* Wrapping a variable. */
15003 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15004 get_identifier ("some_int_var"),
15005 integer_type_node);
15006 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15007 ASSERT_FALSE (location_wrapper_p (int_var));
15009 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15010 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15011 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15012 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15014 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15015 wrapper. */
15016 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15017 ASSERT_FALSE (location_wrapper_p (r_cast));
15018 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15020 /* Verify that STRIP_NOPS removes wrappers. */
15021 check_strip_nops (wrapped_int_cst, int_cst);
15022 check_strip_nops (wrapped_string_cst, string_cst);
15023 check_strip_nops (wrapped_int_var, int_var);
15026 /* Check that string escaping works correctly. */
15028 static void
15029 test_escaped_strings (void)
15031 int saved_cutoff;
15032 escaped_string msg;
15034 msg.escape (NULL);
15035 /* ASSERT_STREQ does not accept NULL as a valid test
15036 result, so we have to use ASSERT_EQ instead. */
15037 ASSERT_EQ (NULL, (const char *) msg);
15039 msg.escape ("");
15040 ASSERT_STREQ ("", (const char *) msg);
15042 msg.escape ("foobar");
15043 ASSERT_STREQ ("foobar", (const char *) msg);
15045 /* Ensure that we have -fmessage-length set to 0. */
15046 saved_cutoff = pp_line_cutoff (global_dc->printer);
15047 pp_line_cutoff (global_dc->printer) = 0;
15049 msg.escape ("foo\nbar");
15050 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15052 msg.escape ("\a\b\f\n\r\t\v");
15053 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15055 /* Now repeat the tests with -fmessage-length set to 5. */
15056 pp_line_cutoff (global_dc->printer) = 5;
15058 /* Note that the newline is not translated into an escape. */
15059 msg.escape ("foo\nbar");
15060 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15062 msg.escape ("\a\b\f\n\r\t\v");
15063 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15065 /* Restore the original message length setting. */
15066 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15069 /* Run all of the selftests within this file. */
15071 void
15072 tree_c_tests ()
15074 test_integer_constants ();
15075 test_identifiers ();
15076 test_labels ();
15077 test_vector_cst_patterns ();
15078 test_location_wrappers ();
15079 test_escaped_strings ();
15082 } // namespace selftest
15084 #endif /* CHECKING_P */
15086 #include "gt-tree.h"