gcc/tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24     tables indexed by tree code that describe how to take apart
25 nodes of that code.
27    It is intended to be language-independent but occasionally
28    calls language-dependent routines.  */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 /* Names of tree components.
78 Used for printing out the tree and error messages. */
79 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
80 #define END_OF_BASE_TREE_CODES "@dummy",
82 static const char *const tree_code_name[] = {
83 #include "all-tree.def"
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
89 /* Each tree code class has an associated string representation.
90 These must correspond to the tree_code_class entries. */
92 const char *const tree_code_class_strings[] =
94 "exceptional",
95 "constant",
96 "type",
97 "declaration",
98 "reference",
99 "comparison",
100 "unary",
101 "binary",
102 "statement",
103 "vl_exp",
104 "expression"
107 /* obstack.[ch] explicitly declined to prototype this. */
108 extern int _obstack_allocated_p (struct obstack *h, void *obj);
110 /* Statistics-gathering stuff. */
112 static uint64_t tree_code_counts[MAX_TREE_CODES];
113 uint64_t tree_node_counts[(int) all_kinds];
114 uint64_t tree_node_sizes[(int) all_kinds];
116 /* Keep in sync with tree.h:enum tree_node_kind. */
117 static const char * const tree_node_kind_names[] = {
118 "decls",
119 "types",
120 "blocks",
121 "stmts",
122 "refs",
123 "exprs",
124 "constants",
125 "identifiers",
126 "vecs",
127 "binfos",
128 "ssa names",
129 "constructors",
130 "random kinds",
131 "lang_decl kinds",
132 "lang_type kinds",
133 "omp clauses",
136 /* Unique id for next decl created. */
137 static GTY(()) int next_decl_uid;
138 /* Unique id for next type created. */
139 static GTY(()) unsigned next_type_uid = 1;
140 /* Unique id for next debug decl created. Use negative numbers,
141 to catch erroneous uses. */
142 static GTY(()) int next_debug_decl_uid;
144 /* Since we cannot rehash a type after it is in the table, we have to
145 keep the hash code. */
147 struct GTY((for_user)) type_hash {
148 unsigned long hash;
149 tree type;
152 /* Initial size of the hash table (rounded to next prime). */
153 #define TYPE_HASH_INITIAL_SIZE 1000
155 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
157 static hashval_t hash (type_hash *t) { return t->hash; }
158 static bool equal (type_hash *a, type_hash *b);
160 static int
161 keep_cache_entry (type_hash *&t)
163 return ggc_marked_p (t->type);
167 /* Now here is the hash table. When recording a type, it is added to
168 the slot whose index is the hash code. Note that the hash table is
169 used for several kinds of types (function types, array types and
170 array index range types, for now). While all these live in the
171 same table, they are completely independent, and the hash code is
172 computed differently for each of these. */
174 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
176 /* Hash table and temporary node for larger integer const values. */
177 static GTY (()) tree int_cst_node;
179 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
181 static hashval_t hash (tree t);
182 static bool equal (tree x, tree y);
185 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
187 /* Class and variable for making sure that there is a single POLY_INT_CST
188 for a given value. */
189 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
191 typedef std::pair<tree, const poly_wide_int *> compare_type;
192 static hashval_t hash (tree t);
193 static bool equal (tree x, const compare_type &y);
196 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
198 /* Hash table for optimization flags and target option flags. Use the same
199 hash table for both sets of options. Nodes for building the current
200 optimization and target option nodes. The assumption is most of the time
201 the options created will already be in the hash table, so we avoid
202    allocating and freeing up a node repeatedly.  */
203 static GTY (()) tree cl_optimization_node;
204 static GTY (()) tree cl_target_option_node;
206 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
212 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
214 /* General tree->tree mapping structure for use in hash tables. */
217 static GTY ((cache))
218 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
223 static GTY ((cache))
224 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
226 static void set_type_quals (tree, int);
227 static void print_type_hash_statistics (void);
228 static void print_debug_expr_statistics (void);
229 static void print_value_expr_statistics (void);
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
234 bool int_n_enabled_p[NUM_INT_N_ENTS];
235 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
237 bool tree_contains_struct[MAX_TREE_CODES][64];
239 /* Number of operands for each OMP clause. */
240 unsigned const char omp_clause_num_ops[] =
242 0, /* OMP_CLAUSE_ERROR */
243 1, /* OMP_CLAUSE_PRIVATE */
244 1, /* OMP_CLAUSE_SHARED */
245 1, /* OMP_CLAUSE_FIRSTPRIVATE */
246 2, /* OMP_CLAUSE_LASTPRIVATE */
247 5, /* OMP_CLAUSE_REDUCTION */
248 5, /* OMP_CLAUSE_TASK_REDUCTION */
249 5, /* OMP_CLAUSE_IN_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 1, /* OMP_CLAUSE_AFFINITY */
254 2, /* OMP_CLAUSE_ALIGNED */
255 3, /* OMP_CLAUSE_ALLOCATE */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_NONTEMPORAL */
258 1, /* OMP_CLAUSE_UNIFORM */
259 1, /* OMP_CLAUSE_ENTER */
260 1, /* OMP_CLAUSE_LINK */
261 1, /* OMP_CLAUSE_DETACH */
262 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
263 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
264 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
265 1, /* OMP_CLAUSE_INCLUSIVE */
266 1, /* OMP_CLAUSE_EXCLUSIVE */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
271 1, /* OMP_CLAUSE_DOACROSS */
272 2, /* OMP_CLAUSE__CACHE_ */
273 2, /* OMP_CLAUSE_GANG */
274 1, /* OMP_CLAUSE_ASYNC */
275 1, /* OMP_CLAUSE_WAIT */
276 0, /* OMP_CLAUSE_AUTO */
277 0, /* OMP_CLAUSE_SEQ */
278 1, /* OMP_CLAUSE__LOOPTEMP_ */
279 1, /* OMP_CLAUSE__REDUCTEMP_ */
280 1, /* OMP_CLAUSE__CONDTEMP_ */
281 1, /* OMP_CLAUSE__SCANTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_NUM_THREADS */
284 1, /* OMP_CLAUSE_SCHEDULE */
285 0, /* OMP_CLAUSE_NOWAIT */
286 1, /* OMP_CLAUSE_ORDERED */
287 0, /* OMP_CLAUSE_DEFAULT */
288 3, /* OMP_CLAUSE_COLLAPSE */
289 0, /* OMP_CLAUSE_UNTIED */
290 1, /* OMP_CLAUSE_FINAL */
291 0, /* OMP_CLAUSE_MERGEABLE */
292 1, /* OMP_CLAUSE_DEVICE */
293 1, /* OMP_CLAUSE_DIST_SCHEDULE */
294 0, /* OMP_CLAUSE_INBRANCH */
295 0, /* OMP_CLAUSE_NOTINBRANCH */
296 2, /* OMP_CLAUSE_NUM_TEAMS */
297 1, /* OMP_CLAUSE_THREAD_LIMIT */
298 0, /* OMP_CLAUSE_PROC_BIND */
299 1, /* OMP_CLAUSE_SAFELEN */
300 1, /* OMP_CLAUSE_SIMDLEN */
301 0, /* OMP_CLAUSE_DEVICE_TYPE */
302 0, /* OMP_CLAUSE_FOR */
303 0, /* OMP_CLAUSE_PARALLEL */
304 0, /* OMP_CLAUSE_SECTIONS */
305 0, /* OMP_CLAUSE_TASKGROUP */
306 1, /* OMP_CLAUSE_PRIORITY */
307 1, /* OMP_CLAUSE_GRAINSIZE */
308 1, /* OMP_CLAUSE_NUM_TASKS */
309 0, /* OMP_CLAUSE_NOGROUP */
310 0, /* OMP_CLAUSE_THREADS */
311 0, /* OMP_CLAUSE_SIMD */
312 1, /* OMP_CLAUSE_HINT */
313 0, /* OMP_CLAUSE_DEFAULTMAP */
314 0, /* OMP_CLAUSE_ORDER */
315 0, /* OMP_CLAUSE_BIND */
316 1, /* OMP_CLAUSE_FILTER */
317 1, /* OMP_CLAUSE__SIMDUID_ */
318 0, /* OMP_CLAUSE__SIMT_ */
319 0, /* OMP_CLAUSE_INDEPENDENT */
320 1, /* OMP_CLAUSE_WORKER */
321 1, /* OMP_CLAUSE_VECTOR */
322 1, /* OMP_CLAUSE_NUM_GANGS */
323 1, /* OMP_CLAUSE_NUM_WORKERS */
324 1, /* OMP_CLAUSE_VECTOR_LENGTH */
325 3, /* OMP_CLAUSE_TILE */
326 0, /* OMP_CLAUSE_IF_PRESENT */
327 0, /* OMP_CLAUSE_FINALIZE */
328 0, /* OMP_CLAUSE_NOHOST */
331 const char * const omp_clause_code_name[] =
333 "error_clause",
334 "private",
335 "shared",
336 "firstprivate",
337 "lastprivate",
338 "reduction",
339 "task_reduction",
340 "in_reduction",
341 "copyin",
342 "copyprivate",
343 "linear",
344 "affinity",
345 "aligned",
346 "allocate",
347 "depend",
348 "nontemporal",
349 "uniform",
350 "enter",
351 "link",
352 "detach",
353 "use_device_ptr",
354 "use_device_addr",
355 "is_device_ptr",
356 "inclusive",
357 "exclusive",
358 "from",
359 "to",
360 "map",
361 "has_device_addr",
362 "doacross",
363 "_cache_",
364 "gang",
365 "async",
366 "wait",
367 "auto",
368 "seq",
369 "_looptemp_",
370 "_reductemp_",
371 "_condtemp_",
372 "_scantemp_",
373 "if",
374 "num_threads",
375 "schedule",
376 "nowait",
377 "ordered",
378 "default",
379 "collapse",
380 "untied",
381 "final",
382 "mergeable",
383 "device",
384 "dist_schedule",
385 "inbranch",
386 "notinbranch",
387 "num_teams",
388 "thread_limit",
389 "proc_bind",
390 "safelen",
391 "simdlen",
392 "device_type",
393 "for",
394 "parallel",
395 "sections",
396 "taskgroup",
397 "priority",
398 "grainsize",
399 "num_tasks",
400 "nogroup",
401 "threads",
402 "simd",
403 "hint",
404 "defaultmap",
405 "order",
406 "bind",
407 "filter",
408 "_simduid_",
409 "_simt_",
410 "independent",
411 "worker",
412 "vector",
413 "num_gangs",
414 "num_workers",
415 "vector_length",
416 "tile",
417 "if_present",
418 "finalize",
419 "nohost",
422 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
423    clause names, but for use in diagnostics etc. we would like to use the "user"
424 clause names. */
426 const char *
427 user_omp_clause_code_name (tree clause, bool oacc)
429 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
430 distinguish clauses as seen by the user. See also where front ends do
431 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
432 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
433 switch (OMP_CLAUSE_MAP_KIND (clause))
435 case GOMP_MAP_FORCE_ALLOC:
436 case GOMP_MAP_ALLOC: return "create";
437 case GOMP_MAP_FORCE_TO:
438 case GOMP_MAP_TO: return "copyin";
439 case GOMP_MAP_FORCE_FROM:
440 case GOMP_MAP_FROM: return "copyout";
441 case GOMP_MAP_FORCE_TOFROM:
442 case GOMP_MAP_TOFROM: return "copy";
443 case GOMP_MAP_RELEASE: return "delete";
444 case GOMP_MAP_FORCE_PRESENT: return "present";
445 case GOMP_MAP_ATTACH: return "attach";
446 case GOMP_MAP_FORCE_DETACH:
447 case GOMP_MAP_DETACH: return "detach";
448 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
449 case GOMP_MAP_LINK: return "link";
450 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
451 default: break;
454 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
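
/* Illustrative sketch (not part of the original file): how a diagnostic
   might pick the user-visible clause name.  The helper name and the
   IS_OPENACC flag are hypothetical; user_omp_clause_code_name and
   omp_clause_code_name above are the real interfaces.  */
#if 0
static void
example_report_unsupported_clause (tree clause, bool is_openacc)
{
  /* For OpenACC map clauses this prints e.g. "copyin" instead of "map".  */
  const char *name = user_omp_clause_code_name (clause, is_openacc);
  error ("%qs clause is not supported here", name);
}
#endif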
458 /* Return the tree node structure used by tree code CODE. */
460 static inline enum tree_node_structure_enum
461 tree_node_structure_for_code (enum tree_code code)
463 switch (TREE_CODE_CLASS (code))
465 case tcc_declaration:
466 switch (code)
468 case CONST_DECL: return TS_CONST_DECL;
469 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
470 case FIELD_DECL: return TS_FIELD_DECL;
471 case FUNCTION_DECL: return TS_FUNCTION_DECL;
472 case LABEL_DECL: return TS_LABEL_DECL;
473 case PARM_DECL: return TS_PARM_DECL;
474 case RESULT_DECL: return TS_RESULT_DECL;
475 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
476 case TYPE_DECL: return TS_TYPE_DECL;
477 case VAR_DECL: return TS_VAR_DECL;
478 default: return TS_DECL_NON_COMMON;
481 case tcc_type: return TS_TYPE_NON_COMMON;
483 case tcc_binary:
484 case tcc_comparison:
485 case tcc_expression:
486 case tcc_reference:
487 case tcc_statement:
488 case tcc_unary:
489 case tcc_vl_exp: return TS_EXP;
491 default: /* tcc_constant and tcc_exceptional */
492 break;
495 switch (code)
497 /* tcc_constant cases. */
498 case COMPLEX_CST: return TS_COMPLEX;
499 case FIXED_CST: return TS_FIXED_CST;
500 case INTEGER_CST: return TS_INT_CST;
501 case POLY_INT_CST: return TS_POLY_INT_CST;
502 case REAL_CST: return TS_REAL_CST;
503 case STRING_CST: return TS_STRING;
504 case VECTOR_CST: return TS_VECTOR;
505 case VOID_CST: return TS_TYPED;
507 /* tcc_exceptional cases. */
508 case BLOCK: return TS_BLOCK;
509 case CONSTRUCTOR: return TS_CONSTRUCTOR;
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case OMP_CLAUSE: return TS_OMP_CLAUSE;
513 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
514 case PLACEHOLDER_EXPR: return TS_COMMON;
515 case SSA_NAME: return TS_SSA_NAME;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
518 case TREE_BINFO: return TS_BINFO;
519 case TREE_LIST: return TS_LIST;
520 case TREE_VEC: return TS_VEC;
522 default:
523 gcc_unreachable ();
528 /* Initialize tree_contains_struct to describe the hierarchy of tree
529 nodes. */
531 static void
532 initialize_tree_contains_struct (void)
534 unsigned i;
536 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
538 enum tree_code code;
539 enum tree_node_structure_enum ts_code;
541 code = (enum tree_code) i;
542 ts_code = tree_node_structure_for_code (code);
544 /* Mark the TS structure itself. */
545 tree_contains_struct[code][ts_code] = 1;
547 /* Mark all the structures that TS is derived from. */
548 switch (ts_code)
550 case TS_TYPED:
551 case TS_BLOCK:
552 case TS_OPTIMIZATION:
553 case TS_TARGET_OPTION:
554 MARK_TS_BASE (code);
555 break;
557 case TS_COMMON:
558 case TS_INT_CST:
559 case TS_POLY_INT_CST:
560 case TS_REAL_CST:
561 case TS_FIXED_CST:
562 case TS_VECTOR:
563 case TS_STRING:
564 case TS_COMPLEX:
565 case TS_SSA_NAME:
566 case TS_CONSTRUCTOR:
567 case TS_EXP:
568 case TS_STATEMENT_LIST:
569 MARK_TS_TYPED (code);
570 break;
572 case TS_IDENTIFIER:
573 case TS_DECL_MINIMAL:
574 case TS_TYPE_COMMON:
575 case TS_LIST:
576 case TS_VEC:
577 case TS_BINFO:
578 case TS_OMP_CLAUSE:
579 MARK_TS_COMMON (code);
580 break;
582 case TS_TYPE_WITH_LANG_SPECIFIC:
583 MARK_TS_TYPE_COMMON (code);
584 break;
586 case TS_TYPE_NON_COMMON:
587 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
588 break;
590 case TS_DECL_COMMON:
591 MARK_TS_DECL_MINIMAL (code);
592 break;
594 case TS_DECL_WRTL:
595 case TS_CONST_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 case TS_DECL_NON_COMMON:
600 MARK_TS_DECL_WITH_VIS (code);
601 break;
603 case TS_DECL_WITH_VIS:
604 case TS_PARM_DECL:
605 case TS_LABEL_DECL:
606 case TS_RESULT_DECL:
607 MARK_TS_DECL_WRTL (code);
608 break;
610 case TS_FIELD_DECL:
611 MARK_TS_DECL_COMMON (code);
612 break;
614 case TS_VAR_DECL:
615 MARK_TS_DECL_WITH_VIS (code);
616 break;
618 case TS_TYPE_DECL:
619 case TS_FUNCTION_DECL:
620 MARK_TS_DECL_NON_COMMON (code);
621 break;
623 case TS_TRANSLATION_UNIT_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
627 default:
628 gcc_unreachable ();
632 /* Basic consistency checks for attributes used in fold. */
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
634 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
646 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
662 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
663 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
665 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
666 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
667 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
669 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
670 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
676 /* Init tree.cc. */
678 void
679 init_ttree (void)
681 /* Initialize the hash table of types. */
682 type_hash_table
683 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
685 debug_expr_for_decl
686 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
688 value_expr_for_decl
689 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
691 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
693 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
695 int_cst_node = make_int_cst (1, 1);
697 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
699 cl_optimization_node = make_node (OPTIMIZATION_NODE);
700 cl_target_option_node = make_node (TARGET_OPTION_NODE);
702 /* Initialize the tree_contains_struct array. */
703 initialize_tree_contains_struct ();
704 lang_hooks.init_ts ();
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
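
/* Illustrative sketch (not part of the original file): retrieving the
   assembler-level name of a declaration as a C string.  The helper name
   is hypothetical; decl_assembler_name and IDENTIFIER_POINTER are real.  */
#if 0
static const char *
example_asm_name_str (tree decl)
{
  /* Forces lazy computation via lang_hooks.set_decl_assembler_name.  */
  tree id = decl_assembler_name (decl);
  return IDENTIFIER_POINTER (id);
}
#endif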
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE, if this changes the
721 name. */
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 /* Return true if DECL may need an assembler name to be set. */
732 static inline bool
733 need_assembler_name_p (tree decl)
735 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
736      Rule merging.  This makes type_odr_p return true on those types during
737      LTO, and by comparing the mangled names we can tell which types are intended
738      to be equivalent across compilation units.
740 We do not store names of type_in_anonymous_namespace_p.
742      Record, union and enumeration types have linkage that allows us
743      to check type_in_anonymous_namespace_p.  We do not mangle compound types
744      that can always be compared structurally.
746 Similarly for builtin types, we compare properties of their main variant.
747      A special case is integer types, where mangling does distinguish
748      char/signed char/unsigned char etc.  Storing names for these lets
749      e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
750 See cp/mangle.cc:write_builtin_type for details. */
752 if (TREE_CODE (decl) == TYPE_DECL)
754 if (DECL_NAME (decl)
755 && decl == TYPE_NAME (TREE_TYPE (decl))
756 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
757 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
758 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
759 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
760 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
761 && (type_with_linkage_p (TREE_TYPE (decl))
762 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
763 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
764 return !DECL_ASSEMBLER_NAME_SET_P (decl);
765 return false;
767 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
768 if (!VAR_OR_FUNCTION_DECL_P (decl))
769 return false;
771 /* If DECL already has its assembler name set, it does not need a
772 new one. */
773 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
774 || DECL_ASSEMBLER_NAME_SET_P (decl))
775 return false;
777 /* Abstract decls do not need an assembler name. */
778 if (DECL_ABSTRACT_P (decl))
779 return false;
781 /* For VAR_DECLs, only static, public and external symbols need an
782 assembler name. */
783 if (VAR_P (decl)
784 && !TREE_STATIC (decl)
785 && !TREE_PUBLIC (decl)
786 && !DECL_EXTERNAL (decl))
787 return false;
789 if (TREE_CODE (decl) == FUNCTION_DECL)
791 /* Do not set assembler name on builtins. Allow RTL expansion to
792 decide whether to expand inline or via a regular call. */
793 if (fndecl_built_in_p (decl)
794 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
795 return false;
797 /* Functions represented in the callgraph need an assembler name. */
798 if (cgraph_node::get (decl) != NULL)
799 return true;
801 /* Unused and not public functions don't need an assembler name. */
802 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
803 return false;
806 return true;
809 /* If T needs an assembler name, have one created for it. */
811 void
812 assign_assembler_name_if_needed (tree t)
814 if (need_assembler_name_p (t))
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
830 decl_assembler_name (t);
832 input_location = saved_location;
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
858 /* When the target supports named sections, return the section name of NODE,
859    or NULL if it is in no section.  */
860 const char *
861 decl_section_name (const_tree node)
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
869 /* Set the section name of NODE to the string VALUE, or clear it if
870    VALUE is NULL.  */
871 void
872 set_decl_section_name (tree node, const char *value)
874 struct symtab_node *snode;
876 if (value == NULL)
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
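
/* Illustrative sketch (not part of the original file): placing a variable
   in a named section and reading the name back.  Both calls are the
   functions defined here; the helper name and the section name ".mydata"
   are hypothetical.  */
#if 0
static void
example_place_in_named_section (tree var_decl)
{
  set_decl_section_name (var_decl, ".mydata");
  gcc_checking_assert (decl_section_name (var_decl) != NULL);
}
#endif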
889 /* Set section name of NODE to match the section name of OTHER.
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
907 else
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
930 struct varpool_node *vnode;
932 if (model == TLS_MODEL_NONE)
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
943 /* Compute the number of bytes occupied by a tree with code CODE.
944 This function cannot be used for nodes that have variable sizes,
945 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
946 size_t
947 tree_code_size (enum tree_code code)
949 switch (TREE_CODE_CLASS (code))
951 case tcc_declaration: /* A decl node */
952 switch (code)
954 case FIELD_DECL: return sizeof (tree_field_decl);
955 case PARM_DECL: return sizeof (tree_parm_decl);
956 case VAR_DECL: return sizeof (tree_var_decl);
957 case LABEL_DECL: return sizeof (tree_label_decl);
958 case RESULT_DECL: return sizeof (tree_result_decl);
959 case CONST_DECL: return sizeof (tree_const_decl);
960 case TYPE_DECL: return sizeof (tree_type_decl);
961 case FUNCTION_DECL: return sizeof (tree_function_decl);
962 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
963 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
964 case NAMESPACE_DECL:
965 case IMPORTED_DECL:
966 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
967 default:
968 gcc_checking_assert (code >= NUM_TREE_CODES);
969 return lang_hooks.tree_size (code);
972 case tcc_type: /* a type node */
973 switch (code)
975 case OFFSET_TYPE:
976 case ENUMERAL_TYPE:
977 case BOOLEAN_TYPE:
978 case INTEGER_TYPE:
979 case REAL_TYPE:
980 case OPAQUE_TYPE:
981 case POINTER_TYPE:
982 case REFERENCE_TYPE:
983 case NULLPTR_TYPE:
984 case FIXED_POINT_TYPE:
985 case COMPLEX_TYPE:
986 case VECTOR_TYPE:
987 case ARRAY_TYPE:
988 case RECORD_TYPE:
989 case UNION_TYPE:
990 case QUAL_UNION_TYPE:
991 case VOID_TYPE:
992 case FUNCTION_TYPE:
993 case METHOD_TYPE:
994 case LANG_TYPE: return sizeof (tree_type_non_common);
995 default:
996 gcc_checking_assert (code >= NUM_TREE_CODES);
997 return lang_hooks.tree_size (code);
1000 case tcc_reference: /* a reference */
1001 case tcc_expression: /* an expression */
1002 case tcc_statement: /* an expression with side effects */
1003 case tcc_comparison: /* a comparison expression */
1004 case tcc_unary: /* a unary arithmetic expression */
1005 case tcc_binary: /* a binary arithmetic expression */
1006 return (sizeof (struct tree_exp)
1007 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1009 case tcc_constant: /* a constant */
1010 switch (code)
1012 case VOID_CST: return sizeof (tree_typed);
1013 case INTEGER_CST: gcc_unreachable ();
1014 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1015 case REAL_CST: return sizeof (tree_real_cst);
1016 case FIXED_CST: return sizeof (tree_fixed_cst);
1017 case COMPLEX_CST: return sizeof (tree_complex);
1018 case VECTOR_CST: gcc_unreachable ();
1019 case STRING_CST: gcc_unreachable ();
1020 default:
1021 gcc_checking_assert (code >= NUM_TREE_CODES);
1022 return lang_hooks.tree_size (code);
1025 case tcc_exceptional: /* something random, like an identifier. */
1026 switch (code)
1028 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1029 case TREE_LIST: return sizeof (tree_list);
1031 case ERROR_MARK:
1032 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1034 case TREE_VEC: gcc_unreachable ();
1035 case OMP_CLAUSE: gcc_unreachable ();
1037 case SSA_NAME: return sizeof (tree_ssa_name);
1039 case STATEMENT_LIST: return sizeof (tree_statement_list);
1040 case BLOCK: return sizeof (struct tree_block);
1041 case CONSTRUCTOR: return sizeof (tree_constructor);
1042 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1043 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1045 default:
1046 gcc_checking_assert (code >= NUM_TREE_CODES);
1047 return lang_hooks.tree_size (code);
1050 default:
1051 gcc_unreachable ();
1055 /* Compute the number of bytes occupied by NODE. This routine only
1056 looks at TREE_CODE, except for those nodes that have variable sizes. */
1057 size_t
1058 tree_size (const_tree node)
1060 const enum tree_code code = TREE_CODE (node);
1061 switch (code)
1063 case INTEGER_CST:
1064 return (sizeof (struct tree_int_cst)
1065 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1067 case TREE_BINFO:
1068 return (offsetof (struct tree_binfo, base_binfos)
1069 + vec<tree, va_gc>
1070 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1072 case TREE_VEC:
1073 return (sizeof (struct tree_vec)
1074 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1076 case VECTOR_CST:
1077 return (sizeof (struct tree_vector)
1078 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1080 case STRING_CST:
1081 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1083 case OMP_CLAUSE:
1084 return (sizeof (struct tree_omp_clause)
1085 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1086 * sizeof (tree));
1088 default:
1089 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1090 return (sizeof (struct tree_exp)
1091 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1092 else
1093 return tree_code_size (code);
1097 /* Return tree node kind based on tree CODE. */
1099 static tree_node_kind
1100 get_stats_node_kind (enum tree_code code)
1102 enum tree_code_class type = TREE_CODE_CLASS (code);
1104 switch (type)
1106 case tcc_declaration: /* A decl node */
1107 return d_kind;
1108 case tcc_type: /* a type node */
1109 return t_kind;
1110 case tcc_statement: /* an expression with side effects */
1111 return s_kind;
1112 case tcc_reference: /* a reference */
1113 return r_kind;
1114 case tcc_expression: /* an expression */
1115 case tcc_comparison: /* a comparison expression */
1116 case tcc_unary: /* a unary arithmetic expression */
1117 case tcc_binary: /* a binary arithmetic expression */
1118 return e_kind;
1119 case tcc_constant: /* a constant */
1120 return c_kind;
1121 case tcc_exceptional: /* something random, like an identifier. */
1122 switch (code)
1124 case IDENTIFIER_NODE:
1125 return id_kind;
1126 case TREE_VEC:
1127 return vec_kind;
1128 case TREE_BINFO:
1129 return binfo_kind;
1130 case SSA_NAME:
1131 return ssa_name_kind;
1132 case BLOCK:
1133 return b_kind;
1134 case CONSTRUCTOR:
1135 return constr_kind;
1136 case OMP_CLAUSE:
1137 return omp_clause_kind;
1138 default:
1139 return x_kind;
1141 break;
1142 case tcc_vl_exp:
1143 return e_kind;
1144 default:
1145 gcc_unreachable ();
1149 /* Record interesting allocation statistics for a tree node with CODE
1150 and LENGTH. */
1152 static void
1153 record_node_allocation_statistics (enum tree_code code, size_t length)
1155 if (!GATHER_STATISTICS)
1156 return;
1158 tree_node_kind kind = get_stats_node_kind (code);
1160 tree_code_counts[(int) code]++;
1161 tree_node_counts[(int) kind]++;
1162 tree_node_sizes[(int) kind] += length;
1165 /* Allocate and return a new UID from the DECL_UID namespace. */
1168 allocate_decl_uid (void)
1170 return next_decl_uid++;
1173 /* Return a newly allocated node of code CODE. For decl and type
1174 nodes, some other fields are initialized. The rest of the node is
1175 initialized to zero. This function cannot be used for TREE_VEC,
1176 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1177 tree_code_size.
1179 Achoo! I got a code in the node. */
1181 tree
1182 make_node (enum tree_code code MEM_STAT_DECL)
1184 tree t;
1185 enum tree_code_class type = TREE_CODE_CLASS (code);
1186 size_t length = tree_code_size (code);
1188 record_node_allocation_statistics (code, length);
1190 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1191 TREE_SET_CODE (t, code);
1193 switch (type)
1195 case tcc_statement:
1196 if (code != DEBUG_BEGIN_STMT)
1197 TREE_SIDE_EFFECTS (t) = 1;
1198 break;
1200 case tcc_declaration:
1201 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1203 if (code == FUNCTION_DECL)
1205 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1206 SET_DECL_MODE (t, FUNCTION_MODE);
1208 else
1209 SET_DECL_ALIGN (t, 1);
1211 DECL_SOURCE_LOCATION (t) = input_location;
1212 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1213 DECL_UID (t) = --next_debug_decl_uid;
1214 else
1216 DECL_UID (t) = allocate_decl_uid ();
1217 SET_DECL_PT_UID (t, -1);
1219 if (TREE_CODE (t) == LABEL_DECL)
1220 LABEL_DECL_UID (t) = -1;
1222 break;
1224 case tcc_type:
1225 TYPE_UID (t) = next_type_uid++;
1226 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1227 TYPE_USER_ALIGN (t) = 0;
1228 TYPE_MAIN_VARIANT (t) = t;
1229 TYPE_CANONICAL (t) = t;
1231 /* Default to no attributes for type, but let target change that. */
1232 TYPE_ATTRIBUTES (t) = NULL_TREE;
1233 targetm.set_default_type_attributes (t);
1235 /* We have not yet computed the alias set for this type. */
1236 TYPE_ALIAS_SET (t) = -1;
1237 break;
1239 case tcc_constant:
1240 TREE_CONSTANT (t) = 1;
1241 break;
1243 case tcc_expression:
1244 switch (code)
1246 case INIT_EXPR:
1247 case MODIFY_EXPR:
1248 case VA_ARG_EXPR:
1249 case PREDECREMENT_EXPR:
1250 case PREINCREMENT_EXPR:
1251 case POSTDECREMENT_EXPR:
1252 case POSTINCREMENT_EXPR:
1253 /* All of these have side-effects, no matter what their
1254 operands are. */
1255 TREE_SIDE_EFFECTS (t) = 1;
1256 break;
1258 default:
1259 break;
1261 break;
1263 case tcc_exceptional:
1264 switch (code)
1266 case TARGET_OPTION_NODE:
1267 TREE_TARGET_OPTION(t)
1268 = ggc_cleared_alloc<struct cl_target_option> ();
1269 break;
1271 case OPTIMIZATION_NODE:
1272 TREE_OPTIMIZATION (t)
1273 = ggc_cleared_alloc<struct cl_optimization> ();
1274 break;
1276 default:
1277 break;
1279 break;
1281 default:
1282 /* Other classes need no special treatment. */
1283 break;
1286 return t;
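
/* Illustrative sketch (not part of the original file): a typical make_node
   call from a front end, creating an empty RECORD_TYPE whose fields are
   filled in later.  The helper name is hypothetical.  */
#if 0
static tree
example_make_empty_record (void)
{
  tree t = make_node (RECORD_TYPE);
  /* make_node already set TYPE_UID, alignment, TYPE_MAIN_VARIANT, etc.;
     callers normally add TYPE_FIELDS and call layout_type afterwards.  */
  return t;
}
#endif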
1289 /* Free tree node. */
1291 void
1292 free_node (tree node)
1294 enum tree_code code = TREE_CODE (node);
1295 if (GATHER_STATISTICS)
1297 enum tree_node_kind kind = get_stats_node_kind (code);
1299 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1300 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1301 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1303 tree_code_counts[(int) TREE_CODE (node)]--;
1304 tree_node_counts[(int) kind]--;
1305 tree_node_sizes[(int) kind] -= tree_size (node);
1307 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1308 vec_free (CONSTRUCTOR_ELTS (node));
1309 else if (code == BLOCK)
1310 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1311 else if (code == TREE_BINFO)
1312 vec_free (BINFO_BASE_ACCESSES (node));
1313 else if (code == OPTIMIZATION_NODE)
1314 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1315 else if (code == TARGET_OPTION_NODE)
1316 cl_target_option_free (TREE_TARGET_OPTION (node));
1317 ggc_free (node);
1320 /* Return a new node with the same contents as NODE except that its
1321 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1323 tree
1324 copy_node (tree node MEM_STAT_DECL)
1326 tree t;
1327 enum tree_code code = TREE_CODE (node);
1328 size_t length;
1330 gcc_assert (code != STATEMENT_LIST);
1332 length = tree_size (node);
1333 record_node_allocation_statistics (code, length);
1334 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1335 memcpy (t, node, length);
1337 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1338 TREE_CHAIN (t) = 0;
1339 TREE_ASM_WRITTEN (t) = 0;
1340 TREE_VISITED (t) = 0;
1342 if (TREE_CODE_CLASS (code) == tcc_declaration)
1344 if (code == DEBUG_EXPR_DECL)
1345 DECL_UID (t) = --next_debug_decl_uid;
1346 else
1348 DECL_UID (t) = allocate_decl_uid ();
1349 if (DECL_PT_UID_SET_P (node))
1350 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1352 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1353 && DECL_HAS_VALUE_EXPR_P (node))
1355 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1356 DECL_HAS_VALUE_EXPR_P (t) = 1;
1358 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1359 if (VAR_P (node))
1361 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1362 t->decl_with_vis.symtab_node = NULL;
1364 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1366 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1367 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1369 if (TREE_CODE (node) == FUNCTION_DECL)
1371 DECL_STRUCT_FUNCTION (t) = NULL;
1372 t->decl_with_vis.symtab_node = NULL;
1375 else if (TREE_CODE_CLASS (code) == tcc_type)
1377 TYPE_UID (t) = next_type_uid++;
1378 /* The following is so that the debug code for
1379 the copy is different from the original type.
1380 The two statements usually duplicate each other
1381 (because they clear fields of the same union),
1382 but the optimizer should catch that. */
1383 TYPE_SYMTAB_ADDRESS (t) = 0;
1384 TYPE_SYMTAB_DIE (t) = 0;
1386 /* Do not copy the values cache. */
1387 if (TYPE_CACHED_VALUES_P (t))
1389 TYPE_CACHED_VALUES_P (t) = 0;
1390 TYPE_CACHED_VALUES (t) = NULL_TREE;
1393 else if (code == TARGET_OPTION_NODE)
1395 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1396 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1397 sizeof (struct cl_target_option));
1399 else if (code == OPTIMIZATION_NODE)
1401 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1402 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1403 sizeof (struct cl_optimization));
1406 return t;
1409 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1410 For example, this can copy a list made of TREE_LIST nodes. */
1412 tree
1413 copy_list (tree list)
1415 tree head;
1416 tree prev, next;
1418 if (list == 0)
1419 return 0;
1421 head = prev = copy_node (list);
1422 next = TREE_CHAIN (list);
1423 while (next)
1425 TREE_CHAIN (prev) = copy_node (next);
1426 prev = TREE_CHAIN (prev);
1427 next = TREE_CHAIN (next);
1429 return head;
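
/* Illustrative sketch (not part of the original file): copy_list applied to
   a TREE_LIST chain built with tree_cons.  The helper name is hypothetical;
   tree_cons and copy_list are real.  */
#if 0
static tree
example_copy_two_element_list (tree a, tree b)
{
  /* Chain { a, b } built with tree_cons, then duplicated; the copy has
     fresh TREE_LIST nodes but shares the TREE_VALUEs a and b.  */
  tree orig = tree_cons (NULL_TREE, a, tree_cons (NULL_TREE, b, NULL_TREE));
  return copy_list (orig);
}
#endif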
1433 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1434 INTEGER_CST with value CST and type TYPE. */
1436 static unsigned int
1437 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1439 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1440 /* We need extra HWIs if CST is an unsigned integer with its
1441 upper bit set. */
1442 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1443 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1444 return cst.get_len ();
1447 /* Return a new INTEGER_CST with value CST and type TYPE. */
1449 static tree
1450 build_new_int_cst (tree type, const wide_int &cst)
1452 unsigned int len = cst.get_len ();
1453 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1454 tree nt = make_int_cst (len, ext_len);
1456 if (len < ext_len)
1458 --ext_len;
1459 TREE_INT_CST_ELT (nt, ext_len)
1460 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1461 for (unsigned int i = len; i < ext_len; ++i)
1462 TREE_INT_CST_ELT (nt, i) = -1;
1464 else if (TYPE_UNSIGNED (type)
1465 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1467 len--;
1468 TREE_INT_CST_ELT (nt, len)
1469 = zext_hwi (cst.elt (len),
1470 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1473 for (unsigned int i = 0; i < len; i++)
1474 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1475 TREE_TYPE (nt) = type;
1476 return nt;
1479 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1481 static tree
1482 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1483 CXX_MEM_STAT_INFO)
1485 size_t length = sizeof (struct tree_poly_int_cst);
1486 record_node_allocation_statistics (POLY_INT_CST, length);
1488 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1490 TREE_SET_CODE (t, POLY_INT_CST);
1491 TREE_CONSTANT (t) = 1;
1492 TREE_TYPE (t) = type;
1493 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1494 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1495 return t;
1498 /* Create a constant tree that contains CST sign-extended to TYPE. */
1500 tree
1501 build_int_cst (tree type, poly_int64 cst)
1503 /* Support legacy code. */
1504 if (!type)
1505 type = integer_type_node;
1507 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1510 /* Create a constant tree that contains CST zero-extended to TYPE. */
1512 tree
1513 build_int_cstu (tree type, poly_uint64 cst)
1515 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1518 /* Create a constant tree that contains CST sign-extended to TYPE. */
1520 tree
1521 build_int_cst_type (tree type, poly_int64 cst)
1523 gcc_assert (type);
1524 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
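
/* Illustrative sketch (not part of the original file): the usual way a
   front end builds a small integer constant.  The helper name is
   hypothetical; build_int_cst is the function defined above.  */
#if 0
static tree
example_forty_two (void)
{
  /* Shared INTEGER_CST node for (int) 42; build_int_cstu is the
     zero-extending counterpart for unsigned values.  */
  return build_int_cst (integer_type_node, 42);
}
#endif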
1527 /* Constructs a tree of type TYPE with the value given by CST.  Signedness
1528 of CST is assumed to be the same as the signedness of TYPE. */
1530 tree
1531 double_int_to_tree (tree type, double_int cst)
1533 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1536 /* We force the wide_int CST to the range of the type TYPE by sign or
1537 zero extending it. OVERFLOWABLE indicates if we are interested in
1538 overflow of the value, when >0 we are only interested in signed
1539 overflow, for <0 we are interested in any overflow. OVERFLOWED
1540    indicates whether overflow has already occurred.  We force CST's
1541    value to be within the range of TYPE (by setting to 0 or 1 all the
1542    bits outside the type's range).  We set TREE_OVERFLOW on the result
1543    if
1544 OVERFLOWED is nonzero,
1545 or OVERFLOWABLE is >0 and signed overflow occurs
1546 or OVERFLOWABLE is <0 and any overflow occurs
1547 We return a new tree node for the extended wide_int. The node
1548 is shared if no overflow flags are set. */
1551 tree
1552 force_fit_type (tree type, const poly_wide_int_ref &cst,
1553 int overflowable, bool overflowed)
1555 signop sign = TYPE_SIGN (type);
1557 /* If we need to set overflow flags, return a new unshared node. */
1558 if (overflowed || !wi::fits_to_tree_p (cst, type))
1560 if (overflowed
1561 || overflowable < 0
1562 || (overflowable > 0 && sign == SIGNED))
1564 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1565 sign);
1566 tree t;
1567 if (tmp.is_constant ())
1568 t = build_new_int_cst (type, tmp.coeffs[0]);
1569 else
1571 tree coeffs[NUM_POLY_INT_COEFFS];
1572 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1574 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1575 TREE_OVERFLOW (coeffs[i]) = 1;
1577 t = build_new_poly_int_cst (type, coeffs);
1579 TREE_OVERFLOW (t) = 1;
1580 return t;
1584 /* Else build a shared node. */
1585 return wide_int_to_tree (type, cst);
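
/* Illustrative sketch (not part of the original file): how fold-style
   arithmetic can use force_fit_type to propagate overflow.  wi::add and
   wi::overflow_type are real wide-int facilities; the helper name is
   hypothetical and error handling is omitted.  */
#if 0
static tree
example_fold_add (tree arg1, tree arg2)
{
  tree type = TREE_TYPE (arg1);
  wi::overflow_type ovf;
  wide_int sum = wi::add (wi::to_wide (arg1), wi::to_wide (arg2),
			  TYPE_SIGN (type), &ovf);
  /* OVERFLOWABLE == 1: only signed overflow is of interest.  */
  return force_fit_type (type, sum, 1, ovf != wi::OVF_NONE);
}
#endif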
1588 /* These are the hash table functions for the hash table of INTEGER_CST
1589 nodes of a sizetype. */
1591 /* Return the hash code X, an INTEGER_CST. */
1593 hashval_t
1594 int_cst_hasher::hash (tree x)
1596 const_tree const t = x;
1597 hashval_t code = TYPE_UID (TREE_TYPE (t));
1598 int i;
1600 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1601 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1603 return code;
1606 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1607    is the same as that given by *Y, which is also an INTEGER_CST tree node.  */
1609 bool
1610 int_cst_hasher::equal (tree x, tree y)
1612 const_tree const xt = x;
1613 const_tree const yt = y;
1615 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1616 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1617 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1618 return false;
1620 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1621 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1622 return false;
1624 return true;
1627 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1628 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1629 number of slots that can be cached for the type. */
1631 static inline tree
1632 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1633 int slot, int max_slots)
1635 gcc_checking_assert (slot >= 0);
1636 /* Initialize cache. */
1637 if (!TYPE_CACHED_VALUES_P (type))
1639 TYPE_CACHED_VALUES_P (type) = 1;
1640 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1642 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1643 if (!t)
1645 /* Create a new shared int. */
1646 t = build_new_int_cst (type, cst);
1647 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1649 return t;
1652 /* Create an INT_CST node of TYPE and value CST.
1653 The returned node is always shared. For small integers we use a
1654 per-type vector cache, for larger ones we use a single hash table.
1655 The value is extended from its precision according to the sign of
1656 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1657 the upper bits and ensures that hashing and value equality based
1658 upon the underlying HOST_WIDE_INTs works without masking. */
1660 static tree
1661 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1663 tree t;
1664 int ix = -1;
1665 int limit = 0;
1667 gcc_assert (type);
1668 unsigned int prec = TYPE_PRECISION (type);
1669 signop sgn = TYPE_SIGN (type);
1671 /* Verify that everything is canonical. */
1672 int l = pcst.get_len ();
1673 if (l > 1)
1675 if (pcst.elt (l - 1) == 0)
1676 gcc_checking_assert (pcst.elt (l - 2) < 0);
1677 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1678 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1681 wide_int cst = wide_int::from (pcst, prec, sgn);
1682 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1684 enum tree_code code = TREE_CODE (type);
1685 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1687 /* Cache NULL pointer and zero bounds. */
1688 if (cst == 0)
1689 ix = 0;
1690 /* Cache upper bounds of pointers. */
1691 else if (cst == wi::max_value (prec, sgn))
1692 ix = 1;
1693 /* Cache 1 which is used for a non-zero range. */
1694 else if (cst == 1)
1695 ix = 2;
1697 if (ix >= 0)
1699 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1700 /* Make sure no one is clobbering the shared constant. */
1701 gcc_checking_assert (TREE_TYPE (t) == type
1702 && cst == wi::to_wide (t));
1703 return t;
1706 if (ext_len == 1)
1708 /* We just need to store a single HOST_WIDE_INT. */
1709 HOST_WIDE_INT hwi;
1710 if (TYPE_UNSIGNED (type))
1711 hwi = cst.to_uhwi ();
1712 else
1713 hwi = cst.to_shwi ();
1715 switch (code)
1717 case NULLPTR_TYPE:
1718 gcc_assert (hwi == 0);
1719 /* Fallthru. */
1721 case POINTER_TYPE:
1722 case REFERENCE_TYPE:
1723 /* Ignore pointers, as they were already handled above. */
1724 break;
1726 case BOOLEAN_TYPE:
1727 /* Cache false or true. */
1728 limit = 2;
1729 if (IN_RANGE (hwi, 0, 1))
1730 ix = hwi;
1731 break;
1733 case INTEGER_TYPE:
1734 case OFFSET_TYPE:
1735 if (TYPE_SIGN (type) == UNSIGNED)
1737 /* Cache [0, N). */
1738 limit = param_integer_share_limit;
1739 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1740 ix = hwi;
1742 else
1744 /* Cache [-1, N). */
1745 limit = param_integer_share_limit + 1;
1746 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1747 ix = hwi + 1;
1749 break;
1751 case ENUMERAL_TYPE:
1752 break;
1754 default:
1755 gcc_unreachable ();
1758 if (ix >= 0)
1760 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1761 /* Make sure no one is clobbering the shared constant. */
1762 gcc_checking_assert (TREE_TYPE (t) == type
1763 && TREE_INT_CST_NUNITS (t) == 1
1764 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1765 && TREE_INT_CST_EXT_NUNITS (t) == 1
1766 && TREE_INT_CST_ELT (t, 0) == hwi);
1767 return t;
1769 else
1771 /* Use the cache of larger shared ints, using int_cst_node as
1772 a temporary. */
1774 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1775 TREE_TYPE (int_cst_node) = type;
1777 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1778 t = *slot;
1779 if (!t)
1781 /* Insert this one into the hash table. */
1782 t = int_cst_node;
1783 *slot = t;
1784 /* Make a new node for next time round. */
1785 int_cst_node = make_int_cst (1, 1);
1789 else
1791 /* The value either hashes properly or we drop it on the floor
1792 for the gc to take care of. There will not be enough of them
1793 to worry about. */
1795 tree nt = build_new_int_cst (type, cst);
1796 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1797 t = *slot;
1798 if (!t)
1800 /* Insert this one into the hash table. */
1801 t = nt;
1802 *slot = t;
1804 else
1805 ggc_free (nt);
1808 return t;
1811 hashval_t
1812 poly_int_cst_hasher::hash (tree t)
1814 inchash::hash hstate;
1816 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1817 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1818 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1820 return hstate.end ();
1823 bool
1824 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1826 if (TREE_TYPE (x) != y.first)
1827 return false;
1828 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1829 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1830 return false;
1831 return true;
1834 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1835 The elements must also have type TYPE. */
1837 tree
1838 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1840 unsigned int prec = TYPE_PRECISION (type);
1841 gcc_assert (prec <= values.coeffs[0].get_precision ());
1842 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1844 inchash::hash h;
1845 h.add_int (TYPE_UID (type));
1846 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1847 h.add_wide_int (c.coeffs[i]);
1848 poly_int_cst_hasher::compare_type comp (type, &c);
1849 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1850 INSERT);
1851 if (*slot == NULL_TREE)
1853 tree coeffs[NUM_POLY_INT_COEFFS];
1854 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1855 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1856 *slot = build_new_poly_int_cst (type, coeffs);
1858 return *slot;
1861 /* Create a constant tree with value VALUE in type TYPE. */
1863 tree
1864 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1866 if (value.is_constant ())
1867 return wide_int_to_tree_1 (type, value.coeffs[0]);
1868 return build_poly_int_cst (type, value);
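
/* Illustrative sketch (not part of the original file): wide_int_to_tree
   with an explicitly constructed wide_int.  wi::shwi and TYPE_PRECISION
   are real; the helper name is hypothetical.  */
#if 0
static tree
example_build_minus_one (tree type)
{
  wide_int w = wi::shwi (-1, TYPE_PRECISION (type));
  /* Returns the shared INTEGER_CST for -1 in TYPE (cached above).  */
  return wide_int_to_tree (type, w);
}
#endif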
1871 /* Insert INTEGER_CST T into a cache of integer constants. And return
1872 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1873 is false, and T falls into the type's 'smaller values' range, there
1874 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1875 or the value is large, should an existing entry exist, it is
1876 returned (rather than inserting T). */
1878 tree
1879 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1881 tree type = TREE_TYPE (t);
1882 int ix = -1;
1883 int limit = 0;
1884 int prec = TYPE_PRECISION (type);
1886 gcc_assert (!TREE_OVERFLOW (t));
1888 /* The caching indices here must match those in
1889      wide_int_to_tree_1.  */
1890 switch (TREE_CODE (type))
1892 case NULLPTR_TYPE:
1893 gcc_checking_assert (integer_zerop (t));
1894 /* Fallthru. */
1896 case POINTER_TYPE:
1897 case REFERENCE_TYPE:
1899 if (integer_zerop (t))
1900 ix = 0;
1901 else if (integer_onep (t))
1902 ix = 2;
1904 if (ix >= 0)
1905 limit = 3;
1907 break;
1909 case BOOLEAN_TYPE:
1910 /* Cache false or true. */
1911 limit = 2;
1912 if (wi::ltu_p (wi::to_wide (t), 2))
1913 ix = TREE_INT_CST_ELT (t, 0);
1914 break;
1916 case INTEGER_TYPE:
1917 case OFFSET_TYPE:
1918 if (TYPE_UNSIGNED (type))
1920 /* Cache 0..N */
1921 limit = param_integer_share_limit;
1923	  /* This is a little hokey, but if the prec is smaller than
1924 what is necessary to hold param_integer_share_limit, then the
1925 obvious test will not get the correct answer. */
1926 if (prec < HOST_BITS_PER_WIDE_INT)
1928 if (tree_to_uhwi (t)
1929 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1930 ix = tree_to_uhwi (t);
1932 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1933 ix = tree_to_uhwi (t);
1935 else
1937 /* Cache -1..N */
1938 limit = param_integer_share_limit + 1;
1940 if (integer_minus_onep (t))
1941 ix = 0;
1942 else if (!wi::neg_p (wi::to_wide (t)))
1944 if (prec < HOST_BITS_PER_WIDE_INT)
1946 if (tree_to_shwi (t) < param_integer_share_limit)
1947 ix = tree_to_shwi (t) + 1;
1949 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1950 ix = tree_to_shwi (t) + 1;
1953 break;
1955 case ENUMERAL_TYPE:
1956 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1957 members. */
1958 break;
1960 default:
1961 gcc_unreachable ();
1964 if (ix >= 0)
1966 /* Look for it in the type's vector of small shared ints. */
1967 if (!TYPE_CACHED_VALUES_P (type))
1969 TYPE_CACHED_VALUES_P (type) = 1;
1970 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1973 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1975 gcc_checking_assert (might_duplicate);
1976 t = r;
1978 else
1979 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1981 else
1983 /* Use the cache of larger shared ints. */
1984 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1985 if (tree r = *slot)
1987 /* If there is already an entry for the number verify it's the
1988 same value. */
1989 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1990 /* And return the cached value. */
1991 t = r;
1993 else
1994 /* Otherwise insert this one into the hash table. */
1995 *slot = t;
1998 return t;
2002 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
2003 and the rest are zeros. */
2005 tree
2006 build_low_bits_mask (tree type, unsigned bits)
2008 gcc_assert (bits <= TYPE_PRECISION (type));
2010 return wide_int_to_tree (type, wi::mask (bits, false,
2011 TYPE_PRECISION (type)));
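/* A minimal usage sketch, kept disabled; the helper name is made up for
   illustration and the usual global unsigned_type_node is assumed to be
   available.  With BITS == 8 the result is the INTEGER_CST 0xff.  */
#if 0
static void
low_bits_mask_example (void)
{
  tree mask = build_low_bits_mask (unsigned_type_node, 8);
  gcc_checking_assert (tree_to_uhwi (mask) == 0xff);
}
#endif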
2014 /* Checks that X is an integer constant that can be expressed in a (signed
2015 or unsigned) HOST_WIDE_INT without loss of precision. */
2017 bool
2018 cst_and_fits_in_hwi (const_tree x)
2020 return (TREE_CODE (x) == INTEGER_CST
2021 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2024 /* Build a newly constructed VECTOR_CST with the given values of
2025 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2027 tree
2028 make_vector (unsigned log2_npatterns,
2029 unsigned int nelts_per_pattern MEM_STAT_DECL)
2031 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2032 tree t;
2033 unsigned npatterns = 1 << log2_npatterns;
2034 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2035 unsigned length = (sizeof (struct tree_vector)
2036 + (encoded_nelts - 1) * sizeof (tree));
2038 record_node_allocation_statistics (VECTOR_CST, length);
2040 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2042 TREE_SET_CODE (t, VECTOR_CST);
2043 TREE_CONSTANT (t) = 1;
2044 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2045 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2047 return t;
2050 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2051 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2053 tree
2054 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2056 if (vec_safe_length (v) == 0)
2057 return build_zero_cst (type);
2059 unsigned HOST_WIDE_INT idx, nelts;
2060 tree value;
2062 /* We can't construct a VECTOR_CST for a variable number of elements. */
2063 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2064 tree_vector_builder vec (type, nelts, 1);
2065 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2067 if (TREE_CODE (value) == VECTOR_CST)
2069 /* If NELTS is constant then this must be too. */
2070 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2071 for (unsigned i = 0; i < sub_nelts; ++i)
2072 vec.quick_push (VECTOR_CST_ELT (value, i));
2074 else
2075 vec.quick_push (value);
2077 while (vec.length () < nelts)
2078 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2080 return vec.build ();
2083 /* Build a vector of type VECTYPE where all the elements are SCs. */
2084 tree
2085 build_vector_from_val (tree vectype, tree sc)
2087 unsigned HOST_WIDE_INT i, nunits;
2089 if (sc == error_mark_node)
2090 return sc;
2092 /* Verify that the vector type is suitable for SC. Note that there
2093 is some inconsistency in the type-system with respect to restrict
2094 qualifications of pointers. Vector types always have a main-variant
2095 element type and the qualification is applied to the vector-type.
2096 So TREE_TYPE (vector-type) does not return a properly qualified
2097 vector element-type. */
2098 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2099 TREE_TYPE (vectype)));
2101 if (CONSTANT_CLASS_P (sc))
2103 tree_vector_builder v (vectype, 1, 1);
2104 v.quick_push (sc);
2105 return v.build ();
2107 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2108 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2109 else
2111 vec<constructor_elt, va_gc> *v;
2112 vec_alloc (v, nunits);
2113 for (i = 0; i < nunits; ++i)
2114 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2115 return build_constructor (vectype, v);
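/* A usage sketch, kept disabled; the helper is hypothetical and assumes
   the global integer_type_node and build_vector_type.  Because the
   splatted element is a constant, the result is a VECTOR_CST encoded as
   a single duplicated pattern rather than a CONSTRUCTOR.  */
#if 0
static tree
splat_one_example (void)
{
  tree vectype = build_vector_type (integer_type_node, 4);
  return build_vector_from_val (vectype, build_one_cst (integer_type_node));
}
#endif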
2119 /* If TYPE is not a vector type, just return SC, otherwise return
2120 build_vector_from_val (TYPE, SC). */
2122 tree
2123 build_uniform_cst (tree type, tree sc)
2125 if (!VECTOR_TYPE_P (type))
2126 return sc;
2128 return build_vector_from_val (type, sc);
2131 /* Build a vector series of type TYPE in which element I has the value
2132 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2133 and a VEC_SERIES_EXPR otherwise. */
2135 tree
2136 build_vec_series (tree type, tree base, tree step)
2138 if (integer_zerop (step))
2139 return build_vector_from_val (type, base);
2140 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2142 tree_vector_builder builder (type, 1, 3);
2143 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2144 wi::to_wide (base) + wi::to_wide (step));
2145 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (elt1) + wi::to_wide (step));
2147 builder.quick_push (base);
2148 builder.quick_push (elt1);
2149 builder.quick_push (elt2);
2150 return builder.build ();
2152 return build2 (VEC_SERIES_EXPR, type, base, step);
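/* A usage sketch, kept disabled; the helper is hypothetical.  With a
   constant BASE of 0 and STEP of 1 the result is the VECTOR_CST
   { 0, 1, 2, 3 }, encoded with three elements per pattern; a
   non-constant BASE or STEP would yield a VEC_SERIES_EXPR instead.  */
#if 0
static tree
iota_series_example (void)
{
  tree vectype = build_vector_type (integer_type_node, 4);
  return build_vec_series (vectype,
			   build_zero_cst (integer_type_node),
			   build_one_cst (integer_type_node));
}
#endif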
2155 /* Return a vector with the same number of units and number of bits
2156 as VEC_TYPE, but in which the elements are a linear series of unsigned
2157 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2159 tree
2160 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2162 tree index_vec_type = vec_type;
2163 tree index_elt_type = TREE_TYPE (vec_type);
2164 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2165 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2167 index_elt_type = build_nonstandard_integer_type
2168 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2169 index_vec_type = build_vector_type (index_elt_type, nunits);
2172 tree_vector_builder v (index_vec_type, 1, 3);
2173 for (unsigned int i = 0; i < 3; ++i)
2174 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2175 return v.build ();
2178 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2179 elements are A and the rest are B. */
2181 tree
2182 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2184 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2185 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2186 /* Optimize the constant case. */
2187 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2188 count /= 2;
2189 tree_vector_builder builder (vec_type, count, 2);
2190 for (unsigned int i = 0; i < count * 2; ++i)
2191 builder.quick_push (i < num_a ? a : b);
2192 return builder.build ();
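/* A usage sketch, kept disabled; the helper is hypothetical.  It builds
   the four-element constant { -1, -1, 0, 0 }, the kind of prefix mask in
   which the first NUM_A lanes are "true".  */
#if 0
static tree
prefix_mask_example (void)
{
  tree vectype = build_vector_type (integer_type_node, 4);
  return build_vector_a_then_b (vectype, 2,
				build_minus_one_cst (integer_type_node),
				build_zero_cst (integer_type_node));
}
#endif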
2195 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2196 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2198 void
2199 recompute_constructor_flags (tree c)
2201 unsigned int i;
2202 tree val;
2203 bool constant_p = true;
2204 bool side_effects_p = false;
2205 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2207 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2209 /* Most ctors have elts without side effects, so
2210 the usual case is to scan all the elements. Hence a single
2211 loop for both const and side effects, rather than one loop
2212 each (with early outs). */
2213 if (!TREE_CONSTANT (val))
2214 constant_p = false;
2215 if (TREE_SIDE_EFFECTS (val))
2216 side_effects_p = true;
2219 TREE_SIDE_EFFECTS (c) = side_effects_p;
2220 TREE_CONSTANT (c) = constant_p;
2223 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2224 CONSTRUCTOR C. */
2226 void
2227 verify_constructor_flags (tree c)
2229 unsigned int i;
2230 tree val;
2231 bool constant_p = TREE_CONSTANT (c);
2232 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2233 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2235 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2237 if (constant_p && !TREE_CONSTANT (val))
2238 internal_error ("non-constant element in constant CONSTRUCTOR");
2239 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2240 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2244 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2245 are in the vec pointed to by VALS. */
2246 tree
2247 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2249 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2251 TREE_TYPE (c) = type;
2252 CONSTRUCTOR_ELTS (c) = vals;
2254 recompute_constructor_flags (c);
2256 return c;
2259 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2260 INDEX and VALUE. */
2261 tree
2262 build_constructor_single (tree type, tree index, tree value)
2264 vec<constructor_elt, va_gc> *v;
2265 constructor_elt elt = {index, value};
2267 vec_alloc (v, 1);
2268 v->quick_push (elt);
2270 return build_constructor (type, v);
2274 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2275 are in a list pointed to by VALS. */
2276 tree
2277 build_constructor_from_list (tree type, tree vals)
2279 tree t;
2280 vec<constructor_elt, va_gc> *v = NULL;
2282 if (vals)
2284 vec_alloc (v, list_length (vals));
2285 for (t = vals; t; t = TREE_CHAIN (t))
2286 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2289 return build_constructor (type, v);
2292 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2293 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2294 fields in the constructor remain null. */
2296 tree
2297 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2299 vec<constructor_elt, va_gc> *v = NULL;
2301 for (tree t : vals)
2302 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2304 return build_constructor (type, v);
2307 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2308 of elements, provided as index/value pairs. */
2310 tree
2311 build_constructor_va (tree type, int nelts, ...)
2313 vec<constructor_elt, va_gc> *v = NULL;
2314 va_list p;
2316 va_start (p, nelts);
2317 vec_alloc (v, nelts);
2318 while (nelts--)
2320 tree index = va_arg (p, tree);
2321 tree value = va_arg (p, tree);
2322 CONSTRUCTOR_APPEND_ELT (v, index, value);
2324 va_end (p);
2325 return build_constructor (type, v);
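/* A usage sketch, kept disabled; the helper and its ARRAY_TYPE parameter
   are hypothetical (a two-element integer array type supplied by the
   caller).  The varargs are consumed as index/value pairs, giving the
   aggregate initializer { [0] = 7, [1] = 9 }.  */
#if 0
static tree
array_ctor_example (tree array_type)
{
  return build_constructor_va (array_type, 2,
			       size_int (0),
			       build_int_cst (integer_type_node, 7),
			       size_int (1),
			       build_int_cst (integer_type_node, 9));
}
#endif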
2328 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2330 tree
2331 build_clobber (tree type, enum clobber_kind kind)
2333 tree clobber = build_constructor (type, NULL);
2334 TREE_THIS_VOLATILE (clobber) = true;
2335 CLOBBER_KIND (clobber) = kind;
2336 return clobber;
2339 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2341 tree
2342 build_fixed (tree type, FIXED_VALUE_TYPE f)
2344 tree v;
2345 FIXED_VALUE_TYPE *fp;
2347 v = make_node (FIXED_CST);
2348 fp = ggc_alloc<fixed_value> ();
2349 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2351 TREE_TYPE (v) = type;
2352 TREE_FIXED_CST_PTR (v) = fp;
2353 return v;
2356 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2358 tree
2359 build_real (tree type, REAL_VALUE_TYPE d)
2361 tree v;
2362 int overflow = 0;
2364 /* dconst{0,1,2,m1,half} are used in various places in
2365 the middle-end and optimizers; as an exception, allow them here
2366 even for decimal floating point types
2367 by converting them to decimal. */
2368 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2369 && (d.cl == rvc_normal || d.cl == rvc_zero)
2370 && !d.decimal)
2372 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2373 decimal_real_from_string (&d, "1");
2374 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2375 decimal_real_from_string (&d, "2");
2376 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2377 decimal_real_from_string (&d, "-1");
2378 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2379 decimal_real_from_string (&d, "0.5");
2380 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2382 /* Make sure to give zero the minimum quantum exponent for
2383 the type (which corresponds to all bits zero). */
2384 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2385 char buf[16];
2386 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2387 decimal_real_from_string (&d, buf);
2389 else
2390 gcc_unreachable ();
2393 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2394 Consider doing it via real_convert now. */
2396 v = make_node (REAL_CST);
2397 TREE_TYPE (v) = type;
2398 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2399 TREE_OVERFLOW (v) = overflow;
2400 return v;
2403 /* Like build_real, but first truncate D to the type. */
2405 tree
2406 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2408 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2411 /* Return a new REAL_CST node whose type is TYPE
2412 and whose value is the integer value of the INTEGER_CST node I. */
2414 REAL_VALUE_TYPE
2415 real_value_from_int_cst (const_tree type, const_tree i)
2417 REAL_VALUE_TYPE d;
2419 /* Clear all bits of the real value type so that we can later do
2420 bitwise comparisons to see if two values are the same. */
2421 memset (&d, 0, sizeof d);
2423 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2424 TYPE_SIGN (TREE_TYPE (i)));
2425 return d;
2428 /* Given a tree representing an integer constant I, return a tree
2429 representing the same value as a floating-point constant of type TYPE. */
2431 tree
2432 build_real_from_int_cst (tree type, const_tree i)
2434 tree v;
2435 int overflow = TREE_OVERFLOW (i);
2437 v = build_real (type, real_value_from_int_cst (type, i));
2439 TREE_OVERFLOW (v) |= overflow;
2440 return v;
2443 /* Return a new REAL_CST node whose type is TYPE
2444 and whose value is the integer value I which has sign SGN. */
2446 tree
2447 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2449 REAL_VALUE_TYPE d;
2451 /* Clear all bits of the real value type so that we can later do
2452 bitwise comparisons to see if two values are the same. */
2453 memset (&d, 0, sizeof d);
2455 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2456 return build_real (type, d);
2459 /* Return a newly constructed STRING_CST node whose value is the LEN
2460 characters at STR when STR is nonnull, or all zeros otherwise.
2461 Note that for a C string literal, LEN should include the trailing NUL.
2462 The TREE_TYPE is not initialized. */
2464 tree
2465 build_string (unsigned len, const char *str /*= NULL */)
2467 /* Do not waste bytes provided by padding of struct tree_string. */
2468 unsigned size = len + offsetof (struct tree_string, str) + 1;
2470 record_node_allocation_statistics (STRING_CST, size);
2472 tree s = (tree) ggc_internal_alloc (size);
2474 memset (s, 0, sizeof (struct tree_typed));
2475 TREE_SET_CODE (s, STRING_CST);
2476 TREE_CONSTANT (s) = 1;
2477 TREE_STRING_LENGTH (s) = len;
2478 if (str)
2479 memcpy (s->string.str, str, len);
2480 else
2481 memset (s->string.str, 0, len);
2482 s->string.str[len] = '\0';
2484 return s;
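/* A usage sketch, kept disabled; the helper is hypothetical.  Since
   build_string leaves TREE_TYPE uninitialized, the caller must supply a
   type; typing the node as a char array with domain [0..LEN-1] is one
   plausible choice, not the only one front ends use.  */
#if 0
static tree
string_cst_example (void)
{
  tree s = build_string (3, "hi");	/* "hi" plus the trailing NUL.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
				    build_index_type (size_int (2)));
  return s;
}
#endif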
2487 /* Return a newly constructed COMPLEX_CST node whose value is
2488 specified by the real and imaginary parts REAL and IMAG.
2489 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2490 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2492 tree
2493 build_complex (tree type, tree real, tree imag)
2495 gcc_assert (CONSTANT_CLASS_P (real));
2496 gcc_assert (CONSTANT_CLASS_P (imag));
2498 tree t = make_node (COMPLEX_CST);
2500 TREE_REALPART (t) = real;
2501 TREE_IMAGPART (t) = imag;
2502 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2503 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2504 return t;
2507 /* Build a complex (inf +- 0i), such as for the result of cproj.
2508 TYPE is the complex tree type of the result. If NEG is true, the
2509 imaginary zero is negative. */
2511 tree
2512 build_complex_inf (tree type, bool neg)
2514 REAL_VALUE_TYPE rzero = dconst0;
2516 rzero.sign = neg;
2517 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2518 build_real (TREE_TYPE (type), rzero));
2521 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2522 element is set to 1. In particular, this is 1 + i for complex types. */
2524 tree
2525 build_each_one_cst (tree type)
2527 if (TREE_CODE (type) == COMPLEX_TYPE)
2529 tree scalar = build_one_cst (TREE_TYPE (type));
2530 return build_complex (type, scalar, scalar);
2532 else
2533 return build_one_cst (type);
2536 /* Return a constant of arithmetic type TYPE which is the
2537 multiplicative identity of the set TYPE. */
2539 tree
2540 build_one_cst (tree type)
2542 switch (TREE_CODE (type))
2544 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2545 case POINTER_TYPE: case REFERENCE_TYPE:
2546 case OFFSET_TYPE:
2547 return build_int_cst (type, 1);
2549 case REAL_TYPE:
2550 return build_real (type, dconst1);
2552 case FIXED_POINT_TYPE:
2553 /* We can only generate 1 for accum types. */
2554 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2555 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2557 case VECTOR_TYPE:
2559 tree scalar = build_one_cst (TREE_TYPE (type));
2561 return build_vector_from_val (type, scalar);
2564 case COMPLEX_TYPE:
2565 return build_complex (type,
2566 build_one_cst (TREE_TYPE (type)),
2567 build_zero_cst (TREE_TYPE (type)));
2569 default:
2570 gcc_unreachable ();
2574 /* Return an integer of type TYPE containing all 1's in as much precision as
2575 it contains, or a complex or vector whose subparts are such integers. */
2577 tree
2578 build_all_ones_cst (tree type)
2580 if (TREE_CODE (type) == COMPLEX_TYPE)
2582 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2583 return build_complex (type, scalar, scalar);
2585 else
2586 return build_minus_one_cst (type);
2589 /* Return a constant of arithmetic type TYPE which is the
2590 opposite of the multiplicative identity of the set TYPE. */
2592 tree
2593 build_minus_one_cst (tree type)
2595 switch (TREE_CODE (type))
2597 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2598 case POINTER_TYPE: case REFERENCE_TYPE:
2599 case OFFSET_TYPE:
2600 return build_int_cst (type, -1);
2602 case REAL_TYPE:
2603 return build_real (type, dconstm1);
2605 case FIXED_POINT_TYPE:
2606 /* We can only generate -1 for accum types. */
2607 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2608 return build_fixed (type,
2609 fixed_from_double_int (double_int_minus_one,
2610 SCALAR_TYPE_MODE (type)));
2612 case VECTOR_TYPE:
2614 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2616 return build_vector_from_val (type, scalar);
2619 case COMPLEX_TYPE:
2620 return build_complex (type,
2621 build_minus_one_cst (TREE_TYPE (type)),
2622 build_zero_cst (TREE_TYPE (type)));
2624 default:
2625 gcc_unreachable ();
2629 /* Build a zero constant of type TYPE. This is used by constructor folding,
2630 and thus the constant should be represented in memory by
2631 zero(es). */
2633 tree
2634 build_zero_cst (tree type)
2636 switch (TREE_CODE (type))
2638 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2639 case POINTER_TYPE: case REFERENCE_TYPE:
2640 case OFFSET_TYPE: case NULLPTR_TYPE:
2641 return build_int_cst (type, 0);
2643 case REAL_TYPE:
2644 return build_real (type, dconst0);
2646 case FIXED_POINT_TYPE:
2647 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2649 case VECTOR_TYPE:
2651 tree scalar = build_zero_cst (TREE_TYPE (type));
2653 return build_vector_from_val (type, scalar);
2656 case COMPLEX_TYPE:
2658 tree zero = build_zero_cst (TREE_TYPE (type));
2660 return build_complex (type, zero, zero);
2663 default:
2664 if (!AGGREGATE_TYPE_P (type))
2665 return fold_convert (type, integer_zero_node);
2666 return build_constructor (type, NULL);
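/* A usage sketch, kept disabled; the helper and its RECORD_TYPE
   parameter are hypothetical.  build_zero_cst covers scalars and
   aggregates alike: an INTEGER_CST for integers, a REAL_CST for floats,
   and an empty CONSTRUCTOR for aggregates, all of which are represented
   in memory by zeros.  */
#if 0
static void
zero_cst_example (tree some_record_type)
{
  tree z1 = build_zero_cst (integer_type_node);	/* INTEGER_CST 0.  */
  tree z2 = build_zero_cst (double_type_node);	/* REAL_CST 0.0.  */
  tree z3 = build_zero_cst (some_record_type);	/* Empty CONSTRUCTOR.  */
  (void) z1; (void) z2; (void) z3;
}
#endif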
2670 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2671 unsigned constant in which only the sign bit is set. Return null
2672 otherwise. */
2674 tree
2675 sign_mask_for (tree type)
2677 /* Avoid having to choose between a real-only sign and a pair of signs.
2678 This could be relaxed if the choice becomes obvious later. */
2679 if (TREE_CODE (type) == COMPLEX_TYPE)
2680 return NULL_TREE;
2682 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2683 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2684 if (!bits || !pow2p_hwi (bits))
2685 return NULL_TREE;
2687 tree inttype = unsigned_type_for (type);
2688 if (!inttype)
2689 return NULL_TREE;
2691 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2692 if (TREE_CODE (inttype) == VECTOR_TYPE)
2694 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2695 return build_vector_from_val (inttype, elt);
2697 return wide_int_to_tree (inttype, mask);
2700 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2702 tree
2703 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2705 tree t;
2706 size_t length = (offsetof (struct tree_binfo, base_binfos)
2707 + vec<tree, va_gc>::embedded_size (base_binfos));
2709 record_node_allocation_statistics (TREE_BINFO, length);
2711 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2713 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2715 TREE_SET_CODE (t, TREE_BINFO);
2717 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2719 return t;
2722 /* Create a CASE_LABEL_EXPR tree node and return it. */
2724 tree
2725 build_case_label (tree low_value, tree high_value, tree label_decl)
2727 tree t = make_node (CASE_LABEL_EXPR);
2729 TREE_TYPE (t) = void_type_node;
2730 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2732 CASE_LOW (t) = low_value;
2733 CASE_HIGH (t) = high_value;
2734 CASE_LABEL (t) = label_decl;
2735 CASE_CHAIN (t) = NULL_TREE;
2737 return t;
2740 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2741 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2742 The latter determines the length of the HOST_WIDE_INT vector. */
2744 tree
2745 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2747 tree t;
2748 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2749 + sizeof (struct tree_int_cst));
2751 gcc_assert (len);
2752 record_node_allocation_statistics (INTEGER_CST, length);
2754 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2756 TREE_SET_CODE (t, INTEGER_CST);
2757 TREE_INT_CST_NUNITS (t) = len;
2758 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2759 /* to_offset can only be applied to trees that are offset_int-sized
2760 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2761 must be exactly the precision of offset_int and so LEN is correct. */
2762 if (ext_len <= OFFSET_INT_ELTS)
2763 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2764 else
2765 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2767 TREE_CONSTANT (t) = 1;
2769 return t;
2772 /* Build a newly constructed TREE_VEC node of length LEN. */
2774 tree
2775 make_tree_vec (int len MEM_STAT_DECL)
2777 tree t;
2778 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2780 record_node_allocation_statistics (TREE_VEC, length);
2782 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2784 TREE_SET_CODE (t, TREE_VEC);
2785 TREE_VEC_LENGTH (t) = len;
2787 return t;
2790 /* Grow a TREE_VEC node to new length LEN. */
2792 tree
2793 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2795 gcc_assert (TREE_CODE (v) == TREE_VEC);
2797 int oldlen = TREE_VEC_LENGTH (v);
2798 gcc_assert (len > oldlen);
2800 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2801 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2803 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2805 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2807 TREE_VEC_LENGTH (v) = len;
2809 return v;
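/* A usage sketch, kept disabled; the helper is hypothetical.  A TREE_VEC
   is allocated with a fixed length, filled via TREE_VEC_ELT, and may
   later be grown; the caller is responsible for filling the new trailing
   slots.  */
#if 0
static tree
tree_vec_example (void)
{
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = integer_zero_node;
  TREE_VEC_ELT (v, 1) = integer_one_node;
  v = grow_tree_vec (v, 4);	/* TREE_VEC_LENGTH (v) is now 4.  */
  return v;
}
#endif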
2812 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2813 fixed, and scalar, complex or vector. */
2815 bool
2816 zerop (const_tree expr)
2818 return (integer_zerop (expr)
2819 || real_zerop (expr)
2820 || fixed_zerop (expr));
2823 /* Return 1 if EXPR is the integer constant zero or a complex constant
2824 of zero, or a location wrapper for such a constant. */
2826 bool
2827 integer_zerop (const_tree expr)
2829 STRIP_ANY_LOCATION_WRAPPER (expr);
2831 switch (TREE_CODE (expr))
2833 case INTEGER_CST:
2834 return wi::to_wide (expr) == 0;
2835 case COMPLEX_CST:
2836 return (integer_zerop (TREE_REALPART (expr))
2837 && integer_zerop (TREE_IMAGPART (expr)));
2838 case VECTOR_CST:
2839 return (VECTOR_CST_NPATTERNS (expr) == 1
2840 && VECTOR_CST_DUPLICATE_P (expr)
2841 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2842 default:
2843 return false;
2847 /* Return 1 if EXPR is the integer constant one or the corresponding
2848 complex constant, or a location wrapper for such a constant. */
2850 bool
2851 integer_onep (const_tree expr)
2853 STRIP_ANY_LOCATION_WRAPPER (expr);
2855 switch (TREE_CODE (expr))
2857 case INTEGER_CST:
2858 return wi::eq_p (wi::to_widest (expr), 1);
2859 case COMPLEX_CST:
2860 return (integer_onep (TREE_REALPART (expr))
2861 && integer_zerop (TREE_IMAGPART (expr)));
2862 case VECTOR_CST:
2863 return (VECTOR_CST_NPATTERNS (expr) == 1
2864 && VECTOR_CST_DUPLICATE_P (expr)
2865 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2866 default:
2867 return false;
2871 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2872 return 1 if every piece is the integer constant one.
2873 Also return 1 for location wrappers for such a constant. */
2875 bool
2876 integer_each_onep (const_tree expr)
2878 STRIP_ANY_LOCATION_WRAPPER (expr);
2880 if (TREE_CODE (expr) == COMPLEX_CST)
2881 return (integer_onep (TREE_REALPART (expr))
2882 && integer_onep (TREE_IMAGPART (expr)));
2883 else
2884 return integer_onep (expr);
2887 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2888 it contains, or a complex or vector whose subparts are such integers,
2889 or a location wrapper for such a constant. */
2891 bool
2892 integer_all_onesp (const_tree expr)
2894 STRIP_ANY_LOCATION_WRAPPER (expr);
2896 if (TREE_CODE (expr) == COMPLEX_CST
2897 && integer_all_onesp (TREE_REALPART (expr))
2898 && integer_all_onesp (TREE_IMAGPART (expr)))
2899 return true;
2901 else if (TREE_CODE (expr) == VECTOR_CST)
2902 return (VECTOR_CST_NPATTERNS (expr) == 1
2903 && VECTOR_CST_DUPLICATE_P (expr)
2904 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2906 else if (TREE_CODE (expr) != INTEGER_CST)
2907 return false;
2909 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2910 == wi::to_wide (expr));
2913 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2914 for such a constant. */
2916 bool
2917 integer_minus_onep (const_tree expr)
2919 STRIP_ANY_LOCATION_WRAPPER (expr);
2921 if (TREE_CODE (expr) == COMPLEX_CST)
2922 return (integer_all_onesp (TREE_REALPART (expr))
2923 && integer_zerop (TREE_IMAGPART (expr)));
2924 else
2925 return integer_all_onesp (expr);
2928 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2929 one bit on), or a location wrapper for such a constant. */
2931 bool
2932 integer_pow2p (const_tree expr)
2934 STRIP_ANY_LOCATION_WRAPPER (expr);
2936 if (TREE_CODE (expr) == COMPLEX_CST
2937 && integer_pow2p (TREE_REALPART (expr))
2938 && integer_zerop (TREE_IMAGPART (expr)))
2939 return true;
2941 if (TREE_CODE (expr) != INTEGER_CST)
2942 return false;
2944 return wi::popcount (wi::to_wide (expr)) == 1;
2947 /* Return 1 if EXPR is an integer constant other than zero or a
2948 complex constant other than zero, or a location wrapper for such a
2949 constant. */
2951 bool
2952 integer_nonzerop (const_tree expr)
2954 STRIP_ANY_LOCATION_WRAPPER (expr);
2956 return ((TREE_CODE (expr) == INTEGER_CST
2957 && wi::to_wide (expr) != 0)
2958 || (TREE_CODE (expr) == COMPLEX_CST
2959 && (integer_nonzerop (TREE_REALPART (expr))
2960 || integer_nonzerop (TREE_IMAGPART (expr)))));
2963 /* Return 1 if EXPR is the integer constant one. For vector,
2964 return 1 if every piece is the integer constant minus one
2965 (representing the value TRUE).
2966 Also return 1 for location wrappers for such a constant. */
2968 bool
2969 integer_truep (const_tree expr)
2971 STRIP_ANY_LOCATION_WRAPPER (expr);
2973 if (TREE_CODE (expr) == VECTOR_CST)
2974 return integer_all_onesp (expr);
2975 return integer_onep (expr);
2978 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2979 for such a constant. */
2981 bool
2982 fixed_zerop (const_tree expr)
2984 STRIP_ANY_LOCATION_WRAPPER (expr);
2986 return (TREE_CODE (expr) == FIXED_CST
2987 && TREE_FIXED_CST (expr).data.is_zero ());
2990 /* Return the base-2 logarithm (the exponent) of a tree node known to be
2991 a power of two. */
2994 tree_log2 (const_tree expr)
2996 if (TREE_CODE (expr) == COMPLEX_CST)
2997 return tree_log2 (TREE_REALPART (expr));
2999 return wi::exact_log2 (wi::to_wide (expr));
3002 /* Similar, but return the largest integer Y such that 2 ** Y is less
3003 than or equal to EXPR. */
3006 tree_floor_log2 (const_tree expr)
3008 if (TREE_CODE (expr) == COMPLEX_CST)
3009 return tree_log2 (TREE_REALPART (expr));
3011 return wi::floor_log2 (wi::to_wide (expr));
3014 /* Return the number of known trailing zero bits in EXPR, or, if the value of
3015 EXPR is known to be zero, the precision of its type. */
3017 unsigned int
3018 tree_ctz (const_tree expr)
3020 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3021 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3022 return 0;
3024 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3025 switch (TREE_CODE (expr))
3027 case INTEGER_CST:
3028 ret1 = wi::ctz (wi::to_wide (expr));
3029 return MIN (ret1, prec);
3030 case SSA_NAME:
3031 ret1 = wi::ctz (get_nonzero_bits (expr));
3032 return MIN (ret1, prec);
3033 case PLUS_EXPR:
3034 case MINUS_EXPR:
3035 case BIT_IOR_EXPR:
3036 case BIT_XOR_EXPR:
3037 case MIN_EXPR:
3038 case MAX_EXPR:
3039 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3040 if (ret1 == 0)
3041 return ret1;
3042 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3043 return MIN (ret1, ret2);
3044 case POINTER_PLUS_EXPR:
3045 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3046 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3047 /* The second operand is sizetype, which could in theory be
3048 wider than the pointer's precision. Make sure we never
3049 return more than PREC. */
3050 ret2 = MIN (ret2, prec);
3051 return MIN (ret1, ret2);
3052 case BIT_AND_EXPR:
3053 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3054 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3055 return MAX (ret1, ret2);
3056 case MULT_EXPR:
3057 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3058 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3059 return MIN (ret1 + ret2, prec);
3060 case LSHIFT_EXPR:
3061 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3062 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3063 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3065 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3066 return MIN (ret1 + ret2, prec);
3068 return ret1;
3069 case RSHIFT_EXPR:
3070 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3071 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3073 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3074 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3075 if (ret1 > ret2)
3076 return ret1 - ret2;
3078 return 0;
3079 case TRUNC_DIV_EXPR:
3080 case CEIL_DIV_EXPR:
3081 case FLOOR_DIV_EXPR:
3082 case ROUND_DIV_EXPR:
3083 case EXACT_DIV_EXPR:
3084 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3085 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3087 int l = tree_log2 (TREE_OPERAND (expr, 1));
3088 if (l >= 0)
3090 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3091 ret2 = l;
3092 if (ret1 > ret2)
3093 return ret1 - ret2;
3096 return 0;
3097 CASE_CONVERT:
3098 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3099 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3100 ret1 = prec;
3101 return MIN (ret1, prec);
3102 case SAVE_EXPR:
3103 return tree_ctz (TREE_OPERAND (expr, 0));
3104 case COND_EXPR:
3105 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3106 if (ret1 == 0)
3107 return 0;
3108 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3109 return MIN (ret1, ret2);
3110 case COMPOUND_EXPR:
3111 return tree_ctz (TREE_OPERAND (expr, 1));
3112 case ADDR_EXPR:
3113 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3114 if (ret1 > BITS_PER_UNIT)
3116 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3117 return MIN (ret1, prec);
3119 return 0;
3120 default:
3121 return 0;
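/* A usage sketch, kept disabled; the helper is hypothetical.  For the
   INTEGER_CST 40 (binary ...101000) tree_ctz reports three known
   trailing zero bits.  */
#if 0
static void
tree_ctz_example (void)
{
  tree forty = build_int_cst (integer_type_node, 40);
  gcc_checking_assert (tree_ctz (forty) == 3);
}
#endif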
3125 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3126 decimal float constants, so don't return 1 for them.
3127 Also return 1 for location wrappers around such a constant. */
3129 bool
3130 real_zerop (const_tree expr)
3132 STRIP_ANY_LOCATION_WRAPPER (expr);
3134 switch (TREE_CODE (expr))
3136 case REAL_CST:
3137 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3138 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3139 case COMPLEX_CST:
3140 return real_zerop (TREE_REALPART (expr))
3141 && real_zerop (TREE_IMAGPART (expr));
3142 case VECTOR_CST:
3144 /* Don't simply check for a duplicate because the predicate
3145 accepts both +0.0 and -0.0. */
3146 unsigned count = vector_cst_encoded_nelts (expr);
3147 for (unsigned int i = 0; i < count; ++i)
3148 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3149 return false;
3150 return true;
3152 default:
3153 return false;
3157 /* Return 1 if EXPR is the real constant one in real or complex form.
3158 Trailing zeroes matter for decimal float constants, so don't return
3159 1 for them.
3160 Also return 1 for location wrappers around such a constant. */
3162 bool
3163 real_onep (const_tree expr)
3165 STRIP_ANY_LOCATION_WRAPPER (expr);
3167 switch (TREE_CODE (expr))
3169 case REAL_CST:
3170 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3171 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3172 case COMPLEX_CST:
3173 return real_onep (TREE_REALPART (expr))
3174 && real_zerop (TREE_IMAGPART (expr));
3175 case VECTOR_CST:
3176 return (VECTOR_CST_NPATTERNS (expr) == 1
3177 && VECTOR_CST_DUPLICATE_P (expr)
3178 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3179 default:
3180 return false;
3184 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3185 matter for decimal float constants, so don't return 1 for them.
3186 Also return 1 for location wrappers around such a constant. */
3188 bool
3189 real_minus_onep (const_tree expr)
3191 STRIP_ANY_LOCATION_WRAPPER (expr);
3193 switch (TREE_CODE (expr))
3195 case REAL_CST:
3196 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3197 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3198 case COMPLEX_CST:
3199 return real_minus_onep (TREE_REALPART (expr))
3200 && real_zerop (TREE_IMAGPART (expr));
3201 case VECTOR_CST:
3202 return (VECTOR_CST_NPATTERNS (expr) == 1
3203 && VECTOR_CST_DUPLICATE_P (expr)
3204 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3205 default:
3206 return false;
3210 /* Return true if T could be a floating point zero. */
3212 bool
3213 real_maybe_zerop (const_tree expr)
3215 switch (TREE_CODE (expr))
3217 case REAL_CST:
3218 /* Can't use real_zerop here, as it always returns false for decimal
3219 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3220 either, as decimal zeros are rvc_normal. */
3221 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3222 case COMPLEX_CST:
3223 return (real_maybe_zerop (TREE_REALPART (expr))
3224 || real_maybe_zerop (TREE_IMAGPART (expr)));
3225 case VECTOR_CST:
3227 unsigned count = vector_cst_encoded_nelts (expr);
3228 for (unsigned int i = 0; i < count; ++i)
3229 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3230 return true;
3231 return false;
3233 default:
3234 /* Perhaps for SSA_NAMEs we could query frange. */
3235 return true;
3239 /* Nonzero if EXP is a constant or a cast of a constant. */
3241 bool
3242 really_constant_p (const_tree exp)
3244 /* This is not quite the same as STRIP_NOPS. It does more. */
3245 while (CONVERT_EXPR_P (exp)
3246 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3247 exp = TREE_OPERAND (exp, 0);
3248 return TREE_CONSTANT (exp);
3251 /* Return true if T holds a polynomial pointer difference, storing it in
3252 *VALUE if so. A true return means that T's precision is no greater
3253 than 64 bits, which is the largest address space we support, so *VALUE
3254 never loses precision. However, the signedness of the result does
3255 not necessarily match the signedness of T: sometimes an unsigned type
3256 like sizetype is used to encode a value that is actually negative. */
3258 bool
3259 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3261 if (!t)
3262 return false;
3263 if (TREE_CODE (t) == INTEGER_CST)
3265 if (!cst_and_fits_in_hwi (t))
3266 return false;
3267 *value = int_cst_value (t);
3268 return true;
3270 if (POLY_INT_CST_P (t))
3272 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3273 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3274 return false;
3275 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3276 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3277 return true;
3279 return false;
3282 poly_int64
3283 tree_to_poly_int64 (const_tree t)
3285 gcc_assert (tree_fits_poly_int64_p (t));
3286 if (POLY_INT_CST_P (t))
3287 return poly_int_cst_value (t).force_shwi ();
3288 return TREE_INT_CST_LOW (t);
3291 poly_uint64
3292 tree_to_poly_uint64 (const_tree t)
3294 gcc_assert (tree_fits_poly_uint64_p (t));
3295 if (POLY_INT_CST_P (t))
3296 return poly_int_cst_value (t).force_uhwi ();
3297 return TREE_INT_CST_LOW (t);
3300 /* Return first list element whose TREE_VALUE is ELEM.
3301 Return 0 if ELEM is not in LIST. */
3303 tree
3304 value_member (tree elem, tree list)
3306 while (list)
3308 if (elem == TREE_VALUE (list))
3309 return list;
3310 list = TREE_CHAIN (list);
3312 return NULL_TREE;
3315 /* Return first list element whose TREE_PURPOSE is ELEM.
3316 Return 0 if ELEM is not in LIST. */
3318 tree
3319 purpose_member (const_tree elem, tree list)
3321 while (list)
3323 if (elem == TREE_PURPOSE (list))
3324 return list;
3325 list = TREE_CHAIN (list);
3327 return NULL_TREE;
3330 /* Return true if ELEM is in V. */
3332 bool
3333 vec_member (const_tree elem, vec<tree, va_gc> *v)
3335 unsigned ix;
3336 tree t;
3337 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3338 if (elem == t)
3339 return true;
3340 return false;
3343 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3344 NULL_TREE. */
3346 tree
3347 chain_index (int idx, tree chain)
3349 for (; chain && idx > 0; --idx)
3350 chain = TREE_CHAIN (chain);
3351 return chain;
3354 /* Return nonzero if ELEM is part of the chain CHAIN. */
3356 bool
3357 chain_member (const_tree elem, const_tree chain)
3359 while (chain)
3361 if (elem == chain)
3362 return true;
3363 chain = DECL_CHAIN (chain);
3366 return false;
3369 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3370 We expect a null pointer to mark the end of the chain.
3371 This is the Lisp primitive `length'. */
3374 list_length (const_tree t)
3376 const_tree p = t;
3377 #ifdef ENABLE_TREE_CHECKING
3378 const_tree q = t;
3379 #endif
3380 int len = 0;
3382 while (p)
3384 p = TREE_CHAIN (p);
3385 #ifdef ENABLE_TREE_CHECKING
3386 if (len % 2)
3387 q = TREE_CHAIN (q);
3388 gcc_assert (p != q);
3389 #endif
3390 len++;
3393 return len;
3396 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3397 UNION_TYPE TYPE, or NULL_TREE if none. */
3399 tree
3400 first_field (const_tree type)
3402 tree t = TYPE_FIELDS (type);
3403 while (t && TREE_CODE (t) != FIELD_DECL)
3404 t = TREE_CHAIN (t);
3405 return t;
3408 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3409 UNION_TYPE TYPE, or NULL_TREE if none. */
3411 tree
3412 last_field (const_tree type)
3414 tree last = NULL_TREE;
3416 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3418 if (TREE_CODE (fld) != FIELD_DECL)
3419 continue;
3421 last = fld;
3424 return last;
3427 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3428 by modifying the last node in chain 1 to point to chain 2.
3429 This is the Lisp primitive `nconc'. */
3431 tree
3432 chainon (tree op1, tree op2)
3434 tree t1;
3436 if (!op1)
3437 return op2;
3438 if (!op2)
3439 return op1;
3441 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3442 continue;
3443 TREE_CHAIN (t1) = op2;
3445 #ifdef ENABLE_TREE_CHECKING
3447 tree t2;
3448 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3449 gcc_assert (t2 != t1);
3451 #endif
3453 return op1;
3456 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3458 tree
3459 tree_last (tree chain)
3461 tree next;
3462 if (chain)
3463 while ((next = TREE_CHAIN (chain)))
3464 chain = next;
3465 return chain;
3468 /* Reverse the order of elements in the chain T,
3469 and return the new head of the chain (old last element). */
3471 tree
3472 nreverse (tree t)
3474 tree prev = 0, decl, next;
3475 for (decl = t; decl; decl = next)
3477 /* We shouldn't be using this function to reverse BLOCK chains; we
3478 have blocks_nreverse for that. */
3479 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3480 next = TREE_CHAIN (decl);
3481 TREE_CHAIN (decl) = prev;
3482 prev = decl;
3484 return prev;
3487 /* Return a newly created TREE_LIST node whose
3488 purpose and value fields are PARM and VALUE. */
3490 tree
3491 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3493 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3494 TREE_PURPOSE (t) = parm;
3495 TREE_VALUE (t) = value;
3496 return t;
3499 /* Build a chain of TREE_LIST nodes from a vector. */
3501 tree
3502 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3504 tree ret = NULL_TREE;
3505 tree *pp = &ret;
3506 unsigned int i;
3507 tree t;
3508 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3510 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3511 pp = &TREE_CHAIN (*pp);
3513 return ret;
3516 /* Return a newly created TREE_LIST node whose
3517 purpose and value fields are PURPOSE and VALUE
3518 and whose TREE_CHAIN is CHAIN. */
3520 tree
3521 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3523 tree node;
3525 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3526 memset (node, 0, sizeof (struct tree_common));
3528 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3530 TREE_SET_CODE (node, TREE_LIST);
3531 TREE_CHAIN (node) = chain;
3532 TREE_PURPOSE (node) = purpose;
3533 TREE_VALUE (node) = value;
3534 return node;
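/* A usage sketch, kept disabled; the helper is hypothetical.  Consing
   onto the front builds the TREE_LIST (1 2 3); list_length and nreverse,
   defined above, then measure and reverse the chain in place.  */
#if 0
static void
tree_list_example (void)
{
  tree list = tree_cons (NULL_TREE, build_int_cst (integer_type_node, 3),
			 NULL_TREE);
  list = tree_cons (NULL_TREE, build_int_cst (integer_type_node, 2), list);
  list = tree_cons (NULL_TREE, build_int_cst (integer_type_node, 1), list);
  gcc_checking_assert (list_length (list) == 3);
  list = nreverse (list);	/* Values are now 3, 2, 1.  */
}
#endif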
3537 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3538 trees. */
3540 vec<tree, va_gc> *
3541 ctor_to_vec (tree ctor)
3543 vec<tree, va_gc> *vec;
3544 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3545 unsigned int ix;
3546 tree val;
3548 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3549 vec->quick_push (val);
3551 return vec;
3554 /* Return the size nominally occupied by an object of type TYPE
3555 when it resides in memory. The value is measured in units of bytes,
3556 and its data type is that normally used for type sizes
3557 (which is the first type created by make_signed_type or
3558 make_unsigned_type). */
3560 tree
3561 size_in_bytes_loc (location_t loc, const_tree type)
3563 tree t;
3565 if (type == error_mark_node)
3566 return integer_zero_node;
3568 type = TYPE_MAIN_VARIANT (type);
3569 t = TYPE_SIZE_UNIT (type);
3571 if (t == 0)
3573 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3574 return size_zero_node;
3577 return t;
3580 /* Return the size of TYPE (in bytes) as a wide integer
3581 or return -1 if the size can vary or is larger than an integer. */
3583 HOST_WIDE_INT
3584 int_size_in_bytes (const_tree type)
3586 tree t;
3588 if (type == error_mark_node)
3589 return 0;
3591 type = TYPE_MAIN_VARIANT (type);
3592 t = TYPE_SIZE_UNIT (type);
3594 if (t && tree_fits_uhwi_p (t))
3595 return TREE_INT_CST_LOW (t);
3596 else
3597 return -1;
3600 /* Return the maximum size of TYPE (in bytes) as a wide integer
3601 or return -1 if the size can vary or is larger than an integer. */
3603 HOST_WIDE_INT
3604 max_int_size_in_bytes (const_tree type)
3606 HOST_WIDE_INT size = -1;
3607 tree size_tree;
3609 /* If this is an array type, check for a possible MAX_SIZE attached. */
3611 if (TREE_CODE (type) == ARRAY_TYPE)
3613 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3615 if (size_tree && tree_fits_uhwi_p (size_tree))
3616 size = tree_to_uhwi (size_tree);
3619 /* If we still haven't been able to get a size, see if the language
3620 can compute a maximum size. */
3622 if (size == -1)
3624 size_tree = lang_hooks.types.max_size (type);
3626 if (size_tree && tree_fits_uhwi_p (size_tree))
3627 size = tree_to_uhwi (size_tree);
3630 return size;
3633 /* Return the bit position of FIELD, in bits from the start of the record.
3634 This is a tree of type bitsizetype. */
3636 tree
3637 bit_position (const_tree field)
3639 return bit_from_pos (DECL_FIELD_OFFSET (field),
3640 DECL_FIELD_BIT_OFFSET (field));
3643 /* Return the byte position of FIELD, in bytes from the start of the record.
3644 This is a tree of type sizetype. */
3646 tree
3647 byte_position (const_tree field)
3649 return byte_from_pos (DECL_FIELD_OFFSET (field),
3650 DECL_FIELD_BIT_OFFSET (field));
3653 /* Likewise, but return as an integer. It must be representable in
3654 that way (since it could be a signed value, we don't have the
3655 option of returning -1 like int_size_in_bytes can). */
3657 HOST_WIDE_INT
3658 int_byte_position (const_tree field)
3660 return tree_to_shwi (byte_position (field));
3663 /* Return, as a tree node, the number of elements for TYPE (which is an
3664 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3666 tree
3667 array_type_nelts (const_tree type)
3669 tree index_type, min, max;
3671 /* If they did it with unspecified bounds, then we should have already
3672 given an error about it before we got here. */
3673 if (! TYPE_DOMAIN (type))
3674 return error_mark_node;
3676 index_type = TYPE_DOMAIN (type);
3677 min = TYPE_MIN_VALUE (index_type);
3678 max = TYPE_MAX_VALUE (index_type);
3680 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3681 if (!max)
3683 /* Zero-sized arrays are represented by the C FE as complete types with
3684 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3685 them as min 0, max -1. */
3686 if (COMPLETE_TYPE_P (type)
3687 && integer_zerop (TYPE_SIZE (type))
3688 && integer_zerop (min))
3689 return build_int_cst (TREE_TYPE (min), -1);
3691 return error_mark_node;
3694 return (integer_zerop (min)
3695 ? max
3696 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3699 /* If arg is static -- a reference to an object in static storage -- then
3700 return the object. This is not the same as the C meaning of `static'.
3701 If arg isn't static, return NULL. */
3703 tree
3704 staticp (tree arg)
3706 switch (TREE_CODE (arg))
3708 case FUNCTION_DECL:
3709 /* Nested functions are static, even though taking their address will
3710 involve a trampoline as we unnest the nested function and create
3711 the trampoline on the tree level. */
3712 return arg;
3714 case VAR_DECL:
3715 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3716 && ! DECL_THREAD_LOCAL_P (arg)
3717 && ! DECL_DLLIMPORT_P (arg)
3718 ? arg : NULL);
3720 case CONST_DECL:
3721 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3722 ? arg : NULL);
3724 case CONSTRUCTOR:
3725 return TREE_STATIC (arg) ? arg : NULL;
3727 case LABEL_DECL:
3728 case STRING_CST:
3729 return arg;
3731 case COMPONENT_REF:
3732 /* If the thing being referenced is not a field, then it is
3733 something language specific. */
3734 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3736 /* If we are referencing a bitfield, we can't evaluate an
3737 ADDR_EXPR at compile time and so it isn't a constant. */
3738 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3739 return NULL;
3741 return staticp (TREE_OPERAND (arg, 0));
3743 case BIT_FIELD_REF:
3744 return NULL;
3746 case INDIRECT_REF:
3747 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3749 case ARRAY_REF:
3750 case ARRAY_RANGE_REF:
3751 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3752 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3753 return staticp (TREE_OPERAND (arg, 0));
3754 else
3755 return NULL;
3757 case COMPOUND_LITERAL_EXPR:
3758 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3760 default:
3761 return NULL;
3768 /* Return whether OP is a DECL whose address is function-invariant. */
3770 bool
3771 decl_address_invariant_p (const_tree op)
3773 /* The conditions below are slightly less strict than those in
3774 staticp. */
3776 switch (TREE_CODE (op))
3778 case PARM_DECL:
3779 case RESULT_DECL:
3780 case LABEL_DECL:
3781 case FUNCTION_DECL:
3782 return true;
3784 case VAR_DECL:
3785 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3786 || DECL_THREAD_LOCAL_P (op)
3787 || DECL_CONTEXT (op) == current_function_decl
3788 || decl_function_context (op) == current_function_decl)
3789 return true;
3790 break;
3792 case CONST_DECL:
3793 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3794 || decl_function_context (op) == current_function_decl)
3795 return true;
3796 break;
3798 default:
3799 break;
3802 return false;
3805 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3807 bool
3808 decl_address_ip_invariant_p (const_tree op)
3810 /* The conditions below are slightly less strict than those in
3811 staticp. */
3813 switch (TREE_CODE (op))
3815 case LABEL_DECL:
3816 case FUNCTION_DECL:
3817 case STRING_CST:
3818 return true;
3820 case VAR_DECL:
3821 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3822 && !DECL_DLLIMPORT_P (op))
3823 || DECL_THREAD_LOCAL_P (op))
3824 return true;
3825 break;
3827 case CONST_DECL:
3828 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3829 return true;
3830 break;
3832 default:
3833 break;
3836 return false;
3840 /* Return true if T is function-invariant (internal function, does
3841 not handle arithmetic; that's handled in skip_simple_arithmetic and
3842 tree_invariant_p). */
3844 static bool
3845 tree_invariant_p_1 (tree t)
3847 tree op;
3849 if (TREE_CONSTANT (t)
3850 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3851 return true;
3853 switch (TREE_CODE (t))
3855 case SAVE_EXPR:
3856 return true;
3858 case ADDR_EXPR:
3859 op = TREE_OPERAND (t, 0);
3860 while (handled_component_p (op))
3862 switch (TREE_CODE (op))
3864 case ARRAY_REF:
3865 case ARRAY_RANGE_REF:
3866 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3867 || TREE_OPERAND (op, 2) != NULL_TREE
3868 || TREE_OPERAND (op, 3) != NULL_TREE)
3869 return false;
3870 break;
3872 case COMPONENT_REF:
3873 if (TREE_OPERAND (op, 2) != NULL_TREE)
3874 return false;
3875 break;
3877 default:;
3879 op = TREE_OPERAND (op, 0);
3882 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3884 default:
3885 break;
3888 return false;
3891 /* Return true if T is function-invariant. */
3893 bool
3894 tree_invariant_p (tree t)
3896 tree inner = skip_simple_arithmetic (t);
3897 return tree_invariant_p_1 (inner);
3900 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3901 Do this to any expression which may be used in more than one place,
3902 but must be evaluated only once.
3904 Normally, expand_expr would reevaluate the expression each time.
3905 Calling save_expr produces something that is evaluated and recorded
3906 the first time expand_expr is called on it. Subsequent calls to
3907 expand_expr just reuse the recorded value.
3909 The call to expand_expr that generates code that actually computes
3910 the value is the first call *at compile time*. Subsequent calls
3911 *at compile time* generate code to use the saved value.
3912 This produces the correct result provided that *at run time* control
3913 always flows through the insns made by the first expand_expr
3914 before reaching the other places where the save_expr was evaluated.
3915 You, the caller of save_expr, must make sure this is so.
3917 Constants, and certain read-only nodes, are returned with no
3918 SAVE_EXPR because that is safe. Expressions containing placeholders
3919 are not touched; see tree.def for an explanation of what these
3920 are used for. */
3922 tree
3923 save_expr (tree expr)
3925 tree inner;
3927 /* If the tree evaluates to a constant, then we don't want to hide that
3928 fact (i.e. this allows further folding, and direct checks for constants).
3929 However, a read-only object that has side effects cannot be bypassed.
3930 Since it is no problem to reevaluate literals, we just return the
3931 literal node. */
3932 inner = skip_simple_arithmetic (expr);
3933 if (TREE_CODE (inner) == ERROR_MARK)
3934 return inner;
3936 if (tree_invariant_p_1 (inner))
3937 return expr;
3939 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3940 it means that the size or offset of some field of an object depends on
3941 the value within another field.
3943 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3944 and some variable since it would then need to be both evaluated once and
3945 evaluated more than once. Front-ends must assure this case cannot
3946 happen by surrounding any such subexpressions in their own SAVE_EXPR
3947 and forcing evaluation at the proper time. */
3948 if (contains_placeholder_p (inner))
3949 return expr;
3951 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3953 /* This expression might be placed ahead of a jump to ensure that the
3954 value was computed on both sides of the jump. So make sure it isn't
3955 eliminated as dead. */
3956 TREE_SIDE_EFFECTS (expr) = 1;
3957 return expr;
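/* A usage sketch, kept disabled; the helper and its EXPR parameter are
   hypothetical (some expression handed over by a front end).  A
   non-invariant expression comes back wrapped in a SAVE_EXPR, while
   constants and other invariants are returned unchanged, so it is always
   safe to call save_expr on a value that will be used more than once.  */
#if 0
static tree
save_expr_example (tree expr)
{
  /* Literals are returned as-is...  */
  gcc_checking_assert (save_expr (integer_one_node) == integer_one_node);
  /* ...while EXPR is typically wrapped so it is evaluated only once.  */
  return save_expr (expr);
}
#endif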
3960 /* Look inside EXPR into any simple arithmetic operations. Return the
3961 outermost non-arithmetic or non-invariant node. */
3963 tree
3964 skip_simple_arithmetic (tree expr)
3966 /* We don't care about whether this can be used as an lvalue in this
3967 context. */
3968 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3969 expr = TREE_OPERAND (expr, 0);
3971 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3972 a constant, it will be more efficient to not make another SAVE_EXPR since
3973 it will allow better simplification and GCSE will be able to merge the
3974 computations if they actually occur. */
3975 while (true)
3977 if (UNARY_CLASS_P (expr))
3978 expr = TREE_OPERAND (expr, 0);
3979 else if (BINARY_CLASS_P (expr))
3981 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3982 expr = TREE_OPERAND (expr, 0);
3983 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3984 expr = TREE_OPERAND (expr, 1);
3985 else
3986 break;
3988 else
3989 break;
3992 return expr;
3995 /* Look inside EXPR into simple arithmetic operations involving constants.
3996 Return the outermost non-arithmetic or non-constant node. */
3998 tree
3999 skip_simple_constant_arithmetic (tree expr)
4001 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4002 expr = TREE_OPERAND (expr, 0);
4004 while (true)
4006 if (UNARY_CLASS_P (expr))
4007 expr = TREE_OPERAND (expr, 0);
4008 else if (BINARY_CLASS_P (expr))
4010 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4011 expr = TREE_OPERAND (expr, 0);
4012 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4013 expr = TREE_OPERAND (expr, 1);
4014 else
4015 break;
4017 else
4018 break;
4021 return expr;
4024 /* Return which tree structure is used by T. */
4026 enum tree_node_structure_enum
4027 tree_node_structure (const_tree t)
4029 const enum tree_code code = TREE_CODE (t);
4030 return tree_node_structure_for_code (code);
4033 /* Set various status flags when building a CALL_EXPR object T. */
4035 static void
4036 process_call_operands (tree t)
4038 bool side_effects = TREE_SIDE_EFFECTS (t);
4039 bool read_only = false;
4040 int i = call_expr_flags (t);
4042 /* Calls have side-effects, except those to const or pure functions. */
4043 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4044 side_effects = true;
4045 /* Propagate TREE_READONLY of arguments for const functions. */
4046 if (i & ECF_CONST)
4047 read_only = true;
4049 if (!side_effects || read_only)
4050 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4052 tree op = TREE_OPERAND (t, i);
4053 if (op && TREE_SIDE_EFFECTS (op))
4054 side_effects = true;
4055 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4056 read_only = false;
4059 TREE_SIDE_EFFECTS (t) = side_effects;
4060 TREE_READONLY (t) = read_only;
4063 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4064 size or offset that depends on a field within a record. */
4066 bool
4067 contains_placeholder_p (const_tree exp)
4069 enum tree_code code;
4071 if (!exp)
4072 return 0;
4074 code = TREE_CODE (exp);
4075 if (code == PLACEHOLDER_EXPR)
4076 return 1;
4078 switch (TREE_CODE_CLASS (code))
4080 case tcc_reference:
4081 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4082 position computations, since they will be converted into a
4083 WITH_RECORD_EXPR involving the reference, which we assume
4084 here will be valid. */
4085 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4087 case tcc_exceptional:
4088 if (code == TREE_LIST)
4089 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4090 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4091 break;
4093 case tcc_unary:
4094 case tcc_binary:
4095 case tcc_comparison:
4096 case tcc_expression:
4097 switch (code)
4099 case COMPOUND_EXPR:
4100 /* Ignoring the first operand isn't quite right, but works best. */
4101 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4103 case COND_EXPR:
4104 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4105 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4106 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4108 case SAVE_EXPR:
4109 /* The save_expr function never wraps anything containing
4110 a PLACEHOLDER_EXPR. */
4111 return 0;
4113 default:
4114 break;
4117 switch (TREE_CODE_LENGTH (code))
4119 case 1:
4120 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4121 case 2:
4122 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4123 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4124 default:
4125 return 0;
4128 case tcc_vl_exp:
4129 switch (code)
4131 case CALL_EXPR:
4133 const_tree arg;
4134 const_call_expr_arg_iterator iter;
4135 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4136 if (CONTAINS_PLACEHOLDER_P (arg))
4137 return 1;
4138 return 0;
4140 default:
4141 return 0;
4144 default:
4145 return 0;
4147 return 0;
4150 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4151 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4152 field positions. */
4154 static bool
4155 type_contains_placeholder_1 (const_tree type)
4157 /* If the size contains a placeholder or the parent type (component type in
4158 the case of arrays) involves a placeholder, this type does. */
4159 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4160 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4161 || (!POINTER_TYPE_P (type)
4162 && TREE_TYPE (type)
4163 && type_contains_placeholder_p (TREE_TYPE (type))))
4164 return true;
4166 /* Now do type-specific checks. Note that the last part of the check above
4167 greatly limits what we have to do below. */
4168 switch (TREE_CODE (type))
4170 case VOID_TYPE:
4171 case OPAQUE_TYPE:
4172 case COMPLEX_TYPE:
4173 case ENUMERAL_TYPE:
4174 case BOOLEAN_TYPE:
4175 case POINTER_TYPE:
4176 case OFFSET_TYPE:
4177 case REFERENCE_TYPE:
4178 case METHOD_TYPE:
4179 case FUNCTION_TYPE:
4180 case VECTOR_TYPE:
4181 case NULLPTR_TYPE:
4182 return false;
4184 case INTEGER_TYPE:
4185 case REAL_TYPE:
4186 case FIXED_POINT_TYPE:
4187 /* Here we just check the bounds. */
4188 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4189 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4191 case ARRAY_TYPE:
4192 /* We have already checked the component type above, so just check
4193 the domain type. Flexible array members have a null domain. */
4194 return TYPE_DOMAIN (type) ?
4195 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4197 case RECORD_TYPE:
4198 case UNION_TYPE:
4199 case QUAL_UNION_TYPE:
4201 tree field;
4203 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4204 if (TREE_CODE (field) == FIELD_DECL
4205 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4206 || (TREE_CODE (type) == QUAL_UNION_TYPE
4207 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4208 || type_contains_placeholder_p (TREE_TYPE (field))))
4209 return true;
4211 return false;
4214 default:
4215 gcc_unreachable ();
4219 /* Wrapper around above function used to cache its result. */
4221 bool
4222 type_contains_placeholder_p (tree type)
4224 bool result;
4226 /* If the contains_placeholder_bits field has been initialized,
4227 then we know the answer. */
4228 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4229 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4231 /* Indicate that we've seen this type node, and the answer is false.
4232 This is what we want to return if we run into recursion via fields. */
4233 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4235 /* Compute the real value. */
4236 result = type_contains_placeholder_1 (type);
4238 /* Store the real value. */
4239 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4241 return result;
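/* In other words the cached field is ternary: 0 means "not computed yet",
   1 means "computed, answer is false" and 2 means "computed, answer is
   true", hence the +1/-1 above.  Caller-visible behavior, as a sketch
   (REC is a hypothetical record type):

     bool first  = type_contains_placeholder_p (rec);
     bool second = type_contains_placeholder_p (rec);
     gcc_checking_assert (first == second);

   The first call computes and caches the answer and the second is served
   from the cache; the pre-seeded value 1 is what a recursive query through
   REC's own fields observes while the outer call is still running, which
   is why self-referential types terminate with a "false" answer.  */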
4244 /* Push tree EXP onto vector QUEUE if it is not already present. */
4246 static void
4247 push_without_duplicates (tree exp, vec<tree> *queue)
4249 unsigned int i;
4250 tree iter;
4252 FOR_EACH_VEC_ELT (*queue, i, iter)
4253 if (simple_cst_equal (iter, exp) == 1)
4254 break;
4256 if (!iter)
4257 queue->safe_push (exp);
4260 /* Given a tree EXP, find all occurrences of references to fields
4261 in a PLACEHOLDER_EXPR and place them in vector REFS without
4262 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4263 we assume here that EXP contains only arithmetic expressions
4264 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4265 argument list. */
4267 void
4268 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4270 enum tree_code code = TREE_CODE (exp);
4271 tree inner;
4272 int i;
4274 /* We handle TREE_LIST and COMPONENT_REF separately. */
4275 if (code == TREE_LIST)
4277 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4278 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4280 else if (code == COMPONENT_REF)
4282 for (inner = TREE_OPERAND (exp, 0);
4283 REFERENCE_CLASS_P (inner);
4284 inner = TREE_OPERAND (inner, 0))
4287 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4288 push_without_duplicates (exp, refs);
4289 else
4290 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4292 else
4293 switch (TREE_CODE_CLASS (code))
4295 case tcc_constant:
4296 break;
4298 case tcc_declaration:
4299 /* Variables allocated to static storage can stay. */
4300 if (!TREE_STATIC (exp))
4301 push_without_duplicates (exp, refs);
4302 break;
4304 case tcc_expression:
4305 /* This is the pattern built in ada/make_aligning_type. */
4306 if (code == ADDR_EXPR
4307 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4309 push_without_duplicates (exp, refs);
4310 break;
4313 /* Fall through. */
4315 case tcc_exceptional:
4316 case tcc_unary:
4317 case tcc_binary:
4318 case tcc_comparison:
4319 case tcc_reference:
4320 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4321 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4322 break;
4324 case tcc_vl_exp:
4325 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4326 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4327 break;
4329 default:
4330 gcc_unreachable ();
4334 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4335 return a tree with all occurrences of references to F in a
4336 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4337 CONST_DECLs. Note that we assume here that EXP contains only
4338 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4339 occurring only in their argument list. */
4341 tree
4342 substitute_in_expr (tree exp, tree f, tree r)
4344 enum tree_code code = TREE_CODE (exp);
4345 tree op0, op1, op2, op3;
4346 tree new_tree;
4348 /* We handle TREE_LIST and COMPONENT_REF separately. */
4349 if (code == TREE_LIST)
4351 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4352 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4353 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4354 return exp;
4356 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4358 else if (code == COMPONENT_REF)
4360 tree inner;
4362 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4363 and it is the right field, replace it with R. */
4364 for (inner = TREE_OPERAND (exp, 0);
4365 REFERENCE_CLASS_P (inner);
4366 inner = TREE_OPERAND (inner, 0))
4369 /* The field. */
4370 op1 = TREE_OPERAND (exp, 1);
4372 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4373 return r;
4375 /* If this expression hasn't been completed yet, leave it alone. */
4376 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4377 return exp;
4379 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4380 if (op0 == TREE_OPERAND (exp, 0))
4381 return exp;
4383 new_tree
4384 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4386 else
4387 switch (TREE_CODE_CLASS (code))
4389 case tcc_constant:
4390 return exp;
4392 case tcc_declaration:
4393 if (exp == f)
4394 return r;
4395 else
4396 return exp;
4398 case tcc_expression:
4399 if (exp == f)
4400 return r;
4402 /* Fall through. */
4404 case tcc_exceptional:
4405 case tcc_unary:
4406 case tcc_binary:
4407 case tcc_comparison:
4408 case tcc_reference:
4409 switch (TREE_CODE_LENGTH (code))
4411 case 0:
4412 return exp;
4414 case 1:
4415 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4416 if (op0 == TREE_OPERAND (exp, 0))
4417 return exp;
4419 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4420 break;
4422 case 2:
4423 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4424 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4426 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4427 return exp;
4429 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4430 break;
4432 case 3:
4433 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4434 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4435 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4437 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4438 && op2 == TREE_OPERAND (exp, 2))
4439 return exp;
4441 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4442 break;
4444 case 4:
4445 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4446 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4447 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4448 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4450 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4451 && op2 == TREE_OPERAND (exp, 2)
4452 && op3 == TREE_OPERAND (exp, 3))
4453 return exp;
4455 new_tree
4456 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4457 break;
4459 default:
4460 gcc_unreachable ();
4462 break;
4464 case tcc_vl_exp:
4466 int i;
4468 new_tree = NULL_TREE;
4470 /* If we are trying to replace F with a constant or with another
4471 instance of one of the arguments of the call, inline back
4472 functions which do nothing else than computing a value from
4473 the arguments they are passed. This makes it possible to
4474 fold partially or entirely the replacement expression. */
4475 if (code == CALL_EXPR)
4477 bool maybe_inline = false;
4478 if (CONSTANT_CLASS_P (r))
4479 maybe_inline = true;
4480 else
4481 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4482 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4484 maybe_inline = true;
4485 break;
4487 if (maybe_inline)
4489 tree t = maybe_inline_call_in_expr (exp);
4490 if (t)
4491 return SUBSTITUTE_IN_EXPR (t, f, r);
4495 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4497 tree op = TREE_OPERAND (exp, i);
4498 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4499 if (new_op != op)
4501 if (!new_tree)
4502 new_tree = copy_node (exp);
4503 TREE_OPERAND (new_tree, i) = new_op;
4507 if (new_tree)
4509 new_tree = fold (new_tree);
4510 if (TREE_CODE (new_tree) == CALL_EXPR)
4511 process_call_operands (new_tree);
4513 else
4514 return exp;
4516 break;
4518 default:
4519 gcc_unreachable ();
4522 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4524 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4525 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4527 return new_tree;
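/* A usage sketch with hypothetical trees (not taken from any front end):
   if EXP is the size expression "PLACEHOLDER_EXPR.len * 4" for a record
   type with a FIELD_DECL LEN_FIELD, then

     tree size = substitute_in_expr (exp, len_field,
                                     build_int_cst (sizetype, 10));

   yields the constant 40: the COMPONENT_REF of LEN_FIELD on the
   PLACEHOLDER_EXPR is replaced by the constant, and the surrounding
   MULT_EXPR is refolded by the fold_build2 call above.  */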
4530 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4531 for it within OBJ, a tree that is an object or a chain of references. */
4533 tree
4534 substitute_placeholder_in_expr (tree exp, tree obj)
4536 enum tree_code code = TREE_CODE (exp);
4537 tree op0, op1, op2, op3;
4538 tree new_tree;
4540 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4541 in the chain of OBJ. */
4542 if (code == PLACEHOLDER_EXPR)
4544 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4545 tree elt;
4547 for (elt = obj; elt != 0;
4548 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4549 || TREE_CODE (elt) == COND_EXPR)
4550 ? TREE_OPERAND (elt, 1)
4551 : (REFERENCE_CLASS_P (elt)
4552 || UNARY_CLASS_P (elt)
4553 || BINARY_CLASS_P (elt)
4554 || VL_EXP_CLASS_P (elt)
4555 || EXPRESSION_CLASS_P (elt))
4556 ? TREE_OPERAND (elt, 0) : 0))
4557 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4558 return elt;
4560 for (elt = obj; elt != 0;
4561 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4562 || TREE_CODE (elt) == COND_EXPR)
4563 ? TREE_OPERAND (elt, 1)
4564 : (REFERENCE_CLASS_P (elt)
4565 || UNARY_CLASS_P (elt)
4566 || BINARY_CLASS_P (elt)
4567 || VL_EXP_CLASS_P (elt)
4568 || EXPRESSION_CLASS_P (elt))
4569 ? TREE_OPERAND (elt, 0) : 0))
4570 if (POINTER_TYPE_P (TREE_TYPE (elt))
4571 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4572 == need_type))
4573 return fold_build1 (INDIRECT_REF, need_type, elt);
4575 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4576 survives until RTL generation, there will be an error. */
4577 return exp;
4580 /* TREE_LIST is special because we need to look at TREE_VALUE
4581 and TREE_CHAIN, not TREE_OPERANDS. */
4582 else if (code == TREE_LIST)
4584 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4585 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4586 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4587 return exp;
4589 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4591 else
4592 switch (TREE_CODE_CLASS (code))
4594 case tcc_constant:
4595 case tcc_declaration:
4596 return exp;
4598 case tcc_exceptional:
4599 case tcc_unary:
4600 case tcc_binary:
4601 case tcc_comparison:
4602 case tcc_expression:
4603 case tcc_reference:
4604 case tcc_statement:
4605 switch (TREE_CODE_LENGTH (code))
4607 case 0:
4608 return exp;
4610 case 1:
4611 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4612 if (op0 == TREE_OPERAND (exp, 0))
4613 return exp;
4615 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4616 break;
4618 case 2:
4619 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4620 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4622 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4623 return exp;
4625 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4626 break;
4628 case 3:
4629 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4630 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4631 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4633 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4634 && op2 == TREE_OPERAND (exp, 2))
4635 return exp;
4637 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4638 break;
4640 case 4:
4641 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4642 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4643 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4644 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4646 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4647 && op2 == TREE_OPERAND (exp, 2)
4648 && op3 == TREE_OPERAND (exp, 3))
4649 return exp;
4651 new_tree
4652 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4653 break;
4655 default:
4656 gcc_unreachable ();
4658 break;
4660 case tcc_vl_exp:
4662 int i;
4664 new_tree = NULL_TREE;
4666 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4668 tree op = TREE_OPERAND (exp, i);
4669 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4670 if (new_op != op)
4672 if (!new_tree)
4673 new_tree = copy_node (exp);
4674 TREE_OPERAND (new_tree, i) = new_op;
4678 if (new_tree)
4680 new_tree = fold (new_tree);
4681 if (TREE_CODE (new_tree) == CALL_EXPR)
4682 process_call_operands (new_tree);
4684 else
4685 return exp;
4687 break;
4689 default:
4690 gcc_unreachable ();
4693 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4695 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4696 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4698 return new_tree;
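/* A companion sketch to the one after substitute_in_expr: here the whole
   PLACEHOLDER_EXPR is replaced by a concrete object.  For a variable VAR
   of a self-referential record type REC (hypothetical names),

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (rec), var);

   walks the size expression, finds in the chain rooted at VAR an object
   whose type matches the PLACEHOLDER_EXPR's type, and rebuilds the
   expression with VAR substituted for the placeholder.  */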
4702 /* Subroutine of stabilize_reference; this is called for subtrees of
4703 references. Any expression with side-effects must be put in a SAVE_EXPR
4704 to ensure that it is only evaluated once.
4706 We don't put SAVE_EXPR nodes around everything, because assigning very
4707 simple expressions to temporaries causes us to miss good opportunities
4708 for optimizations. Among other things, the opportunity to fold in the
4709 addition of a constant into an addressing mode often gets lost, e.g.
4710 "y[i+1] += x;". In general, we take the approach that we should not make
4711 an assignment unless we are forced into it - i.e., that any non-side effect
4712 operator should be allowed, and that cse should take care of coalescing
4713 multiple utterances of the same expression should that prove fruitful. */
4715 static tree
4716 stabilize_reference_1 (tree e)
4718 tree result;
4719 enum tree_code code = TREE_CODE (e);
4721 /* We cannot ignore a const expression because it might be a reference
4722 to a const array whose index contains side-effects. But we can
4723 ignore things that are actually constant or that have already been
4724 handled by this function. */
4726 if (tree_invariant_p (e))
4727 return e;
4729 switch (TREE_CODE_CLASS (code))
4731 case tcc_exceptional:
4732 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4733 have side-effects. */
4734 if (code == STATEMENT_LIST)
4735 return save_expr (e);
4736 /* FALLTHRU */
4737 case tcc_type:
4738 case tcc_declaration:
4739 case tcc_comparison:
4740 case tcc_statement:
4741 case tcc_expression:
4742 case tcc_reference:
4743 case tcc_vl_exp:
4744 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4745 so that it will only be evaluated once. */
4746 /* The reference (r) and comparison (<) classes could be handled as
4747 below, but it is generally faster to only evaluate them once. */
4748 if (TREE_SIDE_EFFECTS (e))
4749 return save_expr (e);
4750 return e;
4752 case tcc_constant:
4753 /* Constants need no processing. In fact, we should never reach
4754 here. */
4755 return e;
4757 case tcc_binary:
4758 /* Division is slow and tends to be compiled with jumps,
4759 especially the division by powers of 2 that is often
4760 found inside of an array reference. So do it just once. */
4761 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4762 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4763 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4764 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4765 return save_expr (e);
4766 /* Recursively stabilize each operand. */
4767 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4768 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4769 break;
4771 case tcc_unary:
4772 /* Recursively stabilize each operand. */
4773 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4774 break;
4776 default:
4777 gcc_unreachable ();
4780 TREE_TYPE (result) = TREE_TYPE (e);
4781 TREE_READONLY (result) = TREE_READONLY (e);
4782 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4783 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4785 return result;
4788 /* Stabilize a reference so that we can use it any number of times
4789 without causing its operands to be evaluated more than once.
4790 Returns the stabilized reference. This works by means of save_expr,
4791 so see the caveats in the comments about save_expr.
4793 Also allows conversion expressions whose operands are references.
4794 Any other kind of expression is returned unchanged. */
4796 tree
4797 stabilize_reference (tree ref)
4799 tree result;
4800 enum tree_code code = TREE_CODE (ref);
4802 switch (code)
4804 case VAR_DECL:
4805 case PARM_DECL:
4806 case RESULT_DECL:
4807 /* No action is needed in this case. */
4808 return ref;
4810 CASE_CONVERT:
4811 case FLOAT_EXPR:
4812 case FIX_TRUNC_EXPR:
4813 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4814 break;
4816 case INDIRECT_REF:
4817 result = build_nt (INDIRECT_REF,
4818 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4819 break;
4821 case COMPONENT_REF:
4822 result = build_nt (COMPONENT_REF,
4823 stabilize_reference (TREE_OPERAND (ref, 0)),
4824 TREE_OPERAND (ref, 1), NULL_TREE);
4825 break;
4827 case BIT_FIELD_REF:
4828 result = build_nt (BIT_FIELD_REF,
4829 stabilize_reference (TREE_OPERAND (ref, 0)),
4830 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4831 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4832 break;
4834 case ARRAY_REF:
4835 result = build_nt (ARRAY_REF,
4836 stabilize_reference (TREE_OPERAND (ref, 0)),
4837 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4838 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4839 break;
4841 case ARRAY_RANGE_REF:
4842 result = build_nt (ARRAY_RANGE_REF,
4843 stabilize_reference (TREE_OPERAND (ref, 0)),
4844 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4845 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4846 break;
4848 case COMPOUND_EXPR:
4849 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4850 it wouldn't be ignored. This matters when dealing with
4851 volatiles. */
4852 return stabilize_reference_1 (ref);
4854 /* If arg isn't a kind of lvalue we recognize, make no change.
4855 Caller should recognize the error for an invalid lvalue. */
4856 default:
4857 return ref;
4859 case ERROR_MARK:
4860 return error_mark_node;
4863 TREE_TYPE (result) = TREE_TYPE (ref);
4864 TREE_READONLY (result) = TREE_READONLY (ref);
4865 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4866 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4867 protected_set_expr_location (result, EXPR_LOCATION (ref));
4869 return result;
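/* A usage sketch (hypothetical front-end code): when expanding something
   like "a[i++] += x", the reference must be evaluated only once even
   though it is used both to read and to write:

     tree lhs  = stabilize_reference (array_ref);
     tree sum  = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, x);
     tree asgn = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);

   The side-effecting index ends up wrapped in a SAVE_EXPR, so both uses
   of LHS share a single evaluation of "i++".  */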
4872 /* Low-level constructors for expressions. */
4874 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4875 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4877 void
4878 recompute_tree_invariant_for_addr_expr (tree t)
4880 tree node;
4881 bool tc = true, se = false;
4883 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4885 /* We start out assuming this address is both invariant and constant, and
4886 that it has no side effects. Now go down any handled components and see if
4887 any of them involve offsets that are either non-constant or non-invariant.
4888 Also check for side-effects.
4890 ??? Note that this code makes no attempt to deal with the case where
4891 taking the address of something causes a copy due to misalignment. */
4893 #define UPDATE_FLAGS(NODE) \
4894 do { tree _node = (NODE); \
4895 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4896 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4898 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4899 node = TREE_OPERAND (node, 0))
4901 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4902 array reference (probably made temporarily by the G++ front end),
4903 so ignore all the operands. */
4904 if ((TREE_CODE (node) == ARRAY_REF
4905 || TREE_CODE (node) == ARRAY_RANGE_REF)
4906 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4908 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4909 if (TREE_OPERAND (node, 2))
4910 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4911 if (TREE_OPERAND (node, 3))
4912 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4914 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4915 FIELD_DECL, apparently. The G++ front end can put something else
4916 there, at least temporarily. */
4917 else if (TREE_CODE (node) == COMPONENT_REF
4918 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4920 if (TREE_OPERAND (node, 2))
4921 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4925 node = lang_hooks.expr_to_decl (node, &tc, &se);
4927 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4928 the address, since &(*a)->b is a form of addition. If it's a constant, the
4929 address is constant too. If it's a decl, its address is constant if the
4930 decl is static. Everything else is not constant and, furthermore,
4931 taking the address of a volatile variable is not volatile. */
4932 if (TREE_CODE (node) == INDIRECT_REF
4933 || TREE_CODE (node) == MEM_REF)
4934 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4935 else if (CONSTANT_CLASS_P (node))
4937 else if (DECL_P (node))
4938 tc &= (staticp (node) != NULL_TREE);
4939 else
4941 tc = false;
4942 se |= TREE_SIDE_EFFECTS (node);
4946 TREE_CONSTANT (t) = tc;
4947 TREE_SIDE_EFFECTS (t) = se;
4948 #undef UPDATE_FLAGS
4951 /* Build an expression of code CODE, data type TYPE, and operands as
4952 specified. Expressions and reference nodes can be created this way.
4953 Constants, decls, types and misc nodes cannot be.
4955 We define six non-variadic functions, build0 through build5, taking 0 to 5
4956 operands. This is enough for all extant tree codes. */
4958 tree
4959 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4961 tree t;
4963 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4965 t = make_node (code PASS_MEM_STAT);
4966 TREE_TYPE (t) = tt;
4968 return t;
4971 tree
4972 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4974 int length = sizeof (struct tree_exp);
4975 tree t;
4977 record_node_allocation_statistics (code, length);
4979 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4981 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4983 memset (t, 0, sizeof (struct tree_common));
4985 TREE_SET_CODE (t, code);
4987 TREE_TYPE (t) = type;
4988 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4989 TREE_OPERAND (t, 0) = node;
4990 if (node && !TYPE_P (node))
4992 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4993 TREE_READONLY (t) = TREE_READONLY (node);
4996 if (TREE_CODE_CLASS (code) == tcc_statement)
4998 if (code != DEBUG_BEGIN_STMT)
4999 TREE_SIDE_EFFECTS (t) = 1;
5001 else switch (code)
5003 case VA_ARG_EXPR:
5004 /* All of these have side-effects, no matter what their
5005 operands are. */
5006 TREE_SIDE_EFFECTS (t) = 1;
5007 TREE_READONLY (t) = 0;
5008 break;
5010 case INDIRECT_REF:
5011 /* Whether a dereference is readonly has nothing to do with whether
5012 its operand is readonly. */
5013 TREE_READONLY (t) = 0;
5014 break;
5016 case ADDR_EXPR:
5017 if (node)
5018 recompute_tree_invariant_for_addr_expr (t);
5019 break;
5021 default:
5022 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5023 && node && !TYPE_P (node)
5024 && TREE_CONSTANT (node))
5025 TREE_CONSTANT (t) = 1;
5026 if (TREE_CODE_CLASS (code) == tcc_reference
5027 && node && TREE_THIS_VOLATILE (node))
5028 TREE_THIS_VOLATILE (t) = 1;
5029 break;
5032 return t;
5035 #define PROCESS_ARG(N) \
5036 do { \
5037 TREE_OPERAND (t, N) = arg##N; \
5038 if (arg##N &&!TYPE_P (arg##N)) \
5040 if (TREE_SIDE_EFFECTS (arg##N)) \
5041 side_effects = 1; \
5042 if (!TREE_READONLY (arg##N) \
5043 && !CONSTANT_CLASS_P (arg##N)) \
5044 (void) (read_only = 0); \
5045 if (!TREE_CONSTANT (arg##N)) \
5046 (void) (constant = 0); \
5048 } while (0)
5050 tree
5051 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5053 bool constant, read_only, side_effects, div_by_zero;
5054 tree t;
5056 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5058 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5059 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5060 /* When sizetype precision doesn't match that of pointers
5061 we need to be able to build explicit extensions or truncations
5062 of the offset argument. */
5063 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5064 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5065 && TREE_CODE (arg1) == INTEGER_CST);
5067 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5068 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5069 && ptrofftype_p (TREE_TYPE (arg1)));
5071 t = make_node (code PASS_MEM_STAT);
5072 TREE_TYPE (t) = tt;
5074 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5075 result based on those same flags for the arguments. But if the
5076 arguments aren't really even `tree' expressions, we shouldn't be trying
5077 to do this. */
5079 /* Expressions without side effects may be constant if their
5080 arguments are as well. */
5081 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5082 || TREE_CODE_CLASS (code) == tcc_binary);
5083 read_only = 1;
5084 side_effects = TREE_SIDE_EFFECTS (t);
5086 switch (code)
5088 case TRUNC_DIV_EXPR:
5089 case CEIL_DIV_EXPR:
5090 case FLOOR_DIV_EXPR:
5091 case ROUND_DIV_EXPR:
5092 case EXACT_DIV_EXPR:
5093 case CEIL_MOD_EXPR:
5094 case FLOOR_MOD_EXPR:
5095 case ROUND_MOD_EXPR:
5096 case TRUNC_MOD_EXPR:
5097 div_by_zero = integer_zerop (arg1);
5098 break;
5099 default:
5100 div_by_zero = false;
5103 PROCESS_ARG (0);
5104 PROCESS_ARG (1);
5106 TREE_SIDE_EFFECTS (t) = side_effects;
5107 if (code == MEM_REF)
5109 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5111 tree o = TREE_OPERAND (arg0, 0);
5112 TREE_READONLY (t) = TREE_READONLY (o);
5113 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5116 else
5118 TREE_READONLY (t) = read_only;
5119 /* Don't mark X / 0 as constant. */
5120 TREE_CONSTANT (t) = constant && !div_by_zero;
5121 TREE_THIS_VOLATILE (t)
5122 = (TREE_CODE_CLASS (code) == tcc_reference
5123 && arg0 && TREE_THIS_VOLATILE (arg0));
5126 return t;
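/* For example, a hypothetical caller adding a byte offset to a pointer PTR
   must use POINTER_PLUS_EXPR with a ptrofftype offset (usually sizetype):

     tree addr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                         size_int (4));

   which is what the POINTER_PLUS_EXPR assertion above enforces; a
   PLUS_EXPR whose result type is a pointer is only accepted when both
   operands are INTEGER_CSTs (given matching sizetype precision).  */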
5130 tree
5131 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5132 tree arg2 MEM_STAT_DECL)
5134 bool constant, read_only, side_effects;
5135 tree t;
5137 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5138 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5140 t = make_node (code PASS_MEM_STAT);
5141 TREE_TYPE (t) = tt;
5143 read_only = 1;
5145 /* As a special exception, if COND_EXPR has NULL branches, we
5146 assume that it is a gimple statement and always consider
5147 it to have side effects. */
5148 if (code == COND_EXPR
5149 && tt == void_type_node
5150 && arg1 == NULL_TREE
5151 && arg2 == NULL_TREE)
5152 side_effects = true;
5153 else
5154 side_effects = TREE_SIDE_EFFECTS (t);
5156 PROCESS_ARG (0);
5157 PROCESS_ARG (1);
5158 PROCESS_ARG (2);
5160 if (code == COND_EXPR)
5161 TREE_READONLY (t) = read_only;
5163 TREE_SIDE_EFFECTS (t) = side_effects;
5164 TREE_THIS_VOLATILE (t)
5165 = (TREE_CODE_CLASS (code) == tcc_reference
5166 && arg0 && TREE_THIS_VOLATILE (arg0));
5168 return t;
5171 tree
5172 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5173 tree arg2, tree arg3 MEM_STAT_DECL)
5175 bool constant, read_only, side_effects;
5176 tree t;
5178 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5180 t = make_node (code PASS_MEM_STAT);
5181 TREE_TYPE (t) = tt;
5183 side_effects = TREE_SIDE_EFFECTS (t);
5185 PROCESS_ARG (0);
5186 PROCESS_ARG (1);
5187 PROCESS_ARG (2);
5188 PROCESS_ARG (3);
5190 TREE_SIDE_EFFECTS (t) = side_effects;
5191 TREE_THIS_VOLATILE (t)
5192 = (TREE_CODE_CLASS (code) == tcc_reference
5193 && arg0 && TREE_THIS_VOLATILE (arg0));
5195 return t;
5198 tree
5199 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5200 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5202 bool constant, read_only, side_effects;
5203 tree t;
5205 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5207 t = make_node (code PASS_MEM_STAT);
5208 TREE_TYPE (t) = tt;
5210 side_effects = TREE_SIDE_EFFECTS (t);
5212 PROCESS_ARG (0);
5213 PROCESS_ARG (1);
5214 PROCESS_ARG (2);
5215 PROCESS_ARG (3);
5216 PROCESS_ARG (4);
5218 TREE_SIDE_EFFECTS (t) = side_effects;
5219 if (code == TARGET_MEM_REF)
5221 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5223 tree o = TREE_OPERAND (arg0, 0);
5224 TREE_READONLY (t) = TREE_READONLY (o);
5225 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5228 else
5229 TREE_THIS_VOLATILE (t)
5230 = (TREE_CODE_CLASS (code) == tcc_reference
5231 && arg0 && TREE_THIS_VOLATILE (arg0));
5233 return t;
5236 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5237 on the pointer PTR. */
5239 tree
5240 build_simple_mem_ref_loc (location_t loc, tree ptr)
5242 poly_int64 offset = 0;
5243 tree ptype = TREE_TYPE (ptr);
5244 tree tem;
5245 /* For convenience allow addresses that collapse to a simple base
5246 and offset. */
5247 if (TREE_CODE (ptr) == ADDR_EXPR
5248 && (handled_component_p (TREE_OPERAND (ptr, 0))
5249 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5251 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5252 gcc_assert (ptr);
5253 if (TREE_CODE (ptr) == MEM_REF)
5255 offset += mem_ref_offset (ptr).force_shwi ();
5256 ptr = TREE_OPERAND (ptr, 0);
5258 else
5259 ptr = build_fold_addr_expr (ptr);
5260 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5262 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5263 ptr, build_int_cst (ptype, offset));
5264 SET_EXPR_LOCATION (tem, loc);
5265 return tem;
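/* A usage sketch: for a pointer SSA name P of type "int *" (hypothetical),

     tree deref = build_simple_mem_ref_loc (input_location, p);

   produces a MEM_REF of type "int" whose second operand is a zero offset
   of the pointer type; if P is instead the address of a component such as
   "&a.b", the address collapses to the base plus a constant byte offset
   as described above.  */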
5268 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5270 poly_offset_int
5271 mem_ref_offset (const_tree t)
5273 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5274 SIGNED);
5277 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5278 offsetted by OFFSET units. */
5280 tree
5281 build_invariant_address (tree type, tree base, poly_int64 offset)
5283 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5284 build_fold_addr_expr (base),
5285 build_int_cst (ptr_type_node, offset));
5286 tree addr = build1 (ADDR_EXPR, type, ref);
5287 recompute_tree_invariant_for_addr_expr (addr);
5288 return addr;
5291 /* Similar except don't specify the TREE_TYPE
5292 and leave the TREE_SIDE_EFFECTS as 0.
5293 It is permissible for arguments to be null,
5294 or even garbage if their values do not matter. */
5296 tree
5297 build_nt (enum tree_code code, ...)
5299 tree t;
5300 int length;
5301 int i;
5302 va_list p;
5304 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5306 va_start (p, code);
5308 t = make_node (code);
5309 length = TREE_CODE_LENGTH (code);
5311 for (i = 0; i < length; i++)
5312 TREE_OPERAND (t, i) = va_arg (p, tree);
5314 va_end (p);
5315 return t;
5318 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5319 tree vec. */
5321 tree
5322 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5324 tree ret, t;
5325 unsigned int ix;
5327 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5328 CALL_EXPR_FN (ret) = fn;
5329 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5330 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5331 CALL_EXPR_ARG (ret, ix) = t;
5332 return ret;
5335 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5336 and data type TYPE.
5337 We do NOT enter this node in any sort of symbol table.
5339 LOC is the location of the decl.
5341 layout_decl is used to set up the decl's storage layout.
5342 Other slots are initialized to 0 or null pointers. */
5344 tree
5345 build_decl (location_t loc, enum tree_code code, tree name,
5346 tree type MEM_STAT_DECL)
5348 tree t;
5350 t = make_node (code PASS_MEM_STAT);
5351 DECL_SOURCE_LOCATION (t) = loc;
5353 /* if (type == error_mark_node)
5354 type = integer_type_node; */
5355 /* That is not done, deliberately, so that having error_mark_node
5356 as the type can suppress useless errors in the use of this variable. */
5358 DECL_NAME (t) = name;
5359 TREE_TYPE (t) = type;
5361 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5362 layout_decl (t, 0);
5364 return t;
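/* A usage sketch (hypothetical artificial temporary): creating an "int"
   variable at the current input location,

     tree var = build_decl (input_location, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
     DECL_ARTIFICIAL (var) = 1;
     TREE_USED (var) = 1;

   Because the code is VAR_DECL, the layout_decl call above has already
   filled in the decl's size, mode and alignment from integer_type_node.  */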
5367 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5369 tree
5370 build_debug_expr_decl (tree type)
5372 tree vexpr = make_node (DEBUG_EXPR_DECL);
5373 DECL_ARTIFICIAL (vexpr) = 1;
5374 TREE_TYPE (vexpr) = type;
5375 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5376 return vexpr;
5379 /* Builds and returns function declaration with NAME and TYPE. */
5381 tree
5382 build_fn_decl (const char *name, tree type)
5384 tree id = get_identifier (name);
5385 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5387 DECL_EXTERNAL (decl) = 1;
5388 TREE_PUBLIC (decl) = 1;
5389 DECL_ARTIFICIAL (decl) = 1;
5390 TREE_NOTHROW (decl) = 1;
5392 return decl;
5395 vec<tree, va_gc> *all_translation_units;
5397 /* Builds a new translation-unit decl with name NAME, queues it in the
5398 global list of translation-unit decls and returns it. */
5400 tree
5401 build_translation_unit_decl (tree name)
5403 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5404 name, NULL_TREE);
5405 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5406 vec_safe_push (all_translation_units, tu);
5407 return tu;
5411 /* BLOCK nodes are used to represent the structure of binding contours
5412 and declarations, once those contours have been exited and their contents
5413 compiled. This information is used for outputting debugging info. */
5415 tree
5416 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5418 tree block = make_node (BLOCK);
5420 BLOCK_VARS (block) = vars;
5421 BLOCK_SUBBLOCKS (block) = subblocks;
5422 BLOCK_SUPERCONTEXT (block) = supercontext;
5423 BLOCK_CHAIN (block) = chain;
5424 return block;
5428 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5430 LOC is the location to use in tree T. */
5432 void
5433 protected_set_expr_location (tree t, location_t loc)
5435 if (CAN_HAVE_LOCATION_P (t))
5436 SET_EXPR_LOCATION (t, loc);
5437 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5439 t = expr_single (t);
5440 if (t && CAN_HAVE_LOCATION_P (t))
5441 SET_EXPR_LOCATION (t, loc);
5445 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5446 UNKNOWN_LOCATION. */
5448 void
5449 protected_set_expr_location_if_unset (tree t, location_t loc)
5451 t = expr_single (t);
5452 if (t && !EXPR_HAS_LOCATION (t))
5453 protected_set_expr_location (t, loc);
5456 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5457 of the various TYPE_QUAL values. */
5459 static void
5460 set_type_quals (tree type, int type_quals)
5462 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5463 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5464 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5465 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5466 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5469 /* Returns true iff CAND and BASE have equivalent language-specific
5470 qualifiers. */
5472 bool
5473 check_lang_type (const_tree cand, const_tree base)
5475 if (lang_hooks.types.type_hash_eq == NULL)
5476 return true;
5477 /* type_hash_eq currently only applies to these types. */
5478 if (TREE_CODE (cand) != FUNCTION_TYPE
5479 && TREE_CODE (cand) != METHOD_TYPE)
5480 return true;
5481 return lang_hooks.types.type_hash_eq (cand, base);
5484 /* This function checks to see if TYPE matches the size of one of the built-in
5485 atomic types, and returns that core atomic type. */
5487 static tree
5488 find_atomic_core_type (const_tree type)
5490 tree base_atomic_type;
5492 /* Only handle complete types. */
5493 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5494 return NULL_TREE;
5496 switch (tree_to_uhwi (TYPE_SIZE (type)))
5498 case 8:
5499 base_atomic_type = atomicQI_type_node;
5500 break;
5502 case 16:
5503 base_atomic_type = atomicHI_type_node;
5504 break;
5506 case 32:
5507 base_atomic_type = atomicSI_type_node;
5508 break;
5510 case 64:
5511 base_atomic_type = atomicDI_type_node;
5512 break;
5514 case 128:
5515 base_atomic_type = atomicTI_type_node;
5516 break;
5518 default:
5519 base_atomic_type = NULL_TREE;
5522 return base_atomic_type;
5525 /* Returns true iff unqualified CAND and BASE are equivalent. */
5527 bool
5528 check_base_type (const_tree cand, const_tree base)
5530 if (TYPE_NAME (cand) != TYPE_NAME (base)
5531 /* Apparently this is needed for Objective-C. */
5532 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5533 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5534 TYPE_ATTRIBUTES (base)))
5535 return false;
5536 /* Check alignment. */
5537 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5538 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5539 return true;
5540 /* Atomic types increase minimal alignment. We must do so as well
5541 or we get duplicated canonical types. See PR88686. */
5542 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5544 /* See if this object can map to a basic atomic type. */
5545 tree atomic_type = find_atomic_core_type (cand);
5546 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5547 return true;
5549 return false;
5552 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5554 bool
5555 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5557 return (TYPE_QUALS (cand) == type_quals
5558 && check_base_type (cand, base)
5559 && check_lang_type (cand, base));
5562 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5564 static bool
5565 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5567 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5568 && TYPE_NAME (cand) == TYPE_NAME (base)
5569 /* Apparently this is needed for Objective-C. */
5570 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5571 /* Check alignment. */
5572 && TYPE_ALIGN (cand) == align
5573 /* Check this is a user-aligned type as build_aligned_type
5574 would create. */
5575 && TYPE_USER_ALIGN (cand)
5576 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5577 TYPE_ATTRIBUTES (base))
5578 && check_lang_type (cand, base));
5581 /* Return a version of the TYPE, qualified as indicated by the
5582 TYPE_QUALS, if one exists. If no qualified version exists yet,
5583 return NULL_TREE. */
5585 tree
5586 get_qualified_type (tree type, int type_quals)
5588 if (TYPE_QUALS (type) == type_quals)
5589 return type;
5591 tree mv = TYPE_MAIN_VARIANT (type);
5592 if (check_qualified_type (mv, type, type_quals))
5593 return mv;
5595 /* Search the chain of variants to see if there is already one there just
5596 like the one we need to have. If so, use that existing one. We must
5597 preserve the TYPE_NAME, since there is code that depends on this. */
5598 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5599 if (check_qualified_type (*tp, type, type_quals))
5601 /* Put the found variant at the head of the variant list so
5602 frequently searched variants get found faster. The C++ FE
5603 benefits greatly from this. */
5604 tree t = *tp;
5605 *tp = TYPE_NEXT_VARIANT (t);
5606 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5607 TYPE_NEXT_VARIANT (mv) = t;
5608 return t;
5611 return NULL_TREE;
5614 /* Like get_qualified_type, but creates the type if it does not
5615 exist. This function never returns NULL_TREE. */
5617 tree
5618 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5620 tree t;
5622 /* See if we already have the appropriate qualified variant. */
5623 t = get_qualified_type (type, type_quals);
5625 /* If not, build it. */
5626 if (!t)
5628 t = build_variant_type_copy (type PASS_MEM_STAT);
5629 set_type_quals (t, type_quals);
5631 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5633 /* See if this object can map to a basic atomic type. */
5634 tree atomic_type = find_atomic_core_type (type);
5635 if (atomic_type)
5637 /* Ensure the alignment of this type is compatible with
5638 the required alignment of the atomic type. */
5639 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5640 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5644 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5645 /* Propagate structural equality. */
5646 SET_TYPE_STRUCTURAL_EQUALITY (t);
5647 else if (TYPE_CANONICAL (type) != type)
5648 /* Build the underlying canonical type, since it is different
5649 from TYPE. */
5651 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5652 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5654 else
5655 /* T is its own canonical type. */
5656 TYPE_CANONICAL (t) = t;
5660 return t;
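/* For example, a "const volatile int" variant (hypothetical request) is
   obtained with

     tree cv_int = build_qualified_type (integer_type_node,
                                         TYPE_QUAL_CONST
                                         | TYPE_QUAL_VOLATILE);

   A second call with the same arguments returns the very same node,
   because get_qualified_type finds the variant already sitting on
   integer_type_node's variant chain before a new copy is built.  */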
5663 /* Create a variant of type T with alignment ALIGN. */
5665 tree
5666 build_aligned_type (tree type, unsigned int align)
5668 tree t;
5670 if (TYPE_PACKED (type)
5671 || TYPE_ALIGN (type) == align)
5672 return type;
5674 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5675 if (check_aligned_type (t, type, align))
5676 return t;
5678 t = build_variant_type_copy (type);
5679 SET_TYPE_ALIGN (t, align);
5680 TYPE_USER_ALIGN (t) = 1;
5682 return t;
5685 /* Create a new distinct copy of TYPE. The new type is made its own
5686 MAIN_VARIANT. If TYPE requires structural equality checks, the
5687 resulting type requires structural equality checks; otherwise, its
5688 TYPE_CANONICAL points to itself. */
5690 tree
5691 build_distinct_type_copy (tree type MEM_STAT_DECL)
5693 tree t = copy_node (type PASS_MEM_STAT);
5695 TYPE_POINTER_TO (t) = 0;
5696 TYPE_REFERENCE_TO (t) = 0;
5698 /* Set the canonical type either to a new equivalence class, or
5699 propagate the need for structural equality checks. */
5700 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5701 SET_TYPE_STRUCTURAL_EQUALITY (t);
5702 else
5703 TYPE_CANONICAL (t) = t;
5705 /* Make it its own variant. */
5706 TYPE_MAIN_VARIANT (t) = t;
5707 TYPE_NEXT_VARIANT (t) = 0;
5709 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5710 whose TREE_TYPE is not t. This can also happen in the Ada
5711 frontend when using subtypes. */
5713 return t;
5716 /* Create a new variant of TYPE, equivalent but distinct. This is so
5717 the caller can modify it. TYPE_CANONICAL for the return type will
5718 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5719 are considered equal by the language itself (or that both types
5720 require structural equality checks). */
5722 tree
5723 build_variant_type_copy (tree type MEM_STAT_DECL)
5725 tree t, m = TYPE_MAIN_VARIANT (type);
5727 t = build_distinct_type_copy (type PASS_MEM_STAT);
5729 /* Since we're building a variant, assume that it is a non-semantic
5730 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5731 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5732 /* Type variants have no alias set defined. */
5733 TYPE_ALIAS_SET (t) = -1;
5735 /* Add the new type to the chain of variants of TYPE. */
5736 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5737 TYPE_NEXT_VARIANT (m) = t;
5738 TYPE_MAIN_VARIANT (t) = m;
5740 return t;
5743 /* Return true if the from trees in both tree maps are equal. */
5746 tree_map_base_eq (const void *va, const void *vb)
5748 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5749 *const b = (const struct tree_map_base *) vb;
5750 return (a->from == b->from);
5753 /* Hash a from tree in a tree_map_base. */
5755 unsigned int
5756 tree_map_base_hash (const void *item)
5758 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5761 /* Return true if this tree map structure is marked for garbage collection
5762 purposes. We simply return true if the from tree is marked, so that this
5763 structure goes away when the from tree goes away. */
5766 tree_map_base_marked_p (const void *p)
5768 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5771 /* Hash a from tree in a tree_map. */
5773 unsigned int
5774 tree_map_hash (const void *item)
5776 return (((const struct tree_map *) item)->hash);
5779 /* Hash a from tree in a tree_decl_map. */
5781 unsigned int
5782 tree_decl_map_hash (const void *item)
5784 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5787 /* Return the initialization priority for DECL. */
5789 priority_type
5790 decl_init_priority_lookup (tree decl)
5792 symtab_node *snode = symtab_node::get (decl);
5794 if (!snode)
5795 return DEFAULT_INIT_PRIORITY;
5796 return
5797 snode->get_init_priority ();
5800 /* Return the finalization priority for DECL. */
5802 priority_type
5803 decl_fini_priority_lookup (tree decl)
5805 cgraph_node *node = cgraph_node::get (decl);
5807 if (!node)
5808 return DEFAULT_INIT_PRIORITY;
5809 return
5810 node->get_fini_priority ();
5813 /* Set the initialization priority for DECL to PRIORITY. */
5815 void
5816 decl_init_priority_insert (tree decl, priority_type priority)
5818 struct symtab_node *snode;
5820 if (priority == DEFAULT_INIT_PRIORITY)
5822 snode = symtab_node::get (decl);
5823 if (!snode)
5824 return;
5826 else if (VAR_P (decl))
5827 snode = varpool_node::get_create (decl);
5828 else
5829 snode = cgraph_node::get_create (decl);
5830 snode->set_init_priority (priority);
5833 /* Set the finalization priority for DECL to PRIORITY. */
5835 void
5836 decl_fini_priority_insert (tree decl, priority_type priority)
5838 struct cgraph_node *node;
5840 if (priority == DEFAULT_INIT_PRIORITY)
5842 node = cgraph_node::get (decl);
5843 if (!node)
5844 return;
5846 else
5847 node = cgraph_node::get_create (decl);
5848 node->set_fini_priority (priority);
5851 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5853 static void
5854 print_debug_expr_statistics (void)
5856 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5857 (long) debug_expr_for_decl->size (),
5858 (long) debug_expr_for_decl->elements (),
5859 debug_expr_for_decl->collisions ());
5862 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5864 static void
5865 print_value_expr_statistics (void)
5867 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5868 (long) value_expr_for_decl->size (),
5869 (long) value_expr_for_decl->elements (),
5870 value_expr_for_decl->collisions ());
5873 /* Lookup a debug expression for FROM, and return it if we find one. */
5875 tree
5876 decl_debug_expr_lookup (tree from)
5878 struct tree_decl_map *h, in;
5879 in.base.from = from;
5881 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5882 if (h)
5883 return h->to;
5884 return NULL_TREE;
5887 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5889 void
5890 decl_debug_expr_insert (tree from, tree to)
5892 struct tree_decl_map *h;
5894 h = ggc_alloc<tree_decl_map> ();
5895 h->base.from = from;
5896 h->to = to;
5897 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5900 /* Lookup a value expression for FROM, and return it if we find one. */
5902 tree
5903 decl_value_expr_lookup (tree from)
5905 struct tree_decl_map *h, in;
5906 in.base.from = from;
5908 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5909 if (h)
5910 return h->to;
5911 return NULL_TREE;
5914 /* Insert a mapping FROM->TO in the value expression hashtable. */
5916 void
5917 decl_value_expr_insert (tree from, tree to)
5919 struct tree_decl_map *h;
5921 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5922 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5924 h = ggc_alloc<tree_decl_map> ();
5925 h->base.from = from;
5926 h->to = to;
5927 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
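/* A usage sketch (hypothetical lowering pass): to make every later use of
   DECL expand as a field of a frame object instead,

     tree repl = build3 (COMPONENT_REF, TREE_TYPE (field),
                         frame, field, NULL_TREE);
     decl_value_expr_insert (decl, repl);
     DECL_HAS_VALUE_EXPR_P (decl) = 1;

   (normally spelled via the SET_DECL_VALUE_EXPR macro).  Consumers test
   DECL_HAS_VALUE_EXPR_P and fetch the replacement with
   decl_value_expr_lookup, i.e. the DECL_VALUE_EXPR macro.  */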
5930 /* Lookup a vector of debug arguments for FROM, and return it if we
5931 find one. */
5933 vec<tree, va_gc> **
5934 decl_debug_args_lookup (tree from)
5936 struct tree_vec_map *h, in;
5938 if (!DECL_HAS_DEBUG_ARGS_P (from))
5939 return NULL;
5940 gcc_checking_assert (debug_args_for_decl != NULL);
5941 in.base.from = from;
5942 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5943 if (h)
5944 return &h->to;
5945 return NULL;
5948 /* Insert a mapping FROM->empty vector of debug arguments in the debug
5949 arguments hashtable. */
5951 vec<tree, va_gc> **
5952 decl_debug_args_insert (tree from)
5954 struct tree_vec_map *h;
5955 tree_vec_map **loc;
5957 if (DECL_HAS_DEBUG_ARGS_P (from))
5958 return decl_debug_args_lookup (from);
5959 if (debug_args_for_decl == NULL)
5960 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5961 h = ggc_alloc<tree_vec_map> ();
5962 h->base.from = from;
5963 h->to = NULL;
5964 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5965 *loc = h;
5966 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5967 return &h->to;
5970 /* Hashing of types so that we don't make duplicates.
5971 The entry point is `type_hash_canon'. */
5973 /* Generate the default hash code for TYPE. This is designed for
5974 speed, rather than maximum entropy. */
5976 hashval_t
5977 type_hash_canon_hash (tree type)
5979 inchash::hash hstate;
5981 hstate.add_int (TREE_CODE (type));
5983 if (TREE_TYPE (type))
5984 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
5986 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
5987 /* Just the identifier is adequate to distinguish. */
5988 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
5990 switch (TREE_CODE (type))
5992 case METHOD_TYPE:
5993 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
5994 /* FALLTHROUGH. */
5995 case FUNCTION_TYPE:
5996 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
5997 if (TREE_VALUE (t) != error_mark_node)
5998 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
5999 break;
6001 case OFFSET_TYPE:
6002 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6003 break;
6005 case ARRAY_TYPE:
6007 if (TYPE_DOMAIN (type))
6008 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6009 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6011 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6012 hstate.add_object (typeless);
6015 break;
6017 case INTEGER_TYPE:
6019 tree t = TYPE_MAX_VALUE (type);
6020 if (!t)
6021 t = TYPE_MIN_VALUE (type);
6022 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6023 hstate.add_object (TREE_INT_CST_ELT (t, i));
6024 break;
6027 case REAL_TYPE:
6028 case FIXED_POINT_TYPE:
6030 unsigned prec = TYPE_PRECISION (type);
6031 hstate.add_object (prec);
6032 break;
6035 case VECTOR_TYPE:
6036 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6037 break;
6039 default:
6040 break;
6043 return hstate.end ();
6046 /* These are the Hashtable callback functions. */
6048 /* Returns true iff the types are equivalent. */
6050 bool
6051 type_cache_hasher::equal (type_hash *a, type_hash *b)
6053 /* First test the things that are the same for all types. */
6054 if (a->hash != b->hash
6055 || TREE_CODE (a->type) != TREE_CODE (b->type)
6056 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6057 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6058 TYPE_ATTRIBUTES (b->type))
6059 || (TREE_CODE (a->type) != COMPLEX_TYPE
6060 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6061 return 0;
6063 /* Be careful about comparing arrays before and after the element type
6064 has been completed; don't compare TYPE_ALIGN unless both types are
6065 complete. */
6066 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6067 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6068 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6069 return 0;
6071 switch (TREE_CODE (a->type))
6073 case VOID_TYPE:
6074 case OPAQUE_TYPE:
6075 case COMPLEX_TYPE:
6076 case POINTER_TYPE:
6077 case REFERENCE_TYPE:
6078 case NULLPTR_TYPE:
6079 return 1;
6081 case VECTOR_TYPE:
6082 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6083 TYPE_VECTOR_SUBPARTS (b->type));
6085 case ENUMERAL_TYPE:
6086 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6087 && !(TYPE_VALUES (a->type)
6088 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6089 && TYPE_VALUES (b->type)
6090 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6091 && type_list_equal (TYPE_VALUES (a->type),
6092 TYPE_VALUES (b->type))))
6093 return 0;
6095 /* fall through */
6097 case INTEGER_TYPE:
6098 case REAL_TYPE:
6099 case BOOLEAN_TYPE:
6100 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6101 return false;
6102 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6103 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6104 TYPE_MAX_VALUE (b->type)))
6105 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6106 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6107 TYPE_MIN_VALUE (b->type))));
6109 case FIXED_POINT_TYPE:
6110 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6112 case OFFSET_TYPE:
6113 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6115 case METHOD_TYPE:
6116 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6117 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6118 || (TYPE_ARG_TYPES (a->type)
6119 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6120 && TYPE_ARG_TYPES (b->type)
6121 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6122 && type_list_equal (TYPE_ARG_TYPES (a->type),
6123 TYPE_ARG_TYPES (b->type)))))
6124 break;
6125 return 0;
6126 case ARRAY_TYPE:
6127 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6128 where the flag should be inherited from the element type
6129 and can change after ARRAY_TYPEs are created; on non-aggregates
6130 compare it and hash it, scalars will never have that flag set
6131 and we need to differentiate between arrays created by different
6132 front-ends or middle-end created arrays. */
6133 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6134 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6135 || (TYPE_TYPELESS_STORAGE (a->type)
6136 == TYPE_TYPELESS_STORAGE (b->type))));
6138 case RECORD_TYPE:
6139 case UNION_TYPE:
6140 case QUAL_UNION_TYPE:
6141 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6142 || (TYPE_FIELDS (a->type)
6143 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6144 && TYPE_FIELDS (b->type)
6145 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6146 && type_list_equal (TYPE_FIELDS (a->type),
6147 TYPE_FIELDS (b->type))));
6149 case FUNCTION_TYPE:
6150 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6151 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6152 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6153 || (TYPE_ARG_TYPES (a->type)
6154 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6155 && TYPE_ARG_TYPES (b->type)
6156 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6157 && type_list_equal (TYPE_ARG_TYPES (a->type),
6158 TYPE_ARG_TYPES (b->type))))
6159 break;
6160 return 0;
6162 default:
6163 return 0;
6166 if (lang_hooks.types.type_hash_eq != NULL)
6167 return lang_hooks.types.type_hash_eq (a->type, b->type);
6169 return 1;
6172 /* Given TYPE, and HASHCODE its hash code, return the canonical
6173 object for an identical type if one already exists.
6174 Otherwise, return TYPE, and record it as the canonical object.
6176 To use this function, first create a type of the sort you want.
6177 Then compute its hash code from the fields of the type that
6178 make it different from other similar types.
6179 Then call this function and use the value. */
6181 tree
6182 type_hash_canon (unsigned int hashcode, tree type)
6184 type_hash in;
6185 type_hash **loc;
6187 /* The hash table only contains main variants, so ensure that's what we're
6188 being passed. */
6189 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6191 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6192 must call that routine before comparing TYPE_ALIGNs. */
6193 layout_type (type);
6195 in.hash = hashcode;
6196 in.type = type;
6198 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6199 if (*loc)
6201 tree t1 = ((type_hash *) *loc)->type;
6202 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6203 && t1 != type);
6204 if (TYPE_UID (type) + 1 == next_type_uid)
6205 --next_type_uid;
6206 /* Free also min/max values and the cache for integer
6207 types. This can't be done in free_node, as LTO frees
6208 those on its own. */
6209 if (TREE_CODE (type) == INTEGER_TYPE)
6211 if (TYPE_MIN_VALUE (type)
6212 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6214 /* Zero is always in TYPE_CACHED_VALUES. */
6215 if (! TYPE_UNSIGNED (type))
6216 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6217 ggc_free (TYPE_MIN_VALUE (type));
6219 if (TYPE_MAX_VALUE (type)
6220 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6222 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6223 ggc_free (TYPE_MAX_VALUE (type));
6225 if (TYPE_CACHED_VALUES_P (type))
6226 ggc_free (TYPE_CACHED_VALUES (type));
6228 free_node (type);
6229 return t1;
6231 else
6233 struct type_hash *h;
6235 h = ggc_alloc<type_hash> ();
6236 h->hash = hashcode;
6237 h->type = type;
6238 *loc = h;
6240 return type;
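/* Editor's sketch (not part of tree.cc): the create/hash/canonicalize
   pattern described in the comment above type_hash_canon, mirroring what
   build_nonstandard_integer_type does further down in this file.  The
   helper name build_uint_type_sketch is hypothetical; the block is kept
   out of the build with "#if 0".  */
#if 0
static tree
build_uint_type_sketch (unsigned HOST_WIDE_INT precision)
{
  /* 1. Create a type of the sort we want.  */
  tree itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;
  fixup_unsigned_type (itype);

  /* 2. Hash the fields that distinguish it from other similar types.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);

  /* 3. Canonicalize: either ITYPE itself or an existing identical type
     comes back; in the latter case ITYPE has already been freed.  */
  return type_hash_canon (hstate.end (), itype);
}
#endif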
6244 static void
6245 print_type_hash_statistics (void)
6247 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6248 (long) type_hash_table->size (),
6249 (long) type_hash_table->elements (),
6250 type_hash_table->collisions ());
6253 /* Given two lists of types
6254 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6255 return 1 if the lists contain the same types in the same order.
6256 Also, the TREE_PURPOSEs must match. */
6258 bool
6259 type_list_equal (const_tree l1, const_tree l2)
6261 const_tree t1, t2;
6263 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6264 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6265 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6266 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6267 && (TREE_TYPE (TREE_PURPOSE (t1))
6268 == TREE_TYPE (TREE_PURPOSE (t2))))))
6269 return false;
6271 return t1 == t2;
6274 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6275 given by TYPE. If the argument list accepts variable arguments,
6276 then this function counts only the ordinary arguments. */
6279 type_num_arguments (const_tree fntype)
6281 int i = 0;
6283 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6284 /* If the function does not take a variable number of arguments,
6285 the last element in the list will have type `void'. */
6286 if (VOID_TYPE_P (TREE_VALUE (t)))
6287 break;
6288 else
6289 ++i;
6291 return i;
6294 /* Return the type of the function FNTYPE's argument ARGNO if known.
6295 For vararg functions, where ARGNO refers to one of the variadic
6296 arguments, return null. Otherwise, return void_type_node for
6297 out-of-bounds ARGNO. */
6299 tree
6300 type_argument_type (const_tree fntype, unsigned argno)
6302 /* Treat zero the same as an out-of-bounds argument number. */
6303 if (!argno)
6304 return void_type_node;
6306 function_args_iterator iter;
6308 tree argtype;
6309 unsigned i = 1;
6310 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6312 /* A vararg function's argument list ends in a null. Otherwise,
6313 an ordinary function's argument list ends with void. Return
6314 null if ARGNO refers to a vararg argument, void_type_node if
6315 it's out of bounds, and the formal argument type otherwise. */
6316 if (!argtype)
6317 break;
6319 if (i == argno || VOID_TYPE_P (argtype))
6320 return argtype;
6322 ++i;
6325 return NULL_TREE;
6328 /* Nonzero if integer constants T1 and T2
6329 represent the same constant value. */
6332 tree_int_cst_equal (const_tree t1, const_tree t2)
6334 if (t1 == t2)
6335 return 1;
6337 if (t1 == 0 || t2 == 0)
6338 return 0;
6340 STRIP_ANY_LOCATION_WRAPPER (t1);
6341 STRIP_ANY_LOCATION_WRAPPER (t2);
6343 if (TREE_CODE (t1) == INTEGER_CST
6344 && TREE_CODE (t2) == INTEGER_CST
6345 && wi::to_widest (t1) == wi::to_widest (t2))
6346 return 1;
6348 return 0;
6351 /* Return true if T is an INTEGER_CST whose numerical value (extended
6352 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6354 bool
6355 tree_fits_shwi_p (const_tree t)
6357 return (t != NULL_TREE
6358 && TREE_CODE (t) == INTEGER_CST
6359 && wi::fits_shwi_p (wi::to_widest (t)));
6362 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6363 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6365 bool
6366 tree_fits_poly_int64_p (const_tree t)
6368 if (t == NULL_TREE)
6369 return false;
6370 if (POLY_INT_CST_P (t))
6372 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6373 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6374 return false;
6375 return true;
6377 return (TREE_CODE (t) == INTEGER_CST
6378 && wi::fits_shwi_p (wi::to_widest (t)));
6381 /* Return true if T is an INTEGER_CST whose numerical value (extended
6382 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6384 bool
6385 tree_fits_uhwi_p (const_tree t)
6387 return (t != NULL_TREE
6388 && TREE_CODE (t) == INTEGER_CST
6389 && wi::fits_uhwi_p (wi::to_widest (t)));
6392 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6393 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6395 bool
6396 tree_fits_poly_uint64_p (const_tree t)
6398 if (t == NULL_TREE)
6399 return false;
6400 if (POLY_INT_CST_P (t))
6402 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6403 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6404 return false;
6405 return true;
6407 return (TREE_CODE (t) == INTEGER_CST
6408 && wi::fits_uhwi_p (wi::to_widest (t)));
6411 /* T is an INTEGER_CST whose numerical value (extended according to
6412 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6413 HOST_WIDE_INT. */
6415 HOST_WIDE_INT
6416 tree_to_shwi (const_tree t)
6418 gcc_assert (tree_fits_shwi_p (t));
6419 return TREE_INT_CST_LOW (t);
6422 /* T is an INTEGER_CST whose numerical value (extended according to
6423 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6424 HOST_WIDE_INT. */
6426 unsigned HOST_WIDE_INT
6427 tree_to_uhwi (const_tree t)
6429 gcc_assert (tree_fits_uhwi_p (t));
6430 return TREE_INT_CST_LOW (t);
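/* Editor's sketch (not part of tree.cc): the usual guard-then-extract
   idiom for the tree_fits_*hwi_p / tree_to_*hwi pairs above.  T and the
   surrounding context are hypothetical.  */
#if 0
  if (tree_fits_uhwi_p (t))
    {
      unsigned HOST_WIDE_INT n = tree_to_uhwi (t);
      /* ... use N; tree_to_uhwi would have asserted had T not fit.  */
    }
  else
    {
      /* Fall back to the wide-int view, which handles any INTEGER_CST.  */
      widest_int w = wi::to_widest (t);
    }
#endif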
6433 /* Return the most significant (sign) bit of T. */
6436 tree_int_cst_sign_bit (const_tree t)
6438 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6440 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6443 /* Return an indication of the sign of the integer constant T.
6444 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6445 Note that -1 will never be returned if T's type is unsigned. */
6448 tree_int_cst_sgn (const_tree t)
6450 if (wi::to_wide (t) == 0)
6451 return 0;
6452 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6453 return 1;
6454 else if (wi::neg_p (wi::to_wide (t)))
6455 return -1;
6456 else
6457 return 1;
6460 /* Return the minimum number of bits needed to represent VALUE in a
6461 signed or unsigned type; SGN says which. */
6463 unsigned int
6464 tree_int_cst_min_precision (tree value, signop sgn)
6466 /* If the value is negative, compute its negative minus 1. The latter
6467 adjustment is because the absolute value of the largest negative value
6468 is one larger than the largest positive value. This is equivalent to
6469 a bit-wise negation, so use that operation instead. */
6471 if (tree_int_cst_sgn (value) < 0)
6472 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6474 /* Return the number of bits needed, taking into account the fact
6475 that we need one more bit for a signed than unsigned type.
6476 If value is 0 or -1, the minimum precision is 1 no matter
6477 whether SGN is SIGNED or UNSIGNED. */
6479 if (integer_zerop (value))
6480 return 1;
6481 else
6482 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
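/* Editor's worked examples (not part of tree.cc), following the code above:
     value 5  (101b): floor_log2 = 2 -> 3 bits for UNSIGNED, 4 for SIGNED;
     value -3: ~(-3) = 2, floor_log2 = 1 -> 3 bits SIGNED (range -4..3);
     value 0 or -1: minimum precision is 1 either way, as noted above.  */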
6485 /* Return truthvalue of whether T1 is the same tree structure as T2.
6486 Return 1 if they are the same.
6487 Return 0 if they are understandably different.
6488 Return -1 if either contains tree structure not understood by
6489 this function. */
6492 simple_cst_equal (const_tree t1, const_tree t2)
6494 enum tree_code code1, code2;
6495 int cmp;
6496 int i;
6498 if (t1 == t2)
6499 return 1;
6500 if (t1 == 0 || t2 == 0)
6501 return 0;
6503 /* For location wrappers to be the same, they must be at the same
6504 source location (and wrap the same thing). */
6505 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6507 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6508 return 0;
6509 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6512 code1 = TREE_CODE (t1);
6513 code2 = TREE_CODE (t2);
6515 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6517 if (CONVERT_EXPR_CODE_P (code2)
6518 || code2 == NON_LVALUE_EXPR)
6519 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6520 else
6521 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6524 else if (CONVERT_EXPR_CODE_P (code2)
6525 || code2 == NON_LVALUE_EXPR)
6526 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6528 if (code1 != code2)
6529 return 0;
6531 switch (code1)
6533 case INTEGER_CST:
6534 return wi::to_widest (t1) == wi::to_widest (t2);
6536 case REAL_CST:
6537 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6539 case FIXED_CST:
6540 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6542 case STRING_CST:
6543 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6544 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6545 TREE_STRING_LENGTH (t1)));
6547 case CONSTRUCTOR:
6549 unsigned HOST_WIDE_INT idx;
6550 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6551 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6553 if (vec_safe_length (v1) != vec_safe_length (v2))
6554 return false;
6556 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6557 /* ??? Should we handle also fields here? */
6558 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6559 return false;
6560 return true;
6563 case SAVE_EXPR:
6564 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6566 case CALL_EXPR:
6567 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6568 if (cmp <= 0)
6569 return cmp;
6570 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6571 return 0;
6573 const_tree arg1, arg2;
6574 const_call_expr_arg_iterator iter1, iter2;
6575 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6576 arg2 = first_const_call_expr_arg (t2, &iter2);
6577 arg1 && arg2;
6578 arg1 = next_const_call_expr_arg (&iter1),
6579 arg2 = next_const_call_expr_arg (&iter2))
6581 cmp = simple_cst_equal (arg1, arg2);
6582 if (cmp <= 0)
6583 return cmp;
6585 return arg1 == arg2;
6588 case TARGET_EXPR:
6589 /* Special case: if either target is an unallocated VAR_DECL,
6590 it means that it's going to be unified with whatever the
6591 TARGET_EXPR is really supposed to initialize, so treat it
6592 as being equivalent to anything. */
6593 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6594 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6595 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6596 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6597 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6598 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6599 cmp = 1;
6600 else
6601 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6603 if (cmp <= 0)
6604 return cmp;
6606 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6608 case WITH_CLEANUP_EXPR:
6609 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6610 if (cmp <= 0)
6611 return cmp;
6613 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6615 case COMPONENT_REF:
6616 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6617 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6619 return 0;
6621 case VAR_DECL:
6622 case PARM_DECL:
6623 case CONST_DECL:
6624 case FUNCTION_DECL:
6625 return 0;
6627 default:
6628 if (POLY_INT_CST_P (t1))
6629 /* A false return means maybe_ne rather than known_ne. */
6630 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6631 TYPE_SIGN (TREE_TYPE (t1))),
6632 poly_widest_int::from (poly_int_cst_value (t2),
6633 TYPE_SIGN (TREE_TYPE (t2))));
6634 break;
6637 /* This general rule works for most tree codes. All exceptions should be
6638 handled above. If this is a language-specific tree code, we can't
6639 trust what might be in the operand, so say we don't know
6640 the situation. */
6641 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6642 return -1;
6644 switch (TREE_CODE_CLASS (code1))
6646 case tcc_unary:
6647 case tcc_binary:
6648 case tcc_comparison:
6649 case tcc_expression:
6650 case tcc_reference:
6651 case tcc_statement:
6652 cmp = 1;
6653 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6655 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6656 if (cmp <= 0)
6657 return cmp;
6660 return cmp;
6662 default:
6663 return -1;
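/* Editor's sketch (not part of tree.cc): because simple_cst_equal is
   tri-state (-1 unknown, 0 different, 1 same), callers typically test for
   an exact value rather than for truthiness.  A and B are hypothetical.  */
#if 0
  if (simple_cst_equal (a, b) == 1)
    {
      /* Known to be the same tree structure.  */
    }
  else
    {
      /* Either known different (0) or not understood (-1); treat both
	 conservatively as "not equal".  */
    }
#endif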
6667 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6668 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6669 than U, respectively. */
6672 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6674 if (tree_int_cst_sgn (t) < 0)
6675 return -1;
6676 else if (!tree_fits_uhwi_p (t))
6677 return 1;
6678 else if (TREE_INT_CST_LOW (t) == u)
6679 return 0;
6680 else if (TREE_INT_CST_LOW (t) < u)
6681 return -1;
6682 else
6683 return 1;
6686 /* Return true if SIZE represents a constant size that is in bounds of
6687 what the middle-end and the backend accepts (covering not more than
6688 half of the address-space).
6689 When PERR is non-null, set *PERR on failure to the description of
6690 why SIZE is not valid. */
6692 bool
6693 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6695 if (POLY_INT_CST_P (size))
6697 if (TREE_OVERFLOW (size))
6698 return false;
6699 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6700 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6701 return false;
6702 return true;
6705 cst_size_error error;
6706 if (!perr)
6707 perr = &error;
6709 if (TREE_CODE (size) != INTEGER_CST)
6711 *perr = cst_size_not_constant;
6712 return false;
6715 if (TREE_OVERFLOW_P (size))
6717 *perr = cst_size_overflow;
6718 return false;
6721 if (tree_int_cst_sgn (size) < 0)
6723 *perr = cst_size_negative;
6724 return false;
6726 if (!tree_fits_uhwi_p (size)
6727 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6728 < wi::to_widest (size) * 2))
6730 *perr = cst_size_too_big;
6731 return false;
6734 return true;
6737 /* Return the precision of the type, or for a complex or vector type the
6738 precision of the type of its elements. */
6740 unsigned int
6741 element_precision (const_tree type)
6743 if (!TYPE_P (type))
6744 type = TREE_TYPE (type);
6745 enum tree_code code = TREE_CODE (type);
6746 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6747 type = TREE_TYPE (type);
6749 return TYPE_PRECISION (type);
6752 /* Return true if CODE represents an associative tree code. Otherwise
6753 return false. */
6754 bool
6755 associative_tree_code (enum tree_code code)
6757 switch (code)
6759 case BIT_IOR_EXPR:
6760 case BIT_AND_EXPR:
6761 case BIT_XOR_EXPR:
6762 case PLUS_EXPR:
6763 case MULT_EXPR:
6764 case MIN_EXPR:
6765 case MAX_EXPR:
6766 return true;
6768 default:
6769 break;
6771 return false;
6774 /* Return true if CODE represents a commutative tree code. Otherwise
6775 return false. */
6776 bool
6777 commutative_tree_code (enum tree_code code)
6779 switch (code)
6781 case PLUS_EXPR:
6782 case MULT_EXPR:
6783 case MULT_HIGHPART_EXPR:
6784 case MIN_EXPR:
6785 case MAX_EXPR:
6786 case BIT_IOR_EXPR:
6787 case BIT_XOR_EXPR:
6788 case BIT_AND_EXPR:
6789 case NE_EXPR:
6790 case EQ_EXPR:
6791 case UNORDERED_EXPR:
6792 case ORDERED_EXPR:
6793 case UNEQ_EXPR:
6794 case LTGT_EXPR:
6795 case TRUTH_AND_EXPR:
6796 case TRUTH_XOR_EXPR:
6797 case TRUTH_OR_EXPR:
6798 case WIDEN_MULT_EXPR:
6799 case VEC_WIDEN_MULT_HI_EXPR:
6800 case VEC_WIDEN_MULT_LO_EXPR:
6801 case VEC_WIDEN_MULT_EVEN_EXPR:
6802 case VEC_WIDEN_MULT_ODD_EXPR:
6803 return true;
6805 default:
6806 break;
6808 return false;
6811 /* Return true if CODE represents a ternary tree code for which the
6812 first two operands are commutative. Otherwise return false. */
6813 bool
6814 commutative_ternary_tree_code (enum tree_code code)
6816 switch (code)
6818 case WIDEN_MULT_PLUS_EXPR:
6819 case WIDEN_MULT_MINUS_EXPR:
6820 case DOT_PROD_EXPR:
6821 return true;
6823 default:
6824 break;
6826 return false;
6829 /* Returns true if CODE can overflow. */
6831 bool
6832 operation_can_overflow (enum tree_code code)
6834 switch (code)
6836 case PLUS_EXPR:
6837 case MINUS_EXPR:
6838 case MULT_EXPR:
6839 case LSHIFT_EXPR:
6840 /* Can overflow in various ways. */
6841 return true;
6842 case TRUNC_DIV_EXPR:
6843 case EXACT_DIV_EXPR:
6844 case FLOOR_DIV_EXPR:
6845 case CEIL_DIV_EXPR:
6846 /* For INT_MIN / -1. */
6847 return true;
6848 case NEGATE_EXPR:
6849 case ABS_EXPR:
6850 /* For -INT_MIN. */
6851 return true;
6852 default:
6853 /* These operators cannot overflow. */
6854 return false;
6858 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6859 -ftrapv doesn't generate trapping insns for CODE. */
6861 bool
6862 operation_no_trapping_overflow (tree type, enum tree_code code)
6864 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6866 /* We don't generate instructions that trap on overflow for complex or vector
6867 types. */
6868 if (!INTEGRAL_TYPE_P (type))
6869 return true;
6871 if (!TYPE_OVERFLOW_TRAPS (type))
6872 return true;
6874 switch (code)
6876 case PLUS_EXPR:
6877 case MINUS_EXPR:
6878 case MULT_EXPR:
6879 case NEGATE_EXPR:
6880 case ABS_EXPR:
6881 /* These operators can overflow, and -ftrapv generates trapping code for
6882 these. */
6883 return false;
6884 case TRUNC_DIV_EXPR:
6885 case EXACT_DIV_EXPR:
6886 case FLOOR_DIV_EXPR:
6887 case CEIL_DIV_EXPR:
6888 case LSHIFT_EXPR:
6889 /* These operators can overflow, but -ftrapv does not generate trapping
6890 code for these. */
6891 return true;
6892 default:
6893 /* These operators cannot overflow. */
6894 return true;
6898 /* Constructors for pointer, array and function types.
6899 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6900 constructed by language-dependent code, not here.) */
6902 /* Construct, lay out and return the type of pointers to TO_TYPE with
6903 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6904 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6905 indicate this type can reference all of memory. If such a type has
6906 already been constructed, reuse it. */
6908 tree
6909 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6910 bool can_alias_all)
6912 tree t;
6913 bool could_alias = can_alias_all;
6915 if (to_type == error_mark_node)
6916 return error_mark_node;
6918 if (mode == VOIDmode)
6920 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6921 mode = targetm.addr_space.pointer_mode (as);
6924 /* If the pointed-to type has the may_alias attribute set, force
6925 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6926 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6927 can_alias_all = true;
6929 /* In some cases, languages will have things that aren't a POINTER_TYPE
6930 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6931 In that case, return that type without regard to the rest of our
6932 operands.
6934 ??? This is a kludge, but consistent with the way this function has
6935 always operated and there doesn't seem to be a good way to avoid this
6936 at the moment. */
6937 if (TYPE_POINTER_TO (to_type) != 0
6938 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6939 return TYPE_POINTER_TO (to_type);
6941 /* First, if we already have a type for pointers to TO_TYPE and it's
6942 the proper mode, use it. */
6943 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6944 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6945 return t;
6947 t = make_node (POINTER_TYPE);
6949 TREE_TYPE (t) = to_type;
6950 SET_TYPE_MODE (t, mode);
6951 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6952 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6953 TYPE_POINTER_TO (to_type) = t;
6955 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6956 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6957 SET_TYPE_STRUCTURAL_EQUALITY (t);
6958 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6959 TYPE_CANONICAL (t)
6960 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6961 mode, false);
6963 /* Lay out the type. This function has many callers that are concerned
6964 with expression-construction, and this simplifies them all. */
6965 layout_type (t);
6967 return t;
6970 /* By default build pointers in ptr_mode. */
6972 tree
6973 build_pointer_type (tree to_type)
6975 return build_pointer_type_for_mode (to_type, VOIDmode, false);
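/* Editor's sketch (not part of tree.cc): building pointer types.  The
   first call uses the default pointer mode; the second passes VOIDmode
   explicitly, which is what build_pointer_type does internally, so the
   TYPE_POINTER_TO cache hands back the same node.  */
#if 0
  tree int_ptr = build_pointer_type (integer_type_node);
  tree int_ptr2 = build_pointer_type_for_mode (integer_type_node,
					       VOIDmode, false);
  gcc_checking_assert (int_ptr == int_ptr2);
#endif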
6978 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6980 tree
6981 build_reference_type_for_mode (tree to_type, machine_mode mode,
6982 bool can_alias_all)
6984 tree t;
6985 bool could_alias = can_alias_all;
6987 if (to_type == error_mark_node)
6988 return error_mark_node;
6990 if (mode == VOIDmode)
6992 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6993 mode = targetm.addr_space.pointer_mode (as);
6996 /* If the pointed-to type has the may_alias attribute set, force
6997 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6998 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6999 can_alias_all = true;
7001 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7002 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7003 In that case, return that type without regard to the rest of our
7004 operands.
7006 ??? This is a kludge, but consistent with the way this function has
7007 always operated and there doesn't seem to be a good way to avoid this
7008 at the moment. */
7009 if (TYPE_REFERENCE_TO (to_type) != 0
7010 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7011 return TYPE_REFERENCE_TO (to_type);
7013 /* First, if we already have a type for references to TO_TYPE and it's
7014 the proper mode, use it. */
7015 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7016 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7017 return t;
7019 t = make_node (REFERENCE_TYPE);
7021 TREE_TYPE (t) = to_type;
7022 SET_TYPE_MODE (t, mode);
7023 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7024 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7025 TYPE_REFERENCE_TO (to_type) = t;
7027 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7028 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7029 SET_TYPE_STRUCTURAL_EQUALITY (t);
7030 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7031 TYPE_CANONICAL (t)
7032 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7033 mode, false);
7035 layout_type (t);
7037 return t;
7041 /* Build the node for the type of references-to-TO_TYPE by default
7042 in ptr_mode. */
7044 tree
7045 build_reference_type (tree to_type)
7047 return build_reference_type_for_mode (to_type, VOIDmode, false);
7050 #define MAX_INT_CACHED_PREC \
7051 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7052 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7054 static void
7055 clear_nonstandard_integer_type_cache (void)
7057 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7059 nonstandard_integer_type_cache[i] = NULL;
7063 /* Builds a signed or unsigned integer type of precision PRECISION.
7064 Used for C bitfields whose precision does not match that of
7065 built-in target types. */
7066 tree
7067 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7068 int unsignedp)
7070 tree itype, ret;
7072 if (unsignedp)
7073 unsignedp = MAX_INT_CACHED_PREC + 1;
7075 if (precision <= MAX_INT_CACHED_PREC)
7077 itype = nonstandard_integer_type_cache[precision + unsignedp];
7078 if (itype)
7079 return itype;
7082 itype = make_node (INTEGER_TYPE);
7083 TYPE_PRECISION (itype) = precision;
7085 if (unsignedp)
7086 fixup_unsigned_type (itype);
7087 else
7088 fixup_signed_type (itype);
7090 inchash::hash hstate;
7091 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7092 ret = type_hash_canon (hstate.end (), itype);
7093 if (precision <= MAX_INT_CACHED_PREC)
7094 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7096 return ret;
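/* Editor's sketch (not part of tree.cc): a 24-bit unsigned type such as a
   front end might use for a "unsigned int x : 24" bit-field.  Repeated
   calls hit nonstandard_integer_type_cache and return the same node.  */
#if 0
  tree uint24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_checking_assert (TYPE_PRECISION (uint24) == 24
		       && TYPE_UNSIGNED (uint24));
#endif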
7099 #define MAX_BOOL_CACHED_PREC \
7100 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7101 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7103 /* Builds a boolean type of precision PRECISION.
7104 Used for boolean vectors to choose proper vector element size. */
7105 tree
7106 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7108 tree type;
7110 if (precision <= MAX_BOOL_CACHED_PREC)
7112 type = nonstandard_boolean_type_cache[precision];
7113 if (type)
7114 return type;
7117 type = make_node (BOOLEAN_TYPE);
7118 TYPE_PRECISION (type) = precision;
7119 fixup_signed_type (type);
7121 if (precision <= MAX_BOOL_CACHED_PREC)
7122 nonstandard_boolean_type_cache[precision] = type;
7124 return type;
7127 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7128 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7129 is true, reuse such a type that has already been constructed. */
7131 static tree
7132 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7134 tree itype = make_node (INTEGER_TYPE);
7136 TREE_TYPE (itype) = type;
7138 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7139 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7141 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7142 SET_TYPE_MODE (itype, TYPE_MODE (type));
7143 TYPE_SIZE (itype) = TYPE_SIZE (type);
7144 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7145 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7146 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7147 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7149 if (!shared)
7150 return itype;
7152 if ((TYPE_MIN_VALUE (itype)
7153 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7154 || (TYPE_MAX_VALUE (itype)
7155 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7157 /* Since we cannot reliably merge this type, we need to compare it using
7158 structural equality checks. */
7159 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7160 return itype;
7163 hashval_t hash = type_hash_canon_hash (itype);
7164 itype = type_hash_canon (hash, itype);
7166 return itype;
7169 /* Wrapper around build_range_type_1 with SHARED set to true. */
7171 tree
7172 build_range_type (tree type, tree lowval, tree highval)
7174 return build_range_type_1 (type, lowval, highval, true);
7177 /* Wrapper around build_range_type_1 with SHARED set to false. */
7179 tree
7180 build_nonshared_range_type (tree type, tree lowval, tree highval)
7182 return build_range_type_1 (type, lowval, highval, false);
7185 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7186 MAXVAL should be the maximum value in the domain
7187 (one less than the length of the array).
7189 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7190 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7191 The limit exists because the result is a signed type and we don't handle
7192 sizes that use more than one HOST_WIDE_INT. */
7194 tree
7195 build_index_type (tree maxval)
7197 return build_range_type (sizetype, size_zero_node, maxval);
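/* Editor's sketch (not part of tree.cc): the index type for a 10-element
   array has bounds [0, 9]; MAXVAL is one less than the length, as the
   comment above says.  */
#if 0
  tree domain = build_index_type (size_int (9));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain))
		       && compare_tree_int (TYPE_MAX_VALUE (domain), 9) == 0);
#endif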
7200 /* Return true if the debug information for TYPE, a subtype, should be emitted
7201 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7202 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7203 debug info and doesn't reflect the source code. */
7205 bool
7206 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7208 tree base_type = TREE_TYPE (type), low, high;
7210 /* Subrange types have a base type which is an integral type. */
7211 if (!INTEGRAL_TYPE_P (base_type))
7212 return false;
7214 /* Get the real bounds of the subtype. */
7215 if (lang_hooks.types.get_subrange_bounds)
7216 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7217 else
7219 low = TYPE_MIN_VALUE (type);
7220 high = TYPE_MAX_VALUE (type);
7223 /* If the type and its base type have the same representation and the same
7224 name, then the type is not a subrange but a copy of the base type. */
7225 if ((TREE_CODE (base_type) == INTEGER_TYPE
7226 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7227 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7228 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7229 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7230 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7231 return false;
7233 if (lowval)
7234 *lowval = low;
7235 if (highval)
7236 *highval = high;
7237 return true;
7240 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7241 and number of elements specified by the range of values of INDEX_TYPE.
7242 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7243 If SHARED is true, reuse such a type that has already been constructed.
7244 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7246 tree
7247 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7248 bool shared, bool set_canonical)
7250 tree t;
7252 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7254 error ("arrays of functions are not meaningful");
7255 elt_type = integer_type_node;
7258 t = make_node (ARRAY_TYPE);
7259 TREE_TYPE (t) = elt_type;
7260 TYPE_DOMAIN (t) = index_type;
7261 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7262 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7263 layout_type (t);
7265 if (shared)
7267 hashval_t hash = type_hash_canon_hash (t);
7268 t = type_hash_canon (hash, t);
7271 if (TYPE_CANONICAL (t) == t && set_canonical)
7273 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7274 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7275 || in_lto_p)
7276 SET_TYPE_STRUCTURAL_EQUALITY (t);
7277 else if (TYPE_CANONICAL (elt_type) != elt_type
7278 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7279 TYPE_CANONICAL (t)
7280 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7281 index_type
7282 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7283 typeless_storage, shared, set_canonical);
7286 return t;
7289 /* Wrapper around build_array_type_1 with SHARED set to true. */
7291 tree
7292 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7294 return
7295 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7298 /* Wrapper around build_array_type_1 with SHARED set to false. */
7300 tree
7301 build_nonshared_array_type (tree elt_type, tree index_type)
7303 return build_array_type_1 (elt_type, index_type, false, false, true);
7306 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7307 sizetype. */
7309 tree
7310 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7312 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
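/* Editor's sketch (not part of tree.cc): build_array_type_nelts is the
   shorthand for the two-step domain-then-array construction.  */
#if 0
  tree a = build_array_type_nelts (integer_type_node, 10);
  tree b = build_array_type (integer_type_node,
			     build_index_type (size_int (9)));
  /* Both should name the same shared int[10] node, thanks to
     type_hash_canon.  */
#endif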
7315 /* Strip ARRAY_TYPEs from TYPE until a non-array element type is
7316 found, and return that element type. */
7318 tree
7319 strip_array_types (tree type)
7321 while (TREE_CODE (type) == ARRAY_TYPE)
7322 type = TREE_TYPE (type);
7324 return type;
7327 /* Computes the canonical argument types from the argument type list
7328 ARGTYPES.
7330 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7331 on entry to this function, or if any of the ARGTYPES are
7332 structural.
7334 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7335 true on entry to this function, or if any of the ARGTYPES are
7336 non-canonical.
7338 Returns a canonical argument list, which may be ARGTYPES when the
7339 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7340 true) or would not differ from ARGTYPES. */
7342 static tree
7343 maybe_canonicalize_argtypes (tree argtypes,
7344 bool *any_structural_p,
7345 bool *any_noncanonical_p)
7347 tree arg;
7348 bool any_noncanonical_argtypes_p = false;
7350 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7352 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7353 /* Fail gracefully by stating that the type is structural. */
7354 *any_structural_p = true;
7355 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7356 *any_structural_p = true;
7357 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7358 || TREE_PURPOSE (arg))
7359 /* If the argument has a default argument, we consider it
7360 non-canonical even though the type itself is canonical.
7361 That way, different variants of function and method types
7362 with default arguments will all point to the variant with
7363 no defaults as their canonical type. */
7364 any_noncanonical_argtypes_p = true;
7367 if (*any_structural_p)
7368 return argtypes;
7370 if (any_noncanonical_argtypes_p)
7372 /* Build the canonical list of argument types. */
7373 tree canon_argtypes = NULL_TREE;
7374 bool is_void = false;
7376 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7378 if (arg == void_list_node)
7379 is_void = true;
7380 else
7381 canon_argtypes = tree_cons (NULL_TREE,
7382 TYPE_CANONICAL (TREE_VALUE (arg)),
7383 canon_argtypes);
7386 canon_argtypes = nreverse (canon_argtypes);
7387 if (is_void)
7388 canon_argtypes = chainon (canon_argtypes, void_list_node);
7390 /* There is a non-canonical type. */
7391 *any_noncanonical_p = true;
7392 return canon_argtypes;
7395 /* The canonical argument types are the same as ARGTYPES. */
7396 return argtypes;
7399 /* Construct, lay out and return
7400 the type of functions returning type VALUE_TYPE
7401 given arguments of types ARG_TYPES.
7402 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7403 are data type nodes for the arguments of the function.
7404 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7405 variable-arguments function with (...) prototype (no named arguments).
7406 If such a type has already been constructed, reuse it. */
7408 tree
7409 build_function_type (tree value_type, tree arg_types,
7410 bool no_named_args_stdarg_p)
7412 tree t;
7413 inchash::hash hstate;
7414 bool any_structural_p, any_noncanonical_p;
7415 tree canon_argtypes;
7417 gcc_assert (arg_types != error_mark_node);
7419 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7421 error ("function return type cannot be function");
7422 value_type = integer_type_node;
7425 /* Make a node of the sort we want. */
7426 t = make_node (FUNCTION_TYPE);
7427 TREE_TYPE (t) = value_type;
7428 TYPE_ARG_TYPES (t) = arg_types;
7429 if (no_named_args_stdarg_p)
7431 gcc_assert (arg_types == NULL_TREE);
7432 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7435 /* If we already have such a type, use the old one. */
7436 hashval_t hash = type_hash_canon_hash (t);
7437 t = type_hash_canon (hash, t);
7439 /* Set up the canonical type. */
7440 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7441 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7442 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7443 &any_structural_p,
7444 &any_noncanonical_p);
7445 if (any_structural_p)
7446 SET_TYPE_STRUCTURAL_EQUALITY (t);
7447 else if (any_noncanonical_p)
7448 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7449 canon_argtypes);
7451 if (!COMPLETE_TYPE_P (t))
7452 layout_type (t);
7453 return t;
7456 /* Build a function type. The RETURN_TYPE is the type returned by the
7457 function. If VAARGS is set, no void_type_node is appended to the
7458 list. ARGP must always be terminated by a NULL_TREE. */
7460 static tree
7461 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7463 tree t, args, last;
7465 t = va_arg (argp, tree);
7466 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7467 args = tree_cons (NULL_TREE, t, args);
7469 if (vaargs)
7471 last = args;
7472 if (args != NULL_TREE)
7473 args = nreverse (args);
7474 gcc_assert (last != void_list_node);
7476 else if (args == NULL_TREE)
7477 args = void_list_node;
7478 else
7480 last = args;
7481 args = nreverse (args);
7482 TREE_CHAIN (last) = void_list_node;
7484 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7486 return args;
7489 /* Build a function type. The RETURN_TYPE is the type returned by the
7490 function. If additional arguments are provided, they are
7491 additional argument types. The list of argument types must always
7492 be terminated by NULL_TREE. */
7494 tree
7495 build_function_type_list (tree return_type, ...)
7497 tree args;
7498 va_list p;
7500 va_start (p, return_type);
7501 args = build_function_type_list_1 (false, return_type, p);
7502 va_end (p);
7503 return args;
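/* Editor's sketch (not part of tree.cc): the NULL_TREE-terminated form is
   the usual way builtin signatures are described, e.g. a function with the
   shape of memcpy, "void *(void *, const void *, size_t)".  */
#if 0
  tree memcpy_like
    = build_function_type_list (ptr_type_node,
				ptr_type_node,
				const_ptr_type_node,
				size_type_node,
				NULL_TREE);
#endif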
7506 /* Build a variable argument function type. The RETURN_TYPE is the
7507 type returned by the function. If additional arguments are provided,
7508 they are additional argument types. The list of argument types must
7509 always be terminated by NULL_TREE. */
7511 tree
7512 build_varargs_function_type_list (tree return_type, ...)
7514 tree args;
7515 va_list p;
7517 va_start (p, return_type);
7518 args = build_function_type_list_1 (true, return_type, p);
7519 va_end (p);
7521 return args;
7524 /* Build a function type. RETURN_TYPE is the type returned by the
7525 function; VAARGS indicates whether the function takes varargs. The
7526 function takes N named arguments, the types of which are provided in
7527 ARG_TYPES. */
7529 static tree
7530 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7531 tree *arg_types)
7533 int i;
7534 tree t = vaargs ? NULL_TREE : void_list_node;
7536 for (i = n - 1; i >= 0; i--)
7537 t = tree_cons (NULL_TREE, arg_types[i], t);
7539 return build_function_type (return_type, t, vaargs && n == 0);
7542 /* Build a function type. RETURN_TYPE is the type returned by the
7543 function. The function takes N named arguments, the types of which
7544 are provided in ARG_TYPES. */
7546 tree
7547 build_function_type_array (tree return_type, int n, tree *arg_types)
7549 return build_function_type_array_1 (false, return_type, n, arg_types);
7552 /* Build a variable argument function type. RETURN_TYPE is the type
7553 returned by the function. The function takes N named arguments, the
7554 types of which are provided in ARG_TYPES. */
7556 tree
7557 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7559 return build_function_type_array_1 (true, return_type, n, arg_types);
7562 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7563 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7564 for the method. An implicit additional parameter (of type
7565 pointer-to-BASETYPE) is added to the ARGTYPES. */
7567 tree
7568 build_method_type_directly (tree basetype,
7569 tree rettype,
7570 tree argtypes)
7572 tree t;
7573 tree ptype;
7574 bool any_structural_p, any_noncanonical_p;
7575 tree canon_argtypes;
7577 /* Make a node of the sort we want. */
7578 t = make_node (METHOD_TYPE);
7580 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7581 TREE_TYPE (t) = rettype;
7582 ptype = build_pointer_type (basetype);
7584 /* The actual arglist for this function includes a "hidden" argument
7585 which is "this". Put it into the list of argument types. */
7586 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7587 TYPE_ARG_TYPES (t) = argtypes;
7589 /* If we already have such a type, use the old one. */
7590 hashval_t hash = type_hash_canon_hash (t);
7591 t = type_hash_canon (hash, t);
7593 /* Set up the canonical type. */
7594 any_structural_p
7595 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7596 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7597 any_noncanonical_p
7598 = (TYPE_CANONICAL (basetype) != basetype
7599 || TYPE_CANONICAL (rettype) != rettype);
7600 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7601 &any_structural_p,
7602 &any_noncanonical_p);
7603 if (any_structural_p)
7604 SET_TYPE_STRUCTURAL_EQUALITY (t);
7605 else if (any_noncanonical_p)
7606 TYPE_CANONICAL (t)
7607 = build_method_type_directly (TYPE_CANONICAL (basetype),
7608 TYPE_CANONICAL (rettype),
7609 canon_argtypes);
7610 if (!COMPLETE_TYPE_P (t))
7611 layout_type (t);
7613 return t;
7616 /* Construct, lay out and return the type of methods belonging to class
7617 BASETYPE and whose arguments and values are described by TYPE.
7618 If that type exists already, reuse it.
7619 TYPE must be a FUNCTION_TYPE node. */
7621 tree
7622 build_method_type (tree basetype, tree type)
7624 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7626 return build_method_type_directly (basetype,
7627 TREE_TYPE (type),
7628 TYPE_ARG_TYPES (type));
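/* Editor's sketch (not part of tree.cc): a METHOD_TYPE for something like
   "int C::get ()".  The implicit "this" parameter is prepended by
   build_method_type_directly itself, so ARGTYPES lists only the explicit
   parameters (here none, i.e. just the terminating void list).
   CLASS_TYPE is a hypothetical RECORD_TYPE.  */
#if 0
  tree getter_type
    = build_method_type_directly (class_type, integer_type_node,
				  void_list_node);
#endif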
7631 /* Construct, lay out and return the type of offsets to a value
7632 of type TYPE, within an object of type BASETYPE.
7633 If a suitable offset type exists already, reuse it. */
7635 tree
7636 build_offset_type (tree basetype, tree type)
7638 tree t;
7640 /* Make a node of the sort we want. */
7641 t = make_node (OFFSET_TYPE);
7643 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7644 TREE_TYPE (t) = type;
7646 /* If we already have such a type, use the old one. */
7647 hashval_t hash = type_hash_canon_hash (t);
7648 t = type_hash_canon (hash, t);
7650 if (!COMPLETE_TYPE_P (t))
7651 layout_type (t);
7653 if (TYPE_CANONICAL (t) == t)
7655 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7656 || TYPE_STRUCTURAL_EQUALITY_P (type))
7657 SET_TYPE_STRUCTURAL_EQUALITY (t);
7658 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7659 || TYPE_CANONICAL (type) != type)
7660 TYPE_CANONICAL (t)
7661 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7662 TYPE_CANONICAL (type));
7665 return t;
7668 /* Create a complex type whose components are COMPONENT_TYPE.
7670 If NAMED is true, the type is given a TYPE_NAME. We do not always
7671 do so because this creates a DECL node and thus makes the DECL_UIDs
7672 dependent on the type canonicalization hashtable, which is GC-ed,
7673 so the DECL_UIDs would not be stable wrt garbage collection. */
7675 tree
7676 build_complex_type (tree component_type, bool named)
7678 gcc_assert (INTEGRAL_TYPE_P (component_type)
7679 || SCALAR_FLOAT_TYPE_P (component_type)
7680 || FIXED_POINT_TYPE_P (component_type));
7682 /* Make a node of the sort we want. */
7683 tree probe = make_node (COMPLEX_TYPE);
7685 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7687 /* If we already have such a type, use the old one. */
7688 hashval_t hash = type_hash_canon_hash (probe);
7689 tree t = type_hash_canon (hash, probe);
7691 if (t == probe)
7693 /* We created a new type. The hash insertion will have laid
7694 out the type. We need to check the canonicalization and
7695 maybe set the name. */
7696 gcc_checking_assert (COMPLETE_TYPE_P (t)
7697 && !TYPE_NAME (t)
7698 && TYPE_CANONICAL (t) == t);
7700 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7701 SET_TYPE_STRUCTURAL_EQUALITY (t);
7702 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7703 TYPE_CANONICAL (t)
7704 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7706 /* We need to create a name, since complex is a fundamental type. */
7707 if (named)
7709 const char *name = NULL;
7711 if (TREE_TYPE (t) == char_type_node)
7712 name = "complex char";
7713 else if (TREE_TYPE (t) == signed_char_type_node)
7714 name = "complex signed char";
7715 else if (TREE_TYPE (t) == unsigned_char_type_node)
7716 name = "complex unsigned char";
7717 else if (TREE_TYPE (t) == short_integer_type_node)
7718 name = "complex short int";
7719 else if (TREE_TYPE (t) == short_unsigned_type_node)
7720 name = "complex short unsigned int";
7721 else if (TREE_TYPE (t) == integer_type_node)
7722 name = "complex int";
7723 else if (TREE_TYPE (t) == unsigned_type_node)
7724 name = "complex unsigned int";
7725 else if (TREE_TYPE (t) == long_integer_type_node)
7726 name = "complex long int";
7727 else if (TREE_TYPE (t) == long_unsigned_type_node)
7728 name = "complex long unsigned int";
7729 else if (TREE_TYPE (t) == long_long_integer_type_node)
7730 name = "complex long long int";
7731 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7732 name = "complex long long unsigned int";
7734 if (name != NULL)
7735 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7736 get_identifier (name), t);
7740 return build_qualified_type (t, TYPE_QUALS (component_type));
7743 /* If TYPE is a real or complex floating-point type and the target
7744 does not directly support arithmetic on TYPE then return the wider
7745 type to be used for arithmetic on TYPE. Otherwise, return
7746 NULL_TREE. */
7748 tree
7749 excess_precision_type (tree type)
7751 /* The target can give two different responses to the question of
7752 which excess precision mode it would like depending on whether we
7753 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7755 enum excess_precision_type requested_type
7756 = (flag_excess_precision == EXCESS_PRECISION_FAST
7757 ? EXCESS_PRECISION_TYPE_FAST
7758 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7759 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7761 enum flt_eval_method target_flt_eval_method
7762 = targetm.c.excess_precision (requested_type);
7764 /* The target should not ask for unpredictable float evaluation (though
7765 it might advertise that implicitly the evaluation is unpredictable,
7766 but we don't care about that here, it will have been reported
7767 elsewhere). If it does ask for unpredictable evaluation, we have
7768 nothing to do here. */
7769 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7771 /* Nothing to do. The target has asked for all types we know about
7772 to be computed with their native precision and range. */
7773 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7774 return NULL_TREE;
7776 /* The target will promote this type in a target-dependent way, so excess
7777 precision ought to leave it alone. */
7778 if (targetm.promoted_type (type) != NULL_TREE)
7779 return NULL_TREE;
7781 machine_mode float16_type_mode = (float16_type_node
7782 ? TYPE_MODE (float16_type_node)
7783 : VOIDmode);
7784 machine_mode bfloat16_type_mode = (bfloat16_type_node
7785 ? TYPE_MODE (bfloat16_type_node)
7786 : VOIDmode);
7787 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7788 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7790 switch (TREE_CODE (type))
7792 case REAL_TYPE:
7794 machine_mode type_mode = TYPE_MODE (type);
7795 switch (target_flt_eval_method)
7797 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7798 if (type_mode == float16_type_mode
7799 || type_mode == bfloat16_type_mode)
7800 return float_type_node;
7801 break;
7802 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7803 if (type_mode == float16_type_mode
7804 || type_mode == bfloat16_type_mode
7805 || type_mode == float_type_mode)
7806 return double_type_node;
7807 break;
7808 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7809 if (type_mode == float16_type_mode
7810 || type_mode == bfloat16_type_mode
7811 || type_mode == float_type_mode
7812 || type_mode == double_type_mode)
7813 return long_double_type_node;
7814 break;
7815 default:
7816 gcc_unreachable ();
7818 break;
7820 case COMPLEX_TYPE:
7822 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7823 return NULL_TREE;
7824 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7825 switch (target_flt_eval_method)
7827 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7828 if (type_mode == float16_type_mode
7829 || type_mode == bfloat16_type_mode)
7830 return complex_float_type_node;
7831 break;
7832 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7833 if (type_mode == float16_type_mode
7834 || type_mode == bfloat16_type_mode
7835 || type_mode == float_type_mode)
7836 return complex_double_type_node;
7837 break;
7838 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7839 if (type_mode == float16_type_mode
7840 || type_mode == bfloat16_type_mode
7841 || type_mode == float_type_mode
7842 || type_mode == double_type_mode)
7843 return complex_long_double_type_node;
7844 break;
7845 default:
7846 gcc_unreachable ();
7848 break;
7850 default:
7851 break;
7854 return NULL_TREE;
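/* Editor's illustration (not part of tree.cc): if the target answers
   FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE (C's FLT_EVAL_METHOD == 1), the switch
   above yields
     excess_precision_type (float_type_node)         -> double_type_node
     excess_precision_type (complex_float_type_node) -> complex_double_type_node
     excess_precision_type (double_type_node)        -> NULL_TREE
   i.e. float and _Float16 arithmetic is carried out in double, while double
   itself is left alone.  */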
7857 /* Return OP, stripped of any conversions to wider types as much as is safe.
7858 Converting the value back to OP's type makes a value equivalent to OP.
7860 If FOR_TYPE is nonzero, we return a value which, if converted to
7861 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7863 OP must have integer, real or enumeral type. Pointers are not allowed!
7865 There are some cases where the obvious value we could return
7866 would regenerate to OP if converted to OP's type,
7867 but would not extend like OP to wider types.
7868 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7869 For example, if OP is (unsigned short)(signed char)-1,
7870 we avoid returning (signed char)-1 if FOR_TYPE is int,
7871 even though extending that to an unsigned short would regenerate OP,
7872 since the result of extending (signed char)-1 to (int)
7873 is different from (int) OP. */
7875 tree
7876 get_unwidened (tree op, tree for_type)
7878 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7879 tree type = TREE_TYPE (op);
7880 unsigned final_prec
7881 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7882 int uns
7883 = (for_type != 0 && for_type != type
7884 && final_prec > TYPE_PRECISION (type)
7885 && TYPE_UNSIGNED (type));
7886 tree win = op;
7888 while (CONVERT_EXPR_P (op))
7890 int bitschange;
7892 /* TYPE_PRECISION on vector types has different meaning
7893 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7894 so avoid them here. */
7895 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7896 break;
7898 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7899 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7901 /* Truncations are many-one so cannot be removed.
7902 Unless we are later going to truncate down even farther. */
7903 if (bitschange < 0
7904 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7905 break;
7907 /* See what's inside this conversion. If we decide to strip it,
7908 we will set WIN. */
7909 op = TREE_OPERAND (op, 0);
7911 /* If we have not stripped any zero-extensions (uns is 0),
7912 we can strip any kind of extension.
7913 If we have previously stripped a zero-extension,
7914 only zero-extensions can safely be stripped.
7915 Any extension can be stripped if the bits it would produce
7916 are all going to be discarded later by truncating to FOR_TYPE. */
7918 if (bitschange > 0)
7920 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7921 win = op;
7922 /* TYPE_UNSIGNED says whether this is a zero-extension.
7923 Let's avoid computing it if it does not affect WIN
7924 and if UNS will not be needed again. */
7925 if ((uns
7926 || CONVERT_EXPR_P (op))
7927 && TYPE_UNSIGNED (TREE_TYPE (op)))
7929 uns = 1;
7930 win = op;
7935 /* If we finally reach a constant see if it fits in sth smaller and
7936 in that case convert it. */
7937 if (TREE_CODE (win) == INTEGER_CST)
7939 tree wtype = TREE_TYPE (win);
7940 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7941 if (for_type)
7942 prec = MAX (prec, final_prec);
7943 if (prec < TYPE_PRECISION (wtype))
7945 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7946 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7947 win = fold_convert (t, win);
7951 return win;
7954 /* Return OP or a simpler expression for a narrower value
7955 which can be sign-extended or zero-extended to give back OP.
7956 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7957 or 0 if the value should be sign-extended. */
7959 tree
7960 get_narrower (tree op, int *unsignedp_ptr)
7962 int uns = 0;
7963 int first = 1;
7964 tree win = op;
7965 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7967 if (TREE_CODE (op) == COMPOUND_EXPR)
7970 op = TREE_OPERAND (op, 1);
7971 while (TREE_CODE (op) == COMPOUND_EXPR);
7972 tree ret = get_narrower (op, unsignedp_ptr);
7973 if (ret == op)
7974 return win;
7975 auto_vec <tree, 16> v;
7976 unsigned int i;
7977 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7978 op = TREE_OPERAND (op, 1))
7979 v.safe_push (op);
7980 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7981 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
7982 TREE_TYPE (ret), TREE_OPERAND (op, 0),
7983 ret);
7984 return ret;
7986 while (TREE_CODE (op) == NOP_EXPR)
7988 int bitschange
7989 = (TYPE_PRECISION (TREE_TYPE (op))
7990 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
7992 /* Truncations are many-one so cannot be removed. */
7993 if (bitschange < 0)
7994 break;
7996 /* See what's inside this conversion. If we decide to strip it,
7997 we will set WIN. */
7999 if (bitschange > 0)
8001 op = TREE_OPERAND (op, 0);
8002 /* An extension: the outermost one can be stripped,
8003 but remember whether it is zero or sign extension. */
8004 if (first)
8005 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8006 /* Otherwise, if a sign extension has been stripped,
8007 only sign extensions can now be stripped;
8008 if a zero extension has been stripped, only zero-extensions. */
8009 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8010 break;
8011 first = 0;
8013 else /* bitschange == 0 */
8015 /* A change in nominal type can always be stripped, but we must
8016 preserve the unsignedness. */
8017 if (first)
8018 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8019 first = 0;
8020 op = TREE_OPERAND (op, 0);
8021 /* Keep trying to narrow, but don't assign op to win if it
8022 would turn an integral type into something else. */
8023 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8024 continue;
8027 win = op;
8030 if (TREE_CODE (op) == COMPONENT_REF
8031 /* Since type_for_size always gives an integer type. */
8032 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8033 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8034 /* Ensure field is laid out already. */
8035 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8036 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8038 unsigned HOST_WIDE_INT innerprec
8039 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8040 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8041 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8042 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8044 /* We can get this structure field in a narrower type that fits it,
8045 but the resulting extension to its nominal type (a fullword type)
8046 must satisfy the same conditions as for other extensions.
8048 Do this only for fields that are aligned (not bit-fields),
8049 because there is no advantage in doing so when bit-field
8050 insns will be used. */
8052 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8053 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8054 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8055 && type != 0)
8057 if (first)
8058 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8059 win = fold_convert (type, op);
8063 *unsignedp_ptr = uns;
8064 return win;
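/* Editor's note: a minimal usage sketch of get_narrower, added for
   illustration only (not part of tree.cc).  The helper name is
   hypothetical; EXP is any expression tree a caller already has.  */
#if 0
static tree
strip_widening_example (tree exp)
{
  int unsignedp;
  /* Strip redundant widening NOP_EXPRs; UNSIGNEDP records whether the
     stripped extension was a zero-extension (1) or sign-extension (0).  */
  tree inner = get_narrower (exp, &unsignedp);
  if (inner != exp)
    return inner;
  return exp;
}
#endif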
8067 /* Return true if integer constant C has a value that is permissible
8068 for TYPE, an integral type. */
8070 bool
8071 int_fits_type_p (const_tree c, const_tree type)
8073 tree type_low_bound, type_high_bound;
8074 bool ok_for_low_bound, ok_for_high_bound;
8075 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8077 /* Non-standard boolean types can have arbitrary precision but various
8078 transformations assume that they can only take values 0 and +/-1. */
8079 if (TREE_CODE (type) == BOOLEAN_TYPE)
8080 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8082 retry:
8083 type_low_bound = TYPE_MIN_VALUE (type);
8084 type_high_bound = TYPE_MAX_VALUE (type);
8086 /* If at least one bound of the type is a constant integer, we can check
8087 ourselves and maybe make a decision. If no such decision is possible, but
8088 this type is a subtype, try checking against that. Otherwise, use
8089 fits_to_tree_p, which checks against the precision.
8091 Compute the status for each possibly constant bound, and return if we see
8092 one does not match. Use ok_for_xxx_bound for this purpose: it is set to
8093 true when the bound is a constant that C is known to satisfy, and left
8094 false when that bound is not a constant. */
8096 /* Check if c >= type_low_bound. */
8097 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8099 if (tree_int_cst_lt (c, type_low_bound))
8100 return false;
8101 ok_for_low_bound = true;
8103 else
8104 ok_for_low_bound = false;
8106 /* Check if c <= type_high_bound. */
8107 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8109 if (tree_int_cst_lt (type_high_bound, c))
8110 return false;
8111 ok_for_high_bound = true;
8113 else
8114 ok_for_high_bound = false;
8116 /* If the constant fits both bounds, the result is known. */
8117 if (ok_for_low_bound && ok_for_high_bound)
8118 return true;
8120 /* Perform some generic filtering which may allow making a decision
8121 even if the bounds are not constant. First, negative integers
8122 never fit in unsigned types. */
8123 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8124 return false;
8126 /* Second, narrower types always fit in wider ones. */
8127 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8128 return true;
8130 /* Third, unsigned integers with top bit set never fit signed types. */
8131 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8133 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8134 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8136 /* When a tree_cst is converted to a wide-int, the precision
8137 is taken from the type. However, if the precision of the
8138 mode underneath the type is smaller than that, it is
8139 possible that the value will not fit. The test below
8140 fails if any bit is set between the sign bit of the
8141 underlying mode and the top bit of the type. */
8142 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8143 return false;
8145 else if (wi::neg_p (wi::to_wide (c)))
8146 return false;
8149 /* If we haven't been able to decide at this point, there is nothing more we
8150 can check ourselves here. Look at the base type if we have one and it
8151 has the same precision. */
8152 if (TREE_CODE (type) == INTEGER_TYPE
8153 && TREE_TYPE (type) != 0
8154 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8156 type = TREE_TYPE (type);
8157 goto retry;
8160 /* Or to fits_to_tree_p, if nothing else. */
8161 return wi::fits_to_tree_p (wi::to_wide (c), type);
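/* Editor's note: illustrative sketch only (not part of tree.cc).  A
   common pattern is to test int_fits_type_p before folding an
   INTEGER_CST to a narrower integral type; the helper name below is
   hypothetical.  */
#if 0
static tree
narrow_constant_example (tree cst, tree type)
{
  if (TREE_CODE (cst) == INTEGER_CST
      && INTEGRAL_TYPE_P (type)
      && int_fits_type_p (cst, type))
    return fold_convert (type, cst);
  return NULL_TREE;
}
#endif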
8164 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8165 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8166 represented (assuming two's-complement arithmetic) within the bit
8167 precision of the type are returned instead. */
8169 void
8170 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8172 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8173 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8174 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8175 else
8177 if (TYPE_UNSIGNED (type))
8178 mpz_set_ui (min, 0);
8179 else
8181 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8182 wi::to_mpz (mn, min, SIGNED);
8186 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8187 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8188 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8189 else
8191 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8192 wi::to_mpz (mn, max, TYPE_SIGN (type));
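/* Editor's note: illustrative sketch only (not part of tree.cc).  The
   caller owns the mpz_t objects and must initialize and clear them;
   the helper name is hypothetical.  */
#if 0
static bool
range_covers_zero_example (const_tree type)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  bool covers = mpz_sgn (lo) <= 0 && mpz_sgn (hi) >= 0;
  mpz_clear (lo);
  mpz_clear (hi);
  return covers;
}
#endif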
8196 /* Return true if VAR is an automatic variable. */
8198 bool
8199 auto_var_p (const_tree var)
8201 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8202 || TREE_CODE (var) == PARM_DECL)
8203 && ! TREE_STATIC (var))
8204 || TREE_CODE (var) == RESULT_DECL);
8207 /* Return true if VAR is an automatic variable defined in function FN. */
8209 bool
8210 auto_var_in_fn_p (const_tree var, const_tree fn)
8212 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8213 && (auto_var_p (var)
8214 || TREE_CODE (var) == LABEL_DECL));
8217 /* Subprogram of the following function. Called by walk_tree.
8219 Return *TP if it is an automatic variable or parameter of the
8220 function passed in as DATA. */
8222 static tree
8223 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8225 tree fn = (tree) data;
8227 if (TYPE_P (*tp))
8228 *walk_subtrees = 0;
8230 else if (DECL_P (*tp)
8231 && auto_var_in_fn_p (*tp, fn))
8232 return *tp;
8234 return NULL_TREE;
8237 /* Returns true if T is, contains, or refers to a type with variable
8238 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8239 arguments, but not the return type. If FN is nonzero, only return
8240 true if a modifier of the type or position of FN is a variable or
8241 parameter inside FN.
8243 This concept is more general than that of C99 'variably modified types':
8244 in C99, a struct type is never variably modified because a VLA may not
8245 appear as a structure member. However, in GNU C, code like:
8247 struct S { int i[f()]; };
8249 is valid, and other languages may define similar constructs. */
8251 bool
8252 variably_modified_type_p (tree type, tree fn)
8254 tree t;
8256 /* Test if T is either variable (if FN is zero) or an expression containing
8257 a variable in FN. If TYPE isn't gimplified, return true also if
8258 gimplify_one_sizepos would gimplify the expression into a local
8259 variable. */
8260 #define RETURN_TRUE_IF_VAR(T) \
8261 do { tree _t = (T); \
8262 if (_t != NULL_TREE \
8263 && _t != error_mark_node \
8264 && !CONSTANT_CLASS_P (_t) \
8265 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8266 && (!fn \
8267 || (!TYPE_SIZES_GIMPLIFIED (type) \
8268 && (TREE_CODE (_t) != VAR_DECL \
8269 && !CONTAINS_PLACEHOLDER_P (_t))) \
8270 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8271 return true; } while (0)
8273 if (type == error_mark_node)
8274 return false;
8276 /* If TYPE itself has variable size, it is variably modified. */
8277 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8278 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8280 switch (TREE_CODE (type))
8282 case POINTER_TYPE:
8283 case REFERENCE_TYPE:
8284 case VECTOR_TYPE:
8285 /* Ada can have pointer types referring to themselves indirectly. */
8286 if (TREE_VISITED (type))
8287 return false;
8288 TREE_VISITED (type) = true;
8289 if (variably_modified_type_p (TREE_TYPE (type), fn))
8291 TREE_VISITED (type) = false;
8292 return true;
8294 TREE_VISITED (type) = false;
8295 break;
8297 case FUNCTION_TYPE:
8298 case METHOD_TYPE:
8299 /* If TYPE is a function type, it is variably modified if the
8300 return type is variably modified. */
8301 if (variably_modified_type_p (TREE_TYPE (type), fn))
8302 return true;
8303 break;
8305 case INTEGER_TYPE:
8306 case REAL_TYPE:
8307 case FIXED_POINT_TYPE:
8308 case ENUMERAL_TYPE:
8309 case BOOLEAN_TYPE:
8310 /* Scalar types are variably modified if their end points
8311 aren't constant. */
8312 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8313 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8314 break;
8316 case RECORD_TYPE:
8317 case UNION_TYPE:
8318 case QUAL_UNION_TYPE:
8319 /* We can't see if any of the fields are variably-modified by the
8320 definition we normally use, since that would produce infinite
8321 recursion via pointers. */
8322 /* This is variably modified if some field's type is. */
8323 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8324 if (TREE_CODE (t) == FIELD_DECL)
8326 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8327 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8328 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8330 /* If the type is a qualified union, then the DECL_QUALIFIER
8331 of fields can also be an expression containing a variable. */
8332 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8333 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8335 /* If the field is a qualified union, then it's only a container
8336 for what's inside so we look into it. That's necessary in LTO
8337 mode because the sizes of the field tested above have been set
8338 to PLACEHOLDER_EXPRs by free_lang_data. */
8339 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8340 && variably_modified_type_p (TREE_TYPE (t), fn))
8341 return true;
8343 break;
8345 case ARRAY_TYPE:
8346 /* Do not call ourselves to avoid infinite recursion. This is
8347 variably modified if the element type is. */
8348 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8349 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8350 break;
8352 default:
8353 break;
8356 /* The current language may have other cases to check, but in general,
8357 all other types are not variably modified. */
8358 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8360 #undef RETURN_TRUE_IF_VAR
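/* Editor's note: illustrative sketch only (not part of tree.cc).  A front
   end might use variably_modified_type_p to decide whether a type's size
   must be computed at run time (e.g. a C99 VLA); the helper name is
   hypothetical.  */
#if 0
static bool
needs_runtime_size_example (tree type, tree fndecl)
{
  /* True if TYPE's size or layout depends on a variable, or, when FNDECL
     is non-null, on a variable or parameter of FNDECL.  */
  return variably_modified_type_p (type, fndecl);
}
#endif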
8363 /* Given a DECL or TYPE, return the scope in which it was declared, or
8364 NULL_TREE if there is no containing scope. */
8366 tree
8367 get_containing_scope (const_tree t)
8369 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8372 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8374 const_tree
8375 get_ultimate_context (const_tree decl)
8377 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8379 if (TREE_CODE (decl) == BLOCK)
8380 decl = BLOCK_SUPERCONTEXT (decl);
8381 else
8382 decl = get_containing_scope (decl);
8384 return decl;
8387 /* Return the innermost context enclosing DECL that is
8388 a FUNCTION_DECL, or zero if none. */
8390 tree
8391 decl_function_context (const_tree decl)
8393 tree context;
8395 if (TREE_CODE (decl) == ERROR_MARK)
8396 return 0;
8398 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8399 where we look up the function at runtime. Such functions always take
8400 a first argument of type 'pointer to real context'.
8402 C++ should really be fixed to use DECL_CONTEXT for the real context,
8403 and use something else for the "virtual context". */
8404 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8405 context
8406 = TYPE_MAIN_VARIANT
8407 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8408 else
8409 context = DECL_CONTEXT (decl);
8411 while (context && TREE_CODE (context) != FUNCTION_DECL)
8413 if (TREE_CODE (context) == BLOCK)
8414 context = BLOCK_SUPERCONTEXT (context);
8415 else
8416 context = get_containing_scope (context);
8419 return context;
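/* Editor's note: illustrative sketch only (not part of tree.cc).  A
   nested function is one whose innermost enclosing context is another
   FUNCTION_DECL; the helper name is hypothetical.  */
#if 0
static bool
is_nested_function_example (tree fndecl)
{
  return TREE_CODE (fndecl) == FUNCTION_DECL
	 && decl_function_context (fndecl) != NULL_TREE;
}
#endif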
8422 /* Return the innermost context enclosing DECL that is
8423 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8424 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8426 tree
8427 decl_type_context (const_tree decl)
8429 tree context = DECL_CONTEXT (decl);
8431 while (context)
8432 switch (TREE_CODE (context))
8434 case NAMESPACE_DECL:
8435 case TRANSLATION_UNIT_DECL:
8436 return NULL_TREE;
8438 case RECORD_TYPE:
8439 case UNION_TYPE:
8440 case QUAL_UNION_TYPE:
8441 return context;
8443 case TYPE_DECL:
8444 case FUNCTION_DECL:
8445 context = DECL_CONTEXT (context);
8446 break;
8448 case BLOCK:
8449 context = BLOCK_SUPERCONTEXT (context);
8450 break;
8452 default:
8453 gcc_unreachable ();
8456 return NULL_TREE;
8459 /* CALL is a CALL_EXPR. Return the declaration for the function
8460 called, or NULL_TREE if the called function cannot be
8461 determined. */
8463 tree
8464 get_callee_fndecl (const_tree call)
8466 tree addr;
8468 if (call == error_mark_node)
8469 return error_mark_node;
8471 /* It's invalid to call this function with anything but a
8472 CALL_EXPR. */
8473 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8475 /* The first operand to the CALL is the address of the function
8476 called. */
8477 addr = CALL_EXPR_FN (call);
8479 /* If there is no function, return early. */
8480 if (addr == NULL_TREE)
8481 return NULL_TREE;
8483 STRIP_NOPS (addr);
8485 /* If this is a readonly function pointer, extract its initial value. */
8486 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8487 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8488 && DECL_INITIAL (addr))
8489 addr = DECL_INITIAL (addr);
8491 /* If the address is just `&f' for some function `f', then we know
8492 that `f' is being called. */
8493 if (TREE_CODE (addr) == ADDR_EXPR
8494 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8495 return TREE_OPERAND (addr, 0);
8497 /* We couldn't figure out what was being called. */
8498 return NULL_TREE;
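/* Editor's note: illustrative sketch only (not part of tree.cc) of the
   common pattern of resolving a CALL_EXPR to a known built-in; the
   helper name is hypothetical.  */
#if 0
static bool
call_is_memcpy_example (tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl != NULL_TREE && fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY);
}
#endif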
8501 /* Return true when CALL's arguments and return value match those of FNDECL,
8502 the declaration of a built-in function. */
8504 static bool
8505 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8507 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8509 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8510 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8511 fndecl = decl;
8513 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8514 if (gimple_form
8515 ? !useless_type_conversion_p (TREE_TYPE (call),
8516 TREE_TYPE (TREE_TYPE (fndecl)))
8517 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8518 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8519 return false;
8521 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8522 unsigned nargs = call_expr_nargs (call);
8523 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8525 /* Variadic args follow. */
8526 if (!targs)
8527 return true;
8528 tree arg = CALL_EXPR_ARG (call, i);
8529 tree type = TREE_VALUE (targs);
8530 if (gimple_form
8531 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8532 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8534 /* For pointer arguments, be more forgiving, e.g. due to
8535 FILE * vs. fileptr_type_node, or say char * vs. const char *
8536 differences etc. */
8537 if (!gimple_form
8538 && POINTER_TYPE_P (type)
8539 && POINTER_TYPE_P (TREE_TYPE (arg))
8540 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8541 continue;
8542 /* char/short integral arguments are promoted to int
8543 by several frontends if targetm.calls.promote_prototypes
8544 is true. Allow such promotion too. */
8545 if (INTEGRAL_TYPE_P (type)
8546 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8547 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8548 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8549 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8550 && (gimple_form
8551 ? useless_type_conversion_p (integer_type_node,
8552 TREE_TYPE (arg))
8553 : tree_nop_conversion_p (integer_type_node,
8554 TREE_TYPE (arg))))
8555 continue;
8556 return false;
8559 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8560 return false;
8561 return true;
8564 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8565 return the associated function code, otherwise return CFN_LAST. */
8567 combined_fn
8568 get_call_combined_fn (const_tree call)
8570 /* It's invalid to call this function with anything but a CALL_EXPR. */
8571 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8573 if (!CALL_EXPR_FN (call))
8574 return as_combined_fn (CALL_EXPR_IFN (call));
8576 tree fndecl = get_callee_fndecl (call);
8577 if (fndecl
8578 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8579 && tree_builtin_call_types_compatible_p (call, fndecl))
8580 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8582 return CFN_LAST;
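/* Editor's note: illustrative sketch only (not part of tree.cc).
   combined_fn lets callers handle built-in and internal functions
   uniformly; the helper name is hypothetical.  */
#if 0
static bool
call_is_sqrt_example (tree call)
{
  switch (get_call_combined_fn (call))
    {
    CASE_CFN_SQRT:
      return true;
    default:
      return false;
    }
}
#endif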
8585 /* Comparator of indices based on tree_node_counts. */
8587 static int
8588 tree_nodes_cmp (const void *p1, const void *p2)
8590 const unsigned *n1 = (const unsigned *)p1;
8591 const unsigned *n2 = (const unsigned *)p2;
8593 return tree_node_counts[*n1] - tree_node_counts[*n2];
8596 /* Comparator of indices based on tree_code_counts. */
8598 static int
8599 tree_codes_cmp (const void *p1, const void *p2)
8601 const unsigned *n1 = (const unsigned *)p1;
8602 const unsigned *n2 = (const unsigned *)p2;
8604 return tree_code_counts[*n1] - tree_code_counts[*n2];
8607 #define TREE_MEM_USAGE_SPACES 40
8609 /* Print debugging information about tree nodes generated during the compile,
8610 and any language-specific information. */
8612 void
8613 dump_tree_statistics (void)
8615 if (GATHER_STATISTICS)
8617 uint64_t total_nodes, total_bytes;
8618 fprintf (stderr, "\nKind Nodes Bytes\n");
8619 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8620 total_nodes = total_bytes = 0;
8623 auto_vec<unsigned> indices (all_kinds);
8624 for (unsigned i = 0; i < all_kinds; i++)
8625 indices.quick_push (i);
8626 indices.qsort (tree_nodes_cmp);
8628 for (unsigned i = 0; i < (int) all_kinds; i++)
8630 unsigned j = indices[i];
8631 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8632 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8633 SIZE_AMOUNT (tree_node_sizes[j]));
8634 total_nodes += tree_node_counts[j];
8635 total_bytes += tree_node_sizes[j];
8637 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8638 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8639 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8640 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8644 fprintf (stderr, "Code Nodes\n");
8645 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8647 auto_vec<unsigned> indices (MAX_TREE_CODES);
8648 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8649 indices.quick_push (i);
8650 indices.qsort (tree_codes_cmp);
8652 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8654 unsigned j = indices[i];
8655 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8656 get_tree_code_name ((enum tree_code) j),
8657 SIZE_AMOUNT (tree_code_counts[j]));
8659 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8660 fprintf (stderr, "\n");
8661 ssanames_print_statistics ();
8662 fprintf (stderr, "\n");
8663 phinodes_print_statistics ();
8664 fprintf (stderr, "\n");
8667 else
8668 fprintf (stderr, "(No per-node statistics)\n");
8670 print_type_hash_statistics ();
8671 print_debug_expr_statistics ();
8672 print_value_expr_statistics ();
8673 lang_hooks.print_statistics ();
8676 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8678 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8680 unsigned
8681 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8683 /* This relies on the raw feedback's top 4 bits being zero. */
8684 #define FEEDBACK(X) ((X) * 0x04c11db7)
8685 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8686 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8687 static const unsigned syndromes[16] =
8689 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8690 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8691 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8692 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8694 #undef FEEDBACK
8695 #undef SYNDROME
8697 value <<= (32 - bytes * 8);
8698 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8700 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8702 chksum = (chksum << 4) ^ feedback;
8705 return chksum;
8708 /* Generate a crc32 of a string. */
8710 unsigned
8711 crc32_string (unsigned chksum, const char *string)
8713 do
8714 chksum = crc32_byte (chksum, *string);
8715 while (*string++);
8716 return chksum;
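/* Editor's note: illustrative sketch only (not part of tree.cc).  The
   crc32 helpers are chained by passing the previous checksum back in;
   the helper name is hypothetical.  */
#if 0
static unsigned
crc32_two_strings_example (const char *a, const char *b)
{
  unsigned chksum = crc32_string (0, a);
  return crc32_string (chksum, b);
}
#endif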
8719 /* P is a string that will be used in a symbol. Mask out any characters
8720 that are not valid in that context. */
8722 void
8723 clean_symbol_name (char *p)
8725 for (; *p; p++)
8726 if (! (ISALNUM (*p)
8727 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8728 || *p == '$'
8729 #endif
8730 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8731 || *p == '.'
8732 #endif
8734 *p = '_';
8737 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8739 /* Create a unique anonymous identifier. The identifier is still a
8740 valid assembly label. */
8742 tree
8743 make_anon_name ()
8745 const char *fmt =
8746 #if !defined (NO_DOT_IN_LABEL)
8748 #elif !defined (NO_DOLLAR_IN_LABEL)
8750 #else
8752 #endif
8753 "_anon_%d";
8755 char buf[24];
8756 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8757 gcc_checking_assert (len < int (sizeof (buf)));
8759 tree id = get_identifier_with_length (buf, len);
8760 IDENTIFIER_ANON_P (id) = true;
8762 return id;
8765 /* Generate a name for a special-purpose function.
8766 The generated name may need to be unique across the whole link.
8767 Changes to this function may also require corresponding changes to
8768 xstrdup_mask_random.
8769 TYPE is some string to identify the purpose of this function to the
8770 linker or collect2; it must start with an uppercase letter,
8771 one of:
8772 I - for constructors
8773 D - for destructors
8774 N - for C++ anonymous namespaces
8775 F - for DWARF unwind frame information. */
8777 tree
8778 get_file_function_name (const char *type)
8780 char *buf;
8781 const char *p;
8782 char *q;
8784 /* If we already have a name we know to be unique, just use that. */
8785 if (first_global_object_name)
8786 p = q = ASTRDUP (first_global_object_name);
8787 /* If the target is handling the constructors/destructors, they
8788 will be local to this file and the name is only necessary for
8789 debugging purposes.
8790 We also assign sub_I and sub_D suffixes to constructors called from
8791 the global static constructors. These are always local. */
8792 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8793 || (startswith (type, "sub_")
8794 && (type[4] == 'I' || type[4] == 'D')))
8796 const char *file = main_input_filename;
8797 if (! file)
8798 file = LOCATION_FILE (input_location);
8799 /* Just use the file's basename, because the full pathname
8800 might be quite long. */
8801 p = q = ASTRDUP (lbasename (file));
8803 else
8805 /* Otherwise, the name must be unique across the entire link.
8806 We don't have anything that we know to be unique to this translation
8807 unit, so use what we do have and throw in some randomness. */
8808 unsigned len;
8809 const char *name = weak_global_object_name;
8810 const char *file = main_input_filename;
8812 if (! name)
8813 name = "";
8814 if (! file)
8815 file = LOCATION_FILE (input_location);
8817 len = strlen (file);
8818 q = (char *) alloca (9 + 19 + len + 1);
8819 memcpy (q, file, len + 1);
8821 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8822 crc32_string (0, name), get_random_seed (false));
8824 p = q;
8827 clean_symbol_name (q);
8828 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8829 + strlen (type));
8831 /* Set up the name of the file-level functions we may need.
8832 Use a global object (which is already required to be unique over
8833 the program) rather than the file name (which imposes extra
8834 constraints). */
8835 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8837 return get_identifier (buf);
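/* Editor's note: illustrative sketch only (not part of tree.cc).  A
   typical caller asks for the identifier of a static-construction
   helper; the helper name below is hypothetical.  */
#if 0
static tree
static_ctor_name_example (void)
{
  /* Yields an IDENTIFIER_NODE of the form "_GLOBAL__I_<suffix>".  */
  return get_file_function_name ("I");
}
#endif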
8840 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8842 /* Complain that the tree code of NODE does not match the expected 0
8843 terminated list of trailing codes. The trailing code list can be
8844 empty, for a more vague error message. FILE, LINE, and FUNCTION
8845 are of the caller. */
8847 void
8848 tree_check_failed (const_tree node, const char *file,
8849 int line, const char *function, ...)
8851 va_list args;
8852 const char *buffer;
8853 unsigned length = 0;
8854 enum tree_code code;
8856 va_start (args, function);
8857 while ((code = (enum tree_code) va_arg (args, int)))
8858 length += 4 + strlen (get_tree_code_name (code));
8859 va_end (args);
8860 if (length)
8862 char *tmp;
8863 va_start (args, function);
8864 length += strlen ("expected ");
8865 buffer = tmp = (char *) alloca (length);
8866 length = 0;
8867 while ((code = (enum tree_code) va_arg (args, int)))
8869 const char *prefix = length ? " or " : "expected ";
8871 strcpy (tmp + length, prefix);
8872 length += strlen (prefix);
8873 strcpy (tmp + length, get_tree_code_name (code));
8874 length += strlen (get_tree_code_name (code));
8876 va_end (args);
8878 else
8879 buffer = "unexpected node";
8881 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8882 buffer, get_tree_code_name (TREE_CODE (node)),
8883 function, trim_filename (file), line);
8886 /* Complain that the tree code of NODE matches one of the codes in the
8887 0 terminated list of trailing codes, which it should not. FILE, LINE,
8888 and FUNCTION are of the caller. */
8890 void
8891 tree_not_check_failed (const_tree node, const char *file,
8892 int line, const char *function, ...)
8894 va_list args;
8895 char *buffer;
8896 unsigned length = 0;
8897 enum tree_code code;
8899 va_start (args, function);
8900 while ((code = (enum tree_code) va_arg (args, int)))
8901 length += 4 + strlen (get_tree_code_name (code));
8902 va_end (args);
8903 va_start (args, function);
8904 buffer = (char *) alloca (length);
8905 length = 0;
8906 while ((code = (enum tree_code) va_arg (args, int)))
8908 if (length)
8910 strcpy (buffer + length, " or ");
8911 length += 4;
8913 strcpy (buffer + length, get_tree_code_name (code));
8914 length += strlen (get_tree_code_name (code));
8916 va_end (args);
8918 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8919 buffer, get_tree_code_name (TREE_CODE (node)),
8920 function, trim_filename (file), line);
8923 /* Similar to tree_check_failed, except that we check for a class of tree
8924 code, given in CL. */
8926 void
8927 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8928 const char *file, int line, const char *function)
8930 internal_error
8931 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8932 TREE_CODE_CLASS_STRING (cl),
8933 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8934 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8937 /* Similar to tree_check_failed, except that instead of specifying a
8938 dozen codes, use the knowledge that they're all sequential. */
8940 void
8941 tree_range_check_failed (const_tree node, const char *file, int line,
8942 const char *function, enum tree_code c1,
8943 enum tree_code c2)
8945 char *buffer;
8946 unsigned length = 0;
8947 unsigned int c;
8949 for (c = c1; c <= c2; ++c)
8950 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8952 length += strlen ("expected ");
8953 buffer = (char *) alloca (length);
8954 length = 0;
8956 for (c = c1; c <= c2; ++c)
8958 const char *prefix = length ? " or " : "expected ";
8960 strcpy (buffer + length, prefix);
8961 length += strlen (prefix);
8962 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8963 length += strlen (get_tree_code_name ((enum tree_code) c));
8966 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8967 buffer, get_tree_code_name (TREE_CODE (node)),
8968 function, trim_filename (file), line);
8972 /* Similar to tree_check_failed, except that we check that a tree does
8973 not have the specified code class, given in CL. */
8975 void
8976 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8977 const char *file, int line, const char *function)
8979 internal_error
8980 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8981 TREE_CODE_CLASS_STRING (cl),
8982 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8983 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8987 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8989 void
8990 omp_clause_check_failed (const_tree node, const char *file, int line,
8991 const char *function, enum omp_clause_code code)
8993 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8994 "in %s, at %s:%d",
8995 omp_clause_code_name[code],
8996 get_tree_code_name (TREE_CODE (node)),
8997 function, trim_filename (file), line);
9001 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9003 void
9004 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9005 const char *function, enum omp_clause_code c1,
9006 enum omp_clause_code c2)
9008 char *buffer;
9009 unsigned length = 0;
9010 unsigned int c;
9012 for (c = c1; c <= c2; ++c)
9013 length += 4 + strlen (omp_clause_code_name[c]);
9015 length += strlen ("expected ");
9016 buffer = (char *) alloca (length);
9017 length = 0;
9019 for (c = c1; c <= c2; ++c)
9021 const char *prefix = length ? " or " : "expected ";
9023 strcpy (buffer + length, prefix);
9024 length += strlen (prefix);
9025 strcpy (buffer + length, omp_clause_code_name[c]);
9026 length += strlen (omp_clause_code_name[c]);
9029 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9030 buffer, omp_clause_code_name[TREE_CODE (node)],
9031 function, trim_filename (file), line);
9035 #undef DEFTREESTRUCT
9036 #define DEFTREESTRUCT(VAL, NAME) NAME,
9038 static const char *ts_enum_names[] = {
9039 #include "treestruct.def"
9041 #undef DEFTREESTRUCT
9043 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9045 /* Similar to tree_class_check_failed, except that we check for
9046 whether CODE contains the tree structure identified by EN. */
9048 void
9049 tree_contains_struct_check_failed (const_tree node,
9050 const enum tree_node_structure_enum en,
9051 const char *file, int line,
9052 const char *function)
9054 internal_error
9055 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9056 TS_ENUM_NAME (en),
9057 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9061 /* Similar to above, except that the check is for the bounds of a
9062 TREE_INT_CST's (dynamically sized) element vector. */
9064 void
9065 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9066 const char *function)
9068 internal_error
9069 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9070 "at %s:%d",
9071 idx + 1, len, function, trim_filename (file), line);
9074 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9075 (dynamically sized) vector. */
9077 void
9078 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9079 const char *function)
9081 internal_error
9082 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9083 idx + 1, len, function, trim_filename (file), line);
9086 /* Similar to above, except that the check is for the bounds of the operand
9087 vector of an expression node EXP. */
9089 void
9090 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9091 int line, const char *function)
9093 enum tree_code code = TREE_CODE (exp);
9094 internal_error
9095 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9096 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9097 function, trim_filename (file), line);
9100 /* Similar to above, except that the check is for the number of
9101 operands of an OMP_CLAUSE node. */
9103 void
9104 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9105 int line, const char *function)
9107 internal_error
9108 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9109 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9110 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9111 trim_filename (file), line);
9113 #endif /* ENABLE_TREE_CHECKING */
9115 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9116 and mapped to the machine mode MODE. Initialize its fields and build
9117 the information necessary for debugging output. */
9119 static tree
9120 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9122 tree t;
9123 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9125 t = make_node (VECTOR_TYPE);
9126 TREE_TYPE (t) = mv_innertype;
9127 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9128 SET_TYPE_MODE (t, mode);
9130 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9131 SET_TYPE_STRUCTURAL_EQUALITY (t);
9132 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9133 || mode != VOIDmode)
9134 && !VECTOR_BOOLEAN_TYPE_P (t))
9135 TYPE_CANONICAL (t)
9136 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9138 layout_type (t);
9140 hashval_t hash = type_hash_canon_hash (t);
9141 t = type_hash_canon (hash, t);
9143 /* We have built a main variant, based on the main variant of the
9144 inner type. Use it to build the variant we return. */
9145 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9146 && TREE_TYPE (t) != innertype)
9147 return build_type_attribute_qual_variant (t,
9148 TYPE_ATTRIBUTES (innertype),
9149 TYPE_QUALS (innertype));
9151 return t;
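/* Editor's note: illustrative sketch only (not part of tree.cc).
   External code reaches this static helper through wrappers such as
   build_vector_type; the helper name below is hypothetical.  */
#if 0
static tree
v4sf_type_example (void)
{
  /* A vector of four elements of the target's float type.  */
  return build_vector_type (float_type_node, 4);
}
#endif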
9154 static tree
9155 make_or_reuse_type (unsigned size, int unsignedp)
9157 int i;
9159 if (size == INT_TYPE_SIZE)
9160 return unsignedp ? unsigned_type_node : integer_type_node;
9161 if (size == CHAR_TYPE_SIZE)
9162 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9163 if (size == SHORT_TYPE_SIZE)
9164 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9165 if (size == LONG_TYPE_SIZE)
9166 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9167 if (size == LONG_LONG_TYPE_SIZE)
9168 return (unsignedp ? long_long_unsigned_type_node
9169 : long_long_integer_type_node);
9171 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9172 if (size == int_n_data[i].bitsize
9173 && int_n_enabled_p[i])
9174 return (unsignedp ? int_n_trees[i].unsigned_type
9175 : int_n_trees[i].signed_type);
9177 if (unsignedp)
9178 return make_unsigned_type (size);
9179 else
9180 return make_signed_type (size);
9183 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9185 static tree
9186 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9188 if (satp)
9190 if (size == SHORT_FRACT_TYPE_SIZE)
9191 return unsignedp ? sat_unsigned_short_fract_type_node
9192 : sat_short_fract_type_node;
9193 if (size == FRACT_TYPE_SIZE)
9194 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9195 if (size == LONG_FRACT_TYPE_SIZE)
9196 return unsignedp ? sat_unsigned_long_fract_type_node
9197 : sat_long_fract_type_node;
9198 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9199 return unsignedp ? sat_unsigned_long_long_fract_type_node
9200 : sat_long_long_fract_type_node;
9202 else
9204 if (size == SHORT_FRACT_TYPE_SIZE)
9205 return unsignedp ? unsigned_short_fract_type_node
9206 : short_fract_type_node;
9207 if (size == FRACT_TYPE_SIZE)
9208 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9209 if (size == LONG_FRACT_TYPE_SIZE)
9210 return unsignedp ? unsigned_long_fract_type_node
9211 : long_fract_type_node;
9212 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9213 return unsignedp ? unsigned_long_long_fract_type_node
9214 : long_long_fract_type_node;
9217 return make_fract_type (size, unsignedp, satp);
9220 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9222 static tree
9223 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9225 if (satp)
9227 if (size == SHORT_ACCUM_TYPE_SIZE)
9228 return unsignedp ? sat_unsigned_short_accum_type_node
9229 : sat_short_accum_type_node;
9230 if (size == ACCUM_TYPE_SIZE)
9231 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9232 if (size == LONG_ACCUM_TYPE_SIZE)
9233 return unsignedp ? sat_unsigned_long_accum_type_node
9234 : sat_long_accum_type_node;
9235 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9236 return unsignedp ? sat_unsigned_long_long_accum_type_node
9237 : sat_long_long_accum_type_node;
9239 else
9241 if (size == SHORT_ACCUM_TYPE_SIZE)
9242 return unsignedp ? unsigned_short_accum_type_node
9243 : short_accum_type_node;
9244 if (size == ACCUM_TYPE_SIZE)
9245 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9246 if (size == LONG_ACCUM_TYPE_SIZE)
9247 return unsignedp ? unsigned_long_accum_type_node
9248 : long_accum_type_node;
9249 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9250 return unsignedp ? unsigned_long_long_accum_type_node
9251 : long_long_accum_type_node;
9254 return make_accum_type (size, unsignedp, satp);
9258 /* Create an atomic variant node for TYPE. This routine is called
9259 during initialization of data types to create the 5 basic atomic
9260 types. The generic build_variant_type function requires these to
9261 already be set up in order to function properly, so cannot be
9262 called from there. If ALIGN is non-zero, then ensure alignment is
9263 overridden to this value. */
9265 static tree
9266 build_atomic_base (tree type, unsigned int align)
9268 tree t;
9270 /* Make sure it's not already registered. */
9271 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9272 return t;
9274 t = build_variant_type_copy (type);
9275 set_type_quals (t, TYPE_QUAL_ATOMIC);
9277 if (align)
9278 SET_TYPE_ALIGN (t, align);
9280 return t;
9283 /* Information about the _FloatN and _FloatNx types. This must be in
9284 the same order as the corresponding TI_* enum values. */
9285 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9287 { 16, false },
9288 { 32, false },
9289 { 64, false },
9290 { 128, false },
9291 { 32, true },
9292 { 64, true },
9293 { 128, true },
9297 /* Create nodes for all integer types (and error_mark_node) using the sizes
9298 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9300 void
9301 build_common_tree_nodes (bool signed_char)
9303 int i;
9305 error_mark_node = make_node (ERROR_MARK);
9306 TREE_TYPE (error_mark_node) = error_mark_node;
9308 initialize_sizetypes ();
9310 /* Define both `signed char' and `unsigned char'. */
9311 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9312 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9313 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9314 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9316 /* Define `char', which is like either `signed char' or `unsigned char'
9317 but not the same as either. */
9318 char_type_node
9319 = (signed_char
9320 ? make_signed_type (CHAR_TYPE_SIZE)
9321 : make_unsigned_type (CHAR_TYPE_SIZE));
9322 TYPE_STRING_FLAG (char_type_node) = 1;
9324 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9325 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9326 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9327 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9328 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9329 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9330 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9331 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9333 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9335 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9336 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9338 if (int_n_enabled_p[i])
9340 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9341 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9345 /* Define a boolean type. This type only represents boolean values but
9346 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9347 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9348 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9349 TYPE_PRECISION (boolean_type_node) = 1;
9350 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9352 /* Define what type to use for size_t. */
9353 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9354 size_type_node = unsigned_type_node;
9355 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9356 size_type_node = long_unsigned_type_node;
9357 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9358 size_type_node = long_long_unsigned_type_node;
9359 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9360 size_type_node = short_unsigned_type_node;
9361 else
9363 int i;
9365 size_type_node = NULL_TREE;
9366 for (i = 0; i < NUM_INT_N_ENTS; i++)
9367 if (int_n_enabled_p[i])
9369 char name[50], altname[50];
9370 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9371 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9373 if (strcmp (name, SIZE_TYPE) == 0
9374 || strcmp (altname, SIZE_TYPE) == 0)
9376 size_type_node = int_n_trees[i].unsigned_type;
9379 if (size_type_node == NULL_TREE)
9380 gcc_unreachable ();
9383 /* Define what type to use for ptrdiff_t. */
9384 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9385 ptrdiff_type_node = integer_type_node;
9386 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9387 ptrdiff_type_node = long_integer_type_node;
9388 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9389 ptrdiff_type_node = long_long_integer_type_node;
9390 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9391 ptrdiff_type_node = short_integer_type_node;
9392 else
9394 ptrdiff_type_node = NULL_TREE;
9395 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9396 if (int_n_enabled_p[i])
9398 char name[50], altname[50];
9399 sprintf (name, "__int%d", int_n_data[i].bitsize);
9400 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9402 if (strcmp (name, PTRDIFF_TYPE) == 0
9403 || strcmp (altname, PTRDIFF_TYPE) == 0)
9404 ptrdiff_type_node = int_n_trees[i].signed_type;
9406 if (ptrdiff_type_node == NULL_TREE)
9407 gcc_unreachable ();
9410 /* Fill in the rest of the sized types. Reuse existing type nodes
9411 when possible. */
9412 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9413 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9414 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9415 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9416 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9418 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9419 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9420 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9421 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9422 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9424 /* Don't call build_qualified_type for atomics. That routine does
9425 special processing for atomics, and until they are initialized
9426 it's better not to make that call.
9428 Check to see if there is a target override for atomic types. */
9430 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9431 targetm.atomic_align_for_mode (QImode));
9432 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9433 targetm.atomic_align_for_mode (HImode));
9434 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9435 targetm.atomic_align_for_mode (SImode));
9436 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9437 targetm.atomic_align_for_mode (DImode));
9438 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9439 targetm.atomic_align_for_mode (TImode));
9441 access_public_node = get_identifier ("public");
9442 access_protected_node = get_identifier ("protected");
9443 access_private_node = get_identifier ("private");
9445 /* Define these next since types below may use them. */
9446 integer_zero_node = build_int_cst (integer_type_node, 0);
9447 integer_one_node = build_int_cst (integer_type_node, 1);
9448 integer_three_node = build_int_cst (integer_type_node, 3);
9449 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9451 size_zero_node = size_int (0);
9452 size_one_node = size_int (1);
9453 bitsize_zero_node = bitsize_int (0);
9454 bitsize_one_node = bitsize_int (1);
9455 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9457 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9458 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9460 void_type_node = make_node (VOID_TYPE);
9461 layout_type (void_type_node);
9463 /* We are not going to have real types in C with less than byte alignment,
9464 so we might as well not have any types that claim to have it. */
9465 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9466 TYPE_USER_ALIGN (void_type_node) = 0;
9468 void_node = make_node (VOID_CST);
9469 TREE_TYPE (void_node) = void_type_node;
9471 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9473 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9474 layout_type (TREE_TYPE (null_pointer_node));
9476 ptr_type_node = build_pointer_type (void_type_node);
9477 const_ptr_type_node
9478 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9479 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9480 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9482 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9484 float_type_node = make_node (REAL_TYPE);
9485 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9486 layout_type (float_type_node);
9488 double_type_node = make_node (REAL_TYPE);
9489 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9490 layout_type (double_type_node);
9492 long_double_type_node = make_node (REAL_TYPE);
9493 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9494 layout_type (long_double_type_node);
9496 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9498 int n = floatn_nx_types[i].n;
9499 bool extended = floatn_nx_types[i].extended;
9500 scalar_float_mode mode;
9501 if (!targetm.floatn_mode (n, extended).exists (&mode))
9502 continue;
9503 int precision = GET_MODE_PRECISION (mode);
9504 /* Work around the rs6000 KFmode having precision 113 not
9505 128. */
9506 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9507 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9508 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9509 if (!extended)
9510 gcc_assert (min_precision == n);
9511 if (precision < min_precision)
9512 precision = min_precision;
9513 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9514 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9515 layout_type (FLOATN_NX_TYPE_NODE (i));
9516 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9518 float128t_type_node = float128_type_node;
9519 #ifdef HAVE_BFmode
9520 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9521 && targetm.scalar_mode_supported_p (BFmode)
9522 && targetm.libgcc_floating_mode_supported_p (BFmode))
9524 bfloat16_type_node = make_node (REAL_TYPE);
9525 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9526 layout_type (bfloat16_type_node);
9527 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9529 #endif
9531 float_ptr_type_node = build_pointer_type (float_type_node);
9532 double_ptr_type_node = build_pointer_type (double_type_node);
9533 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9534 integer_ptr_type_node = build_pointer_type (integer_type_node);
9536 /* Fixed size integer types. */
9537 uint16_type_node = make_or_reuse_type (16, 1);
9538 uint32_type_node = make_or_reuse_type (32, 1);
9539 uint64_type_node = make_or_reuse_type (64, 1);
9540 if (targetm.scalar_mode_supported_p (TImode))
9541 uint128_type_node = make_or_reuse_type (128, 1);
9543 /* Decimal float types. */
9544 if (targetm.decimal_float_supported_p ())
9546 dfloat32_type_node = make_node (REAL_TYPE);
9547 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9548 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9549 layout_type (dfloat32_type_node);
9551 dfloat64_type_node = make_node (REAL_TYPE);
9552 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9553 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9554 layout_type (dfloat64_type_node);
9556 dfloat128_type_node = make_node (REAL_TYPE);
9557 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9558 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9559 layout_type (dfloat128_type_node);
9562 complex_integer_type_node = build_complex_type (integer_type_node, true);
9563 complex_float_type_node = build_complex_type (float_type_node, true);
9564 complex_double_type_node = build_complex_type (double_type_node, true);
9565 complex_long_double_type_node = build_complex_type (long_double_type_node,
9566 true);
9568 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9570 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9571 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9572 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9575 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9576 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9577 sat_ ## KIND ## _type_node = \
9578 make_sat_signed_ ## KIND ## _type (SIZE); \
9579 sat_unsigned_ ## KIND ## _type_node = \
9580 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9581 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9582 unsigned_ ## KIND ## _type_node = \
9583 make_unsigned_ ## KIND ## _type (SIZE);
9585 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9586 sat_ ## WIDTH ## KIND ## _type_node = \
9587 make_sat_signed_ ## KIND ## _type (SIZE); \
9588 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9589 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9590 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9591 unsigned_ ## WIDTH ## KIND ## _type_node = \
9592 make_unsigned_ ## KIND ## _type (SIZE);
9594 /* Make fixed-point type nodes based on four different widths. */
9595 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9596 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9597 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9598 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9599 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9601 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9602 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9603 NAME ## _type_node = \
9604 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9605 u ## NAME ## _type_node = \
9606 make_or_reuse_unsigned_ ## KIND ## _type \
9607 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9608 sat_ ## NAME ## _type_node = \
9609 make_or_reuse_sat_signed_ ## KIND ## _type \
9610 (GET_MODE_BITSIZE (MODE ## mode)); \
9611 sat_u ## NAME ## _type_node = \
9612 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9613 (GET_MODE_BITSIZE (U ## MODE ## mode));
9615 /* Fixed-point type and mode nodes. */
9616 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9617 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9618 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9619 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9620 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9621 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9622 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9623 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9624 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9625 MAKE_FIXED_MODE_NODE (accum, da, DA)
9626 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9629 tree t = targetm.build_builtin_va_list ();
9631 /* Many back-ends define record types without setting TYPE_NAME.
9632 If we copied the record type here, we'd keep the original
9633 record type without a name. This breaks name mangling. So,
9634 don't copy record types and let c_common_nodes_and_builtins()
9635 declare the type to be __builtin_va_list. */
9636 if (TREE_CODE (t) != RECORD_TYPE)
9637 t = build_variant_type_copy (t);
9639 va_list_type_node = t;
9642 /* SCEV analyzer global shared trees. */
9643 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9644 TREE_TYPE (chrec_dont_know) = void_type_node;
9645 chrec_known = make_node (SCEV_KNOWN);
9646 TREE_TYPE (chrec_known) = void_type_node;
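/* Editor's note: illustrative sketch only (not part of tree.cc).  Front
   ends call build_common_tree_nodes once during initialization; in the
   C family the argument is derived from -fsigned-char.  The helper name
   is hypothetical.  */
#if 0
static void
frontend_init_example (void)
{
  build_common_tree_nodes (flag_signed_char);
  /* Language-specific types and builtins are registered next, followed
     by build_common_builtin_nodes ().  */
}
#endif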
9649 /* Modify DECL for given flags.
9650 TM_PURE attribute is set only on types, so the function will modify
9651 DECL's type when ECF_TM_PURE is used. */
9653 void
9654 set_call_expr_flags (tree decl, int flags)
9656 if (flags & ECF_NOTHROW)
9657 TREE_NOTHROW (decl) = 1;
9658 if (flags & ECF_CONST)
9659 TREE_READONLY (decl) = 1;
9660 if (flags & ECF_PURE)
9661 DECL_PURE_P (decl) = 1;
9662 if (flags & ECF_LOOPING_CONST_OR_PURE)
9663 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9664 if (flags & ECF_NOVOPS)
9665 DECL_IS_NOVOPS (decl) = 1;
9666 if (flags & ECF_NORETURN)
9667 TREE_THIS_VOLATILE (decl) = 1;
9668 if (flags & ECF_MALLOC)
9669 DECL_IS_MALLOC (decl) = 1;
9670 if (flags & ECF_RETURNS_TWICE)
9671 DECL_IS_RETURNS_TWICE (decl) = 1;
9672 if (flags & ECF_LEAF)
9673 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9674 NULL, DECL_ATTRIBUTES (decl));
9675 if (flags & ECF_COLD)
9676 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9677 NULL, DECL_ATTRIBUTES (decl));
9678 if (flags & ECF_RET1)
9679 DECL_ATTRIBUTES (decl)
9680 = tree_cons (get_identifier ("fn spec"),
9681 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9682 DECL_ATTRIBUTES (decl));
9683 if ((flags & ECF_TM_PURE) && flag_tm)
9684 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9685 /* Looping const or pure is implied by noreturn.
9686 There is currently no way to declare looping const or looping pure alone. */
9687 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9688 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
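/* Editor's note: illustrative sketch only (not part of tree.cc) of
   marking a hypothetical function decl as leaf, nothrow and pure.  */
#if 0
static void
mark_pure_leaf_example (tree fndecl)
{
  set_call_expr_flags (fndecl, ECF_PURE | ECF_NOTHROW | ECF_LEAF);
}
#endif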
9692 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9694 static void
9695 local_define_builtin (const char *name, tree type, enum built_in_function code,
9696 const char *library_name, int ecf_flags)
9698 tree decl;
9700 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9701 library_name, NULL_TREE);
9702 set_call_expr_flags (decl, ecf_flags);
9704 set_builtin_decl (code, decl, true);
9707 /* Call this function after instantiating all builtins that the language
9708 front end cares about. This will build the rest of the builtins
9709 and internal functions that are relied upon by the tree optimizers and
9710 the middle-end. */
9712 void
9713 build_common_builtin_nodes (void)
9715 tree tmp, ftype;
9716 int ecf_flags;
9718 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9720 ftype = build_function_type_list (void_type_node,
9721 ptr_type_node,
9722 ptr_type_node,
9723 integer_type_node,
9724 NULL_TREE);
9725 local_define_builtin ("__builtin_clear_padding", ftype,
9726 BUILT_IN_CLEAR_PADDING,
9727 "__builtin_clear_padding",
9728 ECF_LEAF | ECF_NOTHROW);
9731 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9732 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9733 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9734 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9736 ftype = build_function_type (void_type_node, void_list_node);
9737 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9738 local_define_builtin ("__builtin_unreachable", ftype,
9739 BUILT_IN_UNREACHABLE,
9740 "__builtin_unreachable",
9741 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9742 | ECF_CONST | ECF_COLD);
9743 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9744 local_define_builtin ("__builtin_unreachable trap", ftype,
9745 BUILT_IN_UNREACHABLE_TRAP,
9746 "__builtin_unreachable trap",
9747 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9748 | ECF_CONST | ECF_COLD);
9749 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9750 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9751 "abort",
9752 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9753 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9754 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9755 "__builtin_trap",
9756 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9759 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9760 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9762 ftype = build_function_type_list (ptr_type_node,
9763 ptr_type_node, const_ptr_type_node,
9764 size_type_node, NULL_TREE);
9766 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9767 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9768 "memcpy", ECF_NOTHROW | ECF_LEAF);
9769 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9770 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9771 "memmove", ECF_NOTHROW | ECF_LEAF);
9774 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9776 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9777 const_ptr_type_node, size_type_node,
9778 NULL_TREE);
9779 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9780 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9783 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9785 ftype = build_function_type_list (ptr_type_node,
9786 ptr_type_node, integer_type_node,
9787 size_type_node, NULL_TREE);
9788 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9789 "memset", ECF_NOTHROW | ECF_LEAF);
9792 /* If we're checking the stack, `alloca' can throw. */
9793 const int alloca_flags
9794 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9796 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9798 ftype = build_function_type_list (ptr_type_node,
9799 size_type_node, NULL_TREE);
9800 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9801 "alloca", alloca_flags);
9804 ftype = build_function_type_list (ptr_type_node, size_type_node,
9805 size_type_node, NULL_TREE);
9806 local_define_builtin ("__builtin_alloca_with_align", ftype,
9807 BUILT_IN_ALLOCA_WITH_ALIGN,
9808 "__builtin_alloca_with_align",
9809 alloca_flags);
9811 ftype = build_function_type_list (ptr_type_node, size_type_node,
9812 size_type_node, size_type_node, NULL_TREE);
9813 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9814 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9815 "__builtin_alloca_with_align_and_max",
9816 alloca_flags);
9818 ftype = build_function_type_list (void_type_node,
9819 ptr_type_node, ptr_type_node,
9820 ptr_type_node, NULL_TREE);
9821 local_define_builtin ("__builtin_init_trampoline", ftype,
9822 BUILT_IN_INIT_TRAMPOLINE,
9823 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9824 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9825 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9826 "__builtin_init_heap_trampoline",
9827 ECF_NOTHROW | ECF_LEAF);
9828 local_define_builtin ("__builtin_init_descriptor", ftype,
9829 BUILT_IN_INIT_DESCRIPTOR,
9830 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9832 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9833 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9834 BUILT_IN_ADJUST_TRAMPOLINE,
9835 "__builtin_adjust_trampoline",
9836 ECF_CONST | ECF_NOTHROW);
9837 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9838 BUILT_IN_ADJUST_DESCRIPTOR,
9839 "__builtin_adjust_descriptor",
9840 ECF_CONST | ECF_NOTHROW);
9842 ftype = build_function_type_list (void_type_node,
9843 ptr_type_node, ptr_type_node, NULL_TREE);
9844 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9845 local_define_builtin ("__builtin___clear_cache", ftype,
9846 BUILT_IN_CLEAR_CACHE,
9847 "__clear_cache",
9848 ECF_NOTHROW);
9850 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9851 BUILT_IN_NONLOCAL_GOTO,
9852 "__builtin_nonlocal_goto",
9853 ECF_NORETURN | ECF_NOTHROW);
9855 ftype = build_function_type_list (void_type_node,
9856 ptr_type_node, ptr_type_node, NULL_TREE);
9857 local_define_builtin ("__builtin_setjmp_setup", ftype,
9858 BUILT_IN_SETJMP_SETUP,
9859 "__builtin_setjmp_setup", ECF_NOTHROW);
9861 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9862 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9863 BUILT_IN_SETJMP_RECEIVER,
9864 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9866 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9867 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9868 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9870 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9871 local_define_builtin ("__builtin_stack_restore", ftype,
9872 BUILT_IN_STACK_RESTORE,
9873 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9875 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9876 const_ptr_type_node, size_type_node,
9877 NULL_TREE);
9878 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9879 "__builtin_memcmp_eq",
9880 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9882 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9883 "__builtin_strncmp_eq",
9884 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9886 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9887 "__builtin_strcmp_eq",
9888 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9890 /* If there's a possibility that we might use the ARM EABI, build the
9891 alternate __cxa_end_cleanup node used to resume from C++. */
9892 if (targetm.arm_eabi_unwinder)
9894 ftype = build_function_type_list (void_type_node, NULL_TREE);
9895 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9896 BUILT_IN_CXA_END_CLEANUP,
9897 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9900 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9901 local_define_builtin ("__builtin_unwind_resume", ftype,
9902 BUILT_IN_UNWIND_RESUME,
9903 ((targetm_common.except_unwind_info (&global_options)
9904 == UI_SJLJ)
9905 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9906 ECF_NORETURN);
9908 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9910 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9911 NULL_TREE);
9912 local_define_builtin ("__builtin_return_address", ftype,
9913 BUILT_IN_RETURN_ADDRESS,
9914 "__builtin_return_address",
9915 ECF_NOTHROW);
9918 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9919 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9921 ftype = build_function_type_list (void_type_node, ptr_type_node,
9922 ptr_type_node, NULL_TREE);
9923 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9924 local_define_builtin ("__cyg_profile_func_enter", ftype,
9925 BUILT_IN_PROFILE_FUNC_ENTER,
9926 "__cyg_profile_func_enter", 0);
9927 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9928 local_define_builtin ("__cyg_profile_func_exit", ftype,
9929 BUILT_IN_PROFILE_FUNC_EXIT,
9930 "__cyg_profile_func_exit", 0);
9933 /* The exception object and filter values from the runtime. The argument
9934 must be zero before exception lowering, i.e. from the front end. After
9935 exception lowering, it will be the region number for the exception
9936 landing pad. These functions are PURE instead of CONST to prevent
9937 them from being hoisted past the exception edge that will initialize
9936 their values in the landing pad. */
9939 ftype = build_function_type_list (ptr_type_node,
9940 integer_type_node, NULL_TREE);
9941 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9942 /* Only use TM_PURE if we have TM language support. */
9943 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9944 ecf_flags |= ECF_TM_PURE;
9945 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9946 "__builtin_eh_pointer", ecf_flags);
9948 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9949 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9950 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9951 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9953 ftype = build_function_type_list (void_type_node,
9954 integer_type_node, integer_type_node,
9955 NULL_TREE);
9956 local_define_builtin ("__builtin_eh_copy_values", ftype,
9957 BUILT_IN_EH_COPY_VALUES,
9958 "__builtin_eh_copy_values", ECF_NOTHROW);
9960 /* Complex multiplication and division. These are handled as builtins
9961 rather than optabs because emit_library_call_value doesn't support
9962 complex. Further, we can do slightly better with folding these
9963 beasties if the real and imaginary parts of the arguments are separate. */
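/* For illustration, the names produced by the loop below: with the default
   "__" prefix, SCmode gives "__mulsc3" / "__divsc3" and DCmode gives
   "__muldc3" / "__divdc3"; when targetm.libfunc_gnu_prefix is set they
   become "__gnu_mulsc3", "__gnu_divsc3", and so on.  */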
9965 int mode;
9967 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9969 char mode_name_buf[4], *q;
9970 const char *p;
9971 enum built_in_function mcode, dcode;
9972 tree type, inner_type;
9973 const char *prefix = "__";
9975 if (targetm.libfunc_gnu_prefix)
9976 prefix = "__gnu_";
9978 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9979 if (type == NULL)
9980 continue;
9981 inner_type = TREE_TYPE (type);
9983 ftype = build_function_type_list (type, inner_type, inner_type,
9984 inner_type, inner_type, NULL_TREE);
9986 mcode = ((enum built_in_function)
9987 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9988 dcode = ((enum built_in_function)
9989 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9991 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9992 *q = TOLOWER (*p);
9993 *q = '\0';
9995 /* For -ftrapping-math these should throw from a former
9996 -fnon-call-exception stmt. */
9997 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9998 NULL);
9999 local_define_builtin (built_in_names[mcode], ftype, mcode,
10000 built_in_names[mcode],
10001 ECF_CONST | ECF_LEAF);
10003 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10004 NULL);
10005 local_define_builtin (built_in_names[dcode], ftype, dcode,
10006 built_in_names[dcode],
10007 ECF_CONST | ECF_LEAF);
10011 init_internal_fns ();
10014 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10015 better way.
10017 If we requested a pointer to a vector, build up the pointers that
10018 we stripped off while looking for the inner type. Similarly for
10019 return values from functions.
10021 The argument TYPE is the top of the chain, and BOTTOM is the
10022 new type which we will point to. */
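/* Worked example (illustrative only): if TYPE is "float **" and BOTTOM is a
   vector type such as V4SF, the recursion below strips the two
   POINTER_TYPEs, substitutes BOTTOM for the innermost "float", and rebuilds
   the chain to give "V4SF **", preserving the pointer modes, the
   can-alias-all bits and the qualifiers/attributes at each level.  */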
10024 tree
10025 reconstruct_complex_type (tree type, tree bottom)
10027 tree inner, outer;
10029 if (TREE_CODE (type) == POINTER_TYPE)
10031 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10032 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10033 TYPE_REF_CAN_ALIAS_ALL (type));
10035 else if (TREE_CODE (type) == REFERENCE_TYPE)
10037 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10038 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10039 TYPE_REF_CAN_ALIAS_ALL (type));
10041 else if (TREE_CODE (type) == ARRAY_TYPE)
10043 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10044 outer = build_array_type (inner, TYPE_DOMAIN (type));
10046 else if (TREE_CODE (type) == FUNCTION_TYPE)
10048 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10049 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10050 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10052 else if (TREE_CODE (type) == METHOD_TYPE)
10054 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10055 /* The build_method_type_directly() routine prepends 'this' to argument list,
10056 so we must compensate by getting rid of it. */
10057 outer
10058 = build_method_type_directly
10059 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10060 inner,
10061 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10063 else if (TREE_CODE (type) == OFFSET_TYPE)
10065 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10066 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10068 else
10069 return bottom;
10071 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10072 TYPE_QUALS (type));
10075 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10076 the inner type. */
10077 tree
10078 build_vector_type_for_mode (tree innertype, machine_mode mode)
10080 poly_int64 nunits;
10081 unsigned int bitsize;
10083 switch (GET_MODE_CLASS (mode))
10085 case MODE_VECTOR_BOOL:
10086 case MODE_VECTOR_INT:
10087 case MODE_VECTOR_FLOAT:
10088 case MODE_VECTOR_FRACT:
10089 case MODE_VECTOR_UFRACT:
10090 case MODE_VECTOR_ACCUM:
10091 case MODE_VECTOR_UACCUM:
10092 nunits = GET_MODE_NUNITS (mode);
10093 break;
10095 case MODE_INT:
10096 /* Check that there are no leftover bits. */
10097 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10098 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10099 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10100 break;
10102 default:
10103 gcc_unreachable ();
10106 return make_vector_type (innertype, nunits, mode);
10109 /* Similarly, but takes the inner type and number of units, which must be
10110 a power of two. */
10112 tree
10113 build_vector_type (tree innertype, poly_int64 nunits)
10115 return make_vector_type (innertype, nunits, VOIDmode);
10118 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10120 tree
10121 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10123 gcc_assert (mask_mode != BLKmode);
10125 unsigned HOST_WIDE_INT esize;
10126 if (VECTOR_MODE_P (mask_mode))
10128 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10129 esize = vector_element_size (vsize, nunits);
10131 else
10132 esize = 1;
10134 tree bool_type = build_nonstandard_boolean_type (esize);
10136 return make_vector_type (bool_type, nunits, mask_mode);
10139 /* Build a vector type that holds one boolean result for each element of
10140 vector type VECTYPE. The public interface for this operation is
10141 truth_type_for. */
10143 static tree
10144 build_truth_vector_type_for (tree vectype)
10146 machine_mode vector_mode = TYPE_MODE (vectype);
10147 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10149 machine_mode mask_mode;
10150 if (VECTOR_MODE_P (vector_mode)
10151 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10152 return build_truth_vector_type_for_mode (nunits, mask_mode);
10154 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10155 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10156 tree bool_type = build_nonstandard_boolean_type (esize);
10158 return make_vector_type (bool_type, nunits, VOIDmode);
10161 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10162 set. */
10164 tree
10165 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10167 tree t = make_vector_type (innertype, nunits, VOIDmode);
10168 tree cand;
10169 /* We always build the non-opaque variant before the opaque one,
10170 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10171 cand = TYPE_NEXT_VARIANT (t);
10172 if (cand
10173 && TYPE_VECTOR_OPAQUE (cand)
10174 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10175 return cand;
10176 /* Otherwise build a variant type and make sure to queue it after
10177 the non-opaque type. */
10178 cand = build_distinct_type_copy (t);
10179 TYPE_VECTOR_OPAQUE (cand) = true;
10180 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10181 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10182 TYPE_NEXT_VARIANT (t) = cand;
10183 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10184 return cand;
10187 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10189 static poly_wide_int
10190 vector_cst_int_elt (const_tree t, unsigned int i)
10192 /* First handle elements that are directly encoded. */
10193 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10194 if (i < encoded_nelts)
10195 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10197 /* Identify the pattern that contains element I and work out the index of
10198 the last encoded element for that pattern. */
10199 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10200 unsigned int pattern = i % npatterns;
10201 unsigned int count = i / npatterns;
10202 unsigned int final_i = encoded_nelts - npatterns + pattern;
10204 /* If there are no steps, the final encoded value is the right one. */
10205 if (!VECTOR_CST_STEPPED_P (t))
10206 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10208 /* Otherwise work out the value from the last two encoded elements. */
10209 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10210 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10211 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10212 return wi::to_poly_wide (v2) + (count - 2) * diff;
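/* Worked example (illustrative only): a stepped VECTOR_CST with
   NPATTERNS == 1 and encoded elements { 1, 2, 3 } represents the series
   1, 2, 3, 4, ...  For I == 5: pattern == 0, count == 5, final_i == 2,
   so v1 == 2, v2 == 3 and diff == 1, giving 3 + (5 - 2) * 1 == 6, the
   sixth element of the series.  */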
10215 /* Return the value of element I of VECTOR_CST T. */
10217 tree
10218 vector_cst_elt (const_tree t, unsigned int i)
10220 /* First handle elements that are directly encoded. */
10221 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10222 if (i < encoded_nelts)
10223 return VECTOR_CST_ENCODED_ELT (t, i);
10225 /* If there are no steps, the final encoded value is the right one. */
10226 if (!VECTOR_CST_STEPPED_P (t))
10228 /* Identify the pattern that contains element I and work out the index of
10229 the last encoded element for that pattern. */
10230 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10231 unsigned int pattern = i % npatterns;
10232 unsigned int final_i = encoded_nelts - npatterns + pattern;
10233 return VECTOR_CST_ENCODED_ELT (t, final_i);
10236 /* Otherwise work out the value from the last two encoded elements. */
10237 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10238 vector_cst_int_elt (t, i));
10241 /* Given an initializer INIT, return TRUE if INIT is zero or some
10242 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10243 null, set *NONZERO if and only if INIT is known not to be all
10244 zeros. The combination of return value of false and *NONZERO
10245 false implies that INIT may but need not be all zeros. Other
10246 combinations indicate definitive answers. */
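/* Examples (illustrative only): for INIT of 0, 0.0 (but not -0.0) or an
   all-NUL STRING_CST the function returns true; for INIT of 1 it returns
   false and sets *NONZERO; for a CONSTRUCTOR containing a non-constant
   element it returns false while leaving *NONZERO false, i.e. "may or may
   not be all zeros".  */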
10248 bool
10249 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10251 bool dummy;
10252 if (!nonzero)
10253 nonzero = &dummy;
10255 /* Conservatively clear NONZERO and set it only if INIT is definitely
10256 not all zero. */
10257 *nonzero = false;
10259 STRIP_NOPS (init);
10261 unsigned HOST_WIDE_INT off = 0;
10263 switch (TREE_CODE (init))
10265 case INTEGER_CST:
10266 if (integer_zerop (init))
10267 return true;
10269 *nonzero = true;
10270 return false;
10272 case REAL_CST:
10273 /* ??? Note that this is not correct for C4X float formats. There,
10274 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10275 negative exponent. */
10276 if (real_zerop (init)
10277 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10278 return true;
10280 *nonzero = true;
10281 return false;
10283 case FIXED_CST:
10284 if (fixed_zerop (init))
10285 return true;
10287 *nonzero = true;
10288 return false;
10290 case COMPLEX_CST:
10291 if (integer_zerop (init)
10292 || (real_zerop (init)
10293 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10294 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10295 return true;
10297 *nonzero = true;
10298 return false;
10300 case VECTOR_CST:
10301 if (VECTOR_CST_NPATTERNS (init) == 1
10302 && VECTOR_CST_DUPLICATE_P (init)
10303 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10304 return true;
10306 *nonzero = true;
10307 return false;
10309 case CONSTRUCTOR:
10311 if (TREE_CLOBBER_P (init))
10312 return false;
10314 unsigned HOST_WIDE_INT idx;
10315 tree elt;
10317 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10318 if (!initializer_zerop (elt, nonzero))
10319 return false;
10321 return true;
10324 case MEM_REF:
10326 tree arg = TREE_OPERAND (init, 0);
10327 if (TREE_CODE (arg) != ADDR_EXPR)
10328 return false;
10329 tree offset = TREE_OPERAND (init, 1);
10330 if (TREE_CODE (offset) != INTEGER_CST
10331 || !tree_fits_uhwi_p (offset))
10332 return false;
10333 off = tree_to_uhwi (offset);
10334 if (INT_MAX < off)
10335 return false;
10336 arg = TREE_OPERAND (arg, 0);
10337 if (TREE_CODE (arg) != STRING_CST)
10338 return false;
10339 init = arg;
10341 /* Fall through. */
10343 case STRING_CST:
10345 gcc_assert (off <= INT_MAX);
10347 int i = off;
10348 int n = TREE_STRING_LENGTH (init);
10349 if (n <= i)
10350 return false;
10352 /* We need to loop through all elements to handle cases like
10353 "\0" and "\0foobar". */
10354 for (i = 0; i < n; ++i)
10355 if (TREE_STRING_POINTER (init)[i] != '\0')
10357 *nonzero = true;
10358 return false;
10361 return true;
10364 default:
10365 return false;
10369 /* Return true if EXPR is an initializer expression in which every element
10370 is a constant that is numerically equal to 0 or 1. The elements do not
10371 need to be equal to each other. */
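/* Examples (illustrative only): the vector constant { 0, 1, 1, 0 } and the
   real constant 1.0 satisfy this predicate; { 0, 2 } and 0.5 do not.  */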
10373 bool
10374 initializer_each_zero_or_onep (const_tree expr)
10376 STRIP_ANY_LOCATION_WRAPPER (expr);
10378 switch (TREE_CODE (expr))
10380 case INTEGER_CST:
10381 return integer_zerop (expr) || integer_onep (expr);
10383 case REAL_CST:
10384 return real_zerop (expr) || real_onep (expr);
10386 case VECTOR_CST:
10388 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10389 if (VECTOR_CST_STEPPED_P (expr)
10390 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10391 return false;
10393 for (unsigned int i = 0; i < nelts; ++i)
10395 tree elt = vector_cst_elt (expr, i);
10396 if (!initializer_each_zero_or_onep (elt))
10397 return false;
10400 return true;
10403 default:
10404 return false;
10408 /* Check if vector VEC consists of all the equal elements and
10409 that the number of elements corresponds to the type of VEC.
10410 The function returns the first element of the vector
10411 or NULL_TREE if the vector is not uniform. */
10412 tree
10413 uniform_vector_p (const_tree vec)
10415 tree first, t;
10416 unsigned HOST_WIDE_INT i, nelts;
10418 if (vec == NULL_TREE)
10419 return NULL_TREE;
10421 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10423 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10424 return TREE_OPERAND (vec, 0);
10426 else if (TREE_CODE (vec) == VECTOR_CST)
10428 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10429 return VECTOR_CST_ENCODED_ELT (vec, 0);
10430 return NULL_TREE;
10433 else if (TREE_CODE (vec) == CONSTRUCTOR
10434 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10436 first = error_mark_node;
10438 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10440 if (i == 0)
10442 first = t;
10443 continue;
10445 if (!operand_equal_p (first, t, 0))
10446 return NULL_TREE;
10448 if (i != nelts)
10449 return NULL_TREE;
10451 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10452 return uniform_vector_p (first);
10453 return first;
10456 return NULL_TREE;
10459 /* If the argument is INTEGER_CST, return it. If the argument is vector
10460 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10461 return NULL_TREE.
10462 Look through location wrappers. */
10464 tree
10465 uniform_integer_cst_p (tree t)
10467 STRIP_ANY_LOCATION_WRAPPER (t);
10469 if (TREE_CODE (t) == INTEGER_CST)
10470 return t;
10472 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10474 t = uniform_vector_p (t);
10475 if (t && TREE_CODE (t) == INTEGER_CST)
10476 return t;
10479 return NULL_TREE;
10482 /* Check whether T is an integer constant or a constant integer vector in which
10483 every element E satisfies ~E + 1 == 2^k for some k; if so, return ~E
10484 (element-wise, in the corresponding unsigned type), otherwise return NULL_TREE. */
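/* Worked example (illustrative only, 32-bit elements): for E == 0xfffffff0,
   ~E == 0xf and ~E + 1 == 0x10 is a power of two, so 15 is returned in the
   corresponding unsigned type (element-wise for vectors).  For
   E == 0xfffffff5, ~E + 1 == 11 is not a power of two and NULL_TREE is
   returned.  */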
10485 tree
10486 bitmask_inv_cst_vector_p (tree t)
10489 tree_code code = TREE_CODE (t);
10490 tree type = TREE_TYPE (t);
10492 if (!INTEGRAL_TYPE_P (type)
10493 && !VECTOR_INTEGER_TYPE_P (type))
10494 return NULL_TREE;
10496 unsigned HOST_WIDE_INT nelts = 1;
10497 tree cst;
10498 unsigned int idx = 0;
10499 bool uniform = uniform_integer_cst_p (t);
10500 tree newtype = unsigned_type_for (type);
10501 tree_vector_builder builder;
10502 if (code == INTEGER_CST)
10503 cst = t;
10504 else
10506 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10507 return NULL_TREE;
10509 cst = vector_cst_elt (t, 0);
10510 builder.new_vector (newtype, nelts, 1);
10513 tree ty = unsigned_type_for (TREE_TYPE (cst));
10517 if (idx > 0)
10518 cst = vector_cst_elt (t, idx);
10519 wide_int icst = wi::to_wide (cst);
10520 wide_int inv = wi::bit_not (icst);
10521 icst = wi::add (1, inv);
10522 if (wi::popcount (icst) != 1)
10523 return NULL_TREE;
10525 tree newcst = wide_int_to_tree (ty, inv);
10527 if (uniform)
10528 return build_uniform_cst (newtype, newcst);
10530 builder.quick_push (newcst);
10532 while (++idx < nelts);
10534 return builder.build ();
10537 /* If VECTOR_CST T has a single nonzero element, return the index of that
10538 element, otherwise return -1. */
10540 int
10541 single_nonzero_element (const_tree t)
10543 unsigned HOST_WIDE_INT nelts;
10544 unsigned int repeat_nelts;
10545 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10546 repeat_nelts = nelts;
10547 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10549 nelts = vector_cst_encoded_nelts (t);
10550 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10552 else
10553 return -1;
10555 int res = -1;
10556 for (unsigned int i = 0; i < nelts; ++i)
10558 tree elt = vector_cst_elt (t, i);
10559 if (!integer_zerop (elt) && !real_zerop (elt))
10561 if (res >= 0 || i >= repeat_nelts)
10562 return -1;
10563 res = i;
10566 return res;
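/* Examples (illustrative only): for the constant vector { 0, 0, 4, 0 } the
   function returns 2; for { 0, 1, 1, 0 } it returns -1 because more than
   one element is nonzero.  */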
10569 /* Build an empty statement at location LOC. */
10571 tree
10572 build_empty_stmt (location_t loc)
10574 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10575 SET_EXPR_LOCATION (t, loc);
10576 return t;
10580 /* Build an OMP clause with code CODE. LOC is the location of the
10581 clause. */
10583 tree
10584 build_omp_clause (location_t loc, enum omp_clause_code code)
10586 tree t;
10587 int size, length;
10589 length = omp_clause_num_ops[code];
10590 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10592 record_node_allocation_statistics (OMP_CLAUSE, size);
10594 t = (tree) ggc_internal_alloc (size);
10595 memset (t, 0, size);
10596 TREE_SET_CODE (t, OMP_CLAUSE);
10597 OMP_CLAUSE_SET_CODE (t, code);
10598 OMP_CLAUSE_LOCATION (t) = loc;
10600 return t;
10603 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10604 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10605 Except for the CODE and operand count field, other storage for the
10606 object is initialized to zeros. */
10608 tree
10609 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10611 tree t;
10612 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10614 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10615 gcc_assert (len >= 1);
10617 record_node_allocation_statistics (code, length);
10619 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10621 TREE_SET_CODE (t, code);
10623 /* Can't use TREE_OPERAND to store the length because if checking is
10624 enabled, it will try to check the length before we store it. :-P */
10625 t->exp.operands[0] = build_int_cst (sizetype, len);
10627 return t;
10630 /* Helper function for build_call_* functions; build a CALL_EXPR with
10631 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10632 the argument slots. */
10634 static tree
10635 build_call_1 (tree return_type, tree fn, int nargs)
10637 tree t;
10639 t = build_vl_exp (CALL_EXPR, nargs + 3);
10640 TREE_TYPE (t) = return_type;
10641 CALL_EXPR_FN (t) = fn;
10642 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10644 return t;
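/* In other words (illustrative summary): operand 0 of the CALL_EXPR holds
   the operand count (NARGS + 3), operand 1 is CALL_EXPR_FN, operand 2 is
   the static chain slot, and the NARGS call arguments occupy operands 3
   through NARGS + 2, which is what CALL_EXPR_ARG indexes.  */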
10647 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10648 FN and a null static chain slot. NARGS is the number of call arguments
10649 which are specified as "..." arguments. */
10651 tree
10652 build_call_nary (tree return_type, tree fn, int nargs, ...)
10654 tree ret;
10655 va_list args;
10656 va_start (args, nargs);
10657 ret = build_call_valist (return_type, fn, nargs, args);
10658 va_end (args);
10659 return ret;
10662 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10663 FN and a null static chain slot. NARGS is the number of call arguments
10664 which are specified as a va_list ARGS. */
10666 tree
10667 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10669 tree t;
10670 int i;
10672 t = build_call_1 (return_type, fn, nargs);
10673 for (i = 0; i < nargs; i++)
10674 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10675 process_call_operands (t);
10676 return t;
10679 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10680 FN and a null static chain slot. NARGS is the number of call arguments
10681 which are specified as a tree array ARGS. */
10683 tree
10684 build_call_array_loc (location_t loc, tree return_type, tree fn,
10685 int nargs, const tree *args)
10687 tree t;
10688 int i;
10690 t = build_call_1 (return_type, fn, nargs);
10691 for (i = 0; i < nargs; i++)
10692 CALL_EXPR_ARG (t, i) = args[i];
10693 process_call_operands (t);
10694 SET_EXPR_LOCATION (t, loc);
10695 return t;
10698 /* Like build_call_array, but takes a vec. */
10700 tree
10701 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10703 tree ret, t;
10704 unsigned int ix;
10706 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10707 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10708 CALL_EXPR_ARG (ret, ix) = t;
10709 process_call_operands (ret);
10710 return ret;
10713 /* Conveniently construct a function call expression. FNDECL names the
10714 function to be called and N arguments are passed in the array
10715 ARGARRAY. */
10717 tree
10718 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10720 tree fntype = TREE_TYPE (fndecl);
10721 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10723 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10726 /* Conveniently construct a function call expression. FNDECL names the
10727 function to be called and the arguments are passed in the vector
10728 VEC. */
10730 tree
10731 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10733 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10734 vec_safe_address (vec));
10738 /* Conveniently construct a function call expression. FNDECL names the
10739 function to be called, N is the number of arguments, and the "..."
10740 parameters are the argument expressions. */
10742 tree
10743 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10745 va_list ap;
10746 tree *argarray = XALLOCAVEC (tree, n);
10747 int i;
10749 va_start (ap, n);
10750 for (i = 0; i < n; i++)
10751 argarray[i] = va_arg (ap, tree);
10752 va_end (ap);
10753 return build_call_expr_loc_array (loc, fndecl, n, argarray);
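/* Usage sketch (illustrative only; LOC, DST, SRC and LEN stand for values
   the caller already has):

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr_loc (loc, fn, 3, dst, src, len);

   builds a CALL_EXPR equivalent to "memcpy (dst, src, len)" carrying
   location LOC.  */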
10756 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10757 varargs macros aren't supported by all bootstrap compilers. */
10759 tree
10760 build_call_expr (tree fndecl, int n, ...)
10762 va_list ap;
10763 tree *argarray = XALLOCAVEC (tree, n);
10764 int i;
10766 va_start (ap, n);
10767 for (i = 0; i < n; i++)
10768 argarray[i] = va_arg (ap, tree);
10769 va_end (ap);
10770 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10773 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10774 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10775 It will get gimplified later into an ordinary internal function. */
10777 tree
10778 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10779 tree type, int n, const tree *args)
10781 tree t = build_call_1 (type, NULL_TREE, n);
10782 for (int i = 0; i < n; ++i)
10783 CALL_EXPR_ARG (t, i) = args[i];
10784 SET_EXPR_LOCATION (t, loc);
10785 CALL_EXPR_IFN (t) = ifn;
10786 process_call_operands (t);
10787 return t;
10790 /* Build an internal call expression. This is just like CALL_EXPR, except
10791 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10792 internal function. */
10794 tree
10795 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10796 tree type, int n, ...)
10798 va_list ap;
10799 tree *argarray = XALLOCAVEC (tree, n);
10800 int i;
10802 va_start (ap, n);
10803 for (i = 0; i < n; i++)
10804 argarray[i] = va_arg (ap, tree);
10805 va_end (ap);
10806 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10809 /* Return a function call to FN, if the target is guaranteed to support it,
10810 or null otherwise.
10812 N is the number of arguments, passed in the "...", and TYPE is the
10813 type of the return value. */
10815 tree
10816 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10817 int n, ...)
10819 va_list ap;
10820 tree *argarray = XALLOCAVEC (tree, n);
10821 int i;
10823 va_start (ap, n);
10824 for (i = 0; i < n; i++)
10825 argarray[i] = va_arg (ap, tree);
10826 va_end (ap);
10827 if (internal_fn_p (fn))
10829 internal_fn ifn = as_internal_fn (fn);
10830 if (direct_internal_fn_p (ifn))
10832 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10833 if (!direct_internal_fn_supported_p (ifn, types,
10834 OPTIMIZE_FOR_BOTH))
10835 return NULL_TREE;
10837 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10839 else
10841 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10842 if (!fndecl)
10843 return NULL_TREE;
10844 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10848 /* Return a function call to the appropriate builtin alloca variant.
10850 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10851 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10852 bound for SIZE in case it is not a fixed value. */
10854 tree
10855 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10857 if (max_size >= 0)
10859 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10860 return
10861 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10863 else if (align > 0)
10865 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10866 return build_call_expr (t, 2, size, size_int (align));
10868 else
10870 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10871 return build_call_expr (t, 1, size);
10875 /* The built-in decl to use to mark code points believed to be unreachable.
10876 Typically __builtin_unreachable, but __builtin_trap if
10877 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10878 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10879 appropriate ubsan function. When building a call directly, use
10880 {gimple_,}build_builtin_unreachable instead. */
10882 tree
10883 builtin_decl_unreachable ()
10885 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10887 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10888 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10889 : flag_unreachable_traps)
10890 fncode = BUILT_IN_UNREACHABLE_TRAP;
10891 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10892 in the sanopt pass. */
10894 return builtin_decl_explicit (fncode);
10897 /* Build a call to __builtin_unreachable, possibly rewritten by
10898 -fsanitize=unreachable. Use this rather than the above when practical. */
10900 tree
10901 build_builtin_unreachable (location_t loc)
10903 tree data = NULL_TREE;
10904 tree fn = sanitize_unreachable_fn (&data, loc);
10905 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10908 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10909 if SIZE == -1) and return a tree node representing char* pointer to
10910 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10911 the STRING_CST value is the LEN bytes at STR (the representation
10912 of the string, which may be wide). Otherwise it's all zeros. */
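/* Example (illustrative only): build_string_literal (6, "hello") creates a
   STRING_CST of type "const char[6]" holding "hello" plus its terminating
   NUL (MAXIDX == 5) and returns its address as a "const char *"
   ADDR_EXPR.  */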
10914 tree
10915 build_string_literal (unsigned len, const char *str /* = NULL */,
10916 tree eltype /* = char_type_node */,
10917 unsigned HOST_WIDE_INT size /* = -1 */)
10919 tree t = build_string (len, str);
10920 /* Set the maximum valid index based on the string length or SIZE. */
10921 unsigned HOST_WIDE_INT maxidx
10922 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10924 tree index = build_index_type (size_int (maxidx));
10925 eltype = build_type_variant (eltype, 1, 0);
10926 tree type = build_array_type (eltype, index);
10927 TREE_TYPE (t) = type;
10928 TREE_CONSTANT (t) = 1;
10929 TREE_READONLY (t) = 1;
10930 TREE_STATIC (t) = 1;
10932 type = build_pointer_type (eltype);
10933 t = build1 (ADDR_EXPR, type,
10934 build4 (ARRAY_REF, eltype,
10935 t, integer_zero_node, NULL_TREE, NULL_TREE));
10936 return t;
10941 /* Return true if T (assumed to be a DECL) must be assigned a memory
10942 location. */
10944 bool
10945 needs_to_live_in_memory (const_tree t)
10947 return (TREE_ADDRESSABLE (t)
10948 || is_global_var (t)
10949 || (TREE_CODE (t) == RESULT_DECL
10950 && !DECL_BY_REFERENCE (t)
10951 && aggregate_value_p (t, current_function_decl)));
10954 /* Return the value of the constant X, sign-extended to a HOST_WIDE_INT. */
10956 HOST_WIDE_INT
10957 int_cst_value (const_tree x)
10959 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10960 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10962 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10963 gcc_assert (cst_and_fits_in_hwi (x));
10965 if (bits < HOST_BITS_PER_WIDE_INT)
10967 bool negative = ((val >> (bits - 1)) & 1) != 0;
10968 if (negative)
10969 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10970 else
10971 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10974 return val;
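/* Worked example (illustrative only): for an 8-bit constant whose low byte
   is 0xff, BITS == 8 and the top bit is set, so VAL is extended with ones
   and -1 is returned; with a low byte of 0x7f the top bit is clear and 127
   is returned.  */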
10977 /* If TYPE is an integral or pointer type, return an integer type with
10978 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10979 if TYPE is already an integer type of signedness UNSIGNEDP.
10980 If TYPE is a floating-point type, return an integer type with the same
10981 bitsize and with the signedness given by UNSIGNEDP; this is useful
10982 when doing bit-level operations on a floating-point value. */
10984 tree
10985 signed_or_unsigned_type_for (int unsignedp, tree type)
10987 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10988 return type;
10990 if (TREE_CODE (type) == VECTOR_TYPE)
10992 tree inner = TREE_TYPE (type);
10993 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10994 if (!inner2)
10995 return NULL_TREE;
10996 if (inner == inner2)
10997 return type;
10998 machine_mode new_mode;
10999 if (VECTOR_MODE_P (TYPE_MODE (type))
11000 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11001 return build_vector_type_for_mode (inner2, new_mode);
11002 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11005 if (TREE_CODE (type) == COMPLEX_TYPE)
11007 tree inner = TREE_TYPE (type);
11008 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11009 if (!inner2)
11010 return NULL_TREE;
11011 if (inner == inner2)
11012 return type;
11013 return build_complex_type (inner2);
11016 unsigned int bits;
11017 if (INTEGRAL_TYPE_P (type)
11018 || POINTER_TYPE_P (type)
11019 || TREE_CODE (type) == OFFSET_TYPE)
11020 bits = TYPE_PRECISION (type);
11021 else if (TREE_CODE (type) == REAL_TYPE)
11022 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11023 else
11024 return NULL_TREE;
11026 return build_nonstandard_integer_type (bits, unsignedp);
11029 /* If TYPE is an integral or pointer type, return an integer type with
11030 the same precision which is unsigned, or itself if TYPE is already an
11031 unsigned integer type. If TYPE is a floating-point type, return an
11032 unsigned integer type with the same bitsize as TYPE. */
11034 tree
11035 unsigned_type_for (tree type)
11037 return signed_or_unsigned_type_for (1, type);
11040 /* If TYPE is an integral or pointer type, return an integer type with
11041 the same precision which is signed, or itself if TYPE is already a
11042 signed integer type. If TYPE is a floating-point type, return a
11043 signed integer type with the same bitsize as TYPE. */
11045 tree
11046 signed_type_for (tree type)
11048 return signed_or_unsigned_type_for (0, type);
11051 /* Return true if TRUTH_TYPE is a valid truth (mask) type for TYPE:
11052 - For VECTOR_TYPEs:
11053 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11054 - The number of elements must match (known_eq).
11055 - targetm.vectorize.get_mask_mode must exist for TYPE's mode and must
11056 return exactly the mode of the truth type.
11057 - Otherwise, TRUTH_TYPE must be BOOLEAN_TYPE or uselessly convertible to it. */
11058 bool
11059 is_truth_type_for (tree type, tree truth_type)
11061 machine_mode mask_mode = TYPE_MODE (truth_type);
11062 machine_mode vmode = TYPE_MODE (type);
11063 machine_mode tmask_mode;
11065 if (TREE_CODE (type) == VECTOR_TYPE)
11067 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11068 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11069 TYPE_VECTOR_SUBPARTS (truth_type))
11070 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11071 && tmask_mode == mask_mode)
11072 return true;
11074 return false;
11077 return useless_type_conversion_p (boolean_type_node, truth_type);
11080 /* If TYPE is a vector type, return the corresponding truth (boolean mask)
11081 vector type with the same number of subparts. Otherwise return boolean_type_node. */
11083 tree
11084 truth_type_for (tree type)
11086 if (TREE_CODE (type) == VECTOR_TYPE)
11088 if (VECTOR_BOOLEAN_TYPE_P (type))
11089 return type;
11090 return build_truth_vector_type_for (type);
11092 else
11093 return boolean_type_node;
11096 /* Returns the largest value obtainable by casting something in INNER type to
11097 OUTER type. */
11099 tree
11100 upper_bound_in_type (tree outer, tree inner)
11102 unsigned int det = 0;
11103 unsigned oprec = TYPE_PRECISION (outer);
11104 unsigned iprec = TYPE_PRECISION (inner);
11105 unsigned prec;
11107 /* Compute a unique number for every combination. */
11108 det |= (oprec > iprec) ? 4 : 0;
11109 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11110 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11112 /* Determine the exponent to use. */
11113 switch (det)
11115 case 0:
11116 case 1:
11117 /* oprec <= iprec, outer: signed, inner: don't care. */
11118 prec = oprec - 1;
11119 break;
11120 case 2:
11121 case 3:
11122 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11123 prec = oprec;
11124 break;
11125 case 4:
11126 /* oprec > iprec, outer: signed, inner: signed. */
11127 prec = iprec - 1;
11128 break;
11129 case 5:
11130 /* oprec > iprec, outer: signed, inner: unsigned. */
11131 prec = iprec;
11132 break;
11133 case 6:
11134 /* oprec > iprec, outer: unsigned, inner: signed. */
11135 prec = oprec;
11136 break;
11137 case 7:
11138 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11139 prec = iprec;
11140 break;
11141 default:
11142 gcc_unreachable ();
11145 return wide_int_to_tree (outer,
11146 wi::mask (prec, false, TYPE_PRECISION (outer)));
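/* Worked example (illustrative only): for a signed 32-bit OUTER and an
   unsigned 16-bit INNER, OPREC (32) > IPREC (16), OUTER is signed and
   INNER unsigned, so DET == 5 and PREC == IPREC == 16; the result is the
   16-bit mask 65535, the largest value an unsigned 16-bit quantity can
   produce when cast to the 32-bit type.  */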
11149 /* Returns the smallest value obtainable by casting something in INNER type to
11150 OUTER type. */
11152 tree
11153 lower_bound_in_type (tree outer, tree inner)
11155 unsigned oprec = TYPE_PRECISION (outer);
11156 unsigned iprec = TYPE_PRECISION (inner);
11158 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11159 and obtain 0. */
11160 if (TYPE_UNSIGNED (outer)
11161 /* If we are widening something of an unsigned type, OUTER type
11162 contains all values of INNER type. In particular, both INNER
11163 and OUTER types have zero in common. */
11164 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11165 return build_int_cst (outer, 0);
11166 else
11168 /* If we are widening a signed type to another signed type, we
11169 want to obtain -2^(iprec-1). If we are keeping the
11170 precision or narrowing to a signed type, we want to obtain
11171 -2^(oprec-1). */
11172 unsigned prec = oprec > iprec ? iprec : oprec;
11173 return wide_int_to_tree (outer,
11174 wi::mask (prec - 1, true,
11175 TYPE_PRECISION (outer)));
11179 /* Return nonzero if two operands that are suitable for PHI nodes are
11180 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11181 SSA_NAME or invariant. Note that this is strictly an optimization.
11182 That is, callers of this function can directly call operand_equal_p
11183 and get the same result, only slower. */
11185 int
11186 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11188 if (arg0 == arg1)
11189 return 1;
11190 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11191 return 0;
11192 return operand_equal_p (arg0, arg1, 0);
11195 /* Returns number of zeros at the end of binary representation of X. */
11197 tree
11198 num_ending_zeros (const_tree x)
11200 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11204 #define WALK_SUBTREE(NODE) \
11205 do \
11207 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11208 if (result) \
11209 return result; \
11211 while (0)
11213 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11214 be walked whenever a type is seen in the tree. The remaining operands and the
11215 return value are as for walk_tree. */
11217 static tree
11218 walk_type_fields (tree type, walk_tree_fn func, void *data,
11219 hash_set<tree> *pset, walk_tree_lh lh)
11221 tree result = NULL_TREE;
11223 switch (TREE_CODE (type))
11225 case POINTER_TYPE:
11226 case REFERENCE_TYPE:
11227 case VECTOR_TYPE:
11228 /* We have to worry about mutually recursive pointers. These can't
11229 be written in C. They can in Ada. It's pathological, but
11230 there's an ACATS test (c38102a) that checks it. Deal with this
11231 by checking if we're pointing to another pointer, that one
11232 points to another pointer, that one does too, and we have no htab.
11233 If so, get a hash table. We check three levels deep to avoid
11234 the cost of the hash table if we don't need one. */
11235 if (POINTER_TYPE_P (TREE_TYPE (type))
11236 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11237 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11238 && !pset)
11240 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11241 func, data);
11242 if (result)
11243 return result;
11245 break;
11248 /* fall through */
11250 case COMPLEX_TYPE:
11251 WALK_SUBTREE (TREE_TYPE (type));
11252 break;
11254 case METHOD_TYPE:
11255 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11257 /* Fall through. */
11259 case FUNCTION_TYPE:
11260 WALK_SUBTREE (TREE_TYPE (type));
11262 tree arg;
11264 /* We never want to walk into default arguments. */
11265 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11266 WALK_SUBTREE (TREE_VALUE (arg));
11268 break;
11270 case ARRAY_TYPE:
11271 /* Don't follow this node's type if it is a pointer, for fear that
11272 we'll have infinite recursion. If we have a PSET, then we
11273 need not fear. */
11274 if (pset
11275 || (!POINTER_TYPE_P (TREE_TYPE (type))
11276 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11277 WALK_SUBTREE (TREE_TYPE (type));
11278 WALK_SUBTREE (TYPE_DOMAIN (type));
11279 break;
11281 case OFFSET_TYPE:
11282 WALK_SUBTREE (TREE_TYPE (type));
11283 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11284 break;
11286 default:
11287 break;
11290 return NULL_TREE;
11293 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11294 called with the DATA and the address of each sub-tree. If FUNC returns a
11295 non-NULL value, the traversal is stopped, and the value returned by FUNC
11296 is returned. If PSET is non-NULL it is used to record the nodes visited,
11297 and to avoid visiting a node more than once. */
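/* Usage sketch (illustrative only; the callback is hypothetical, not part
   of GCC): count the INTEGER_CSTs reachable from EXPR, visiting shared
   subtrees only once; returning a non-NULL tree from the callback would
   stop the walk early.

     static tree
     count_int_csts_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                       void *data)
     {
       if (TREE_CODE (*tp) == INTEGER_CST)
         ++*(unsigned *) data;
       return NULL_TREE;
     }

     unsigned count = 0;
     walk_tree_without_duplicates (&expr, count_int_csts_r, &count);  */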
11299 tree
11300 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11301 hash_set<tree> *pset, walk_tree_lh lh)
11303 enum tree_code code;
11304 int walk_subtrees;
11305 tree result;
11307 #define WALK_SUBTREE_TAIL(NODE) \
11308 do \
11310 tp = & (NODE); \
11311 goto tail_recurse; \
11313 while (0)
11315 tail_recurse:
11316 /* Skip empty subtrees. */
11317 if (!*tp)
11318 return NULL_TREE;
11320 /* Don't walk the same tree twice, if the user has requested
11321 that we avoid doing so. */
11322 if (pset && pset->add (*tp))
11323 return NULL_TREE;
11325 /* Call the function. */
11326 walk_subtrees = 1;
11327 result = (*func) (tp, &walk_subtrees, data);
11329 /* If we found something, return it. */
11330 if (result)
11331 return result;
11333 code = TREE_CODE (*tp);
11335 /* Even if we didn't, FUNC may have decided that there was nothing
11336 interesting below this point in the tree. */
11337 if (!walk_subtrees)
11339 /* But we still need to check our siblings. */
11340 if (code == TREE_LIST)
11341 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11342 else if (code == OMP_CLAUSE)
11343 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11344 else
11345 return NULL_TREE;
11348 if (lh)
11350 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11351 if (result || !walk_subtrees)
11352 return result;
11355 switch (code)
11357 case ERROR_MARK:
11358 case IDENTIFIER_NODE:
11359 case INTEGER_CST:
11360 case REAL_CST:
11361 case FIXED_CST:
11362 case STRING_CST:
11363 case BLOCK:
11364 case PLACEHOLDER_EXPR:
11365 case SSA_NAME:
11366 case FIELD_DECL:
11367 case RESULT_DECL:
11368 /* None of these have subtrees other than those already walked
11369 above. */
11370 break;
11372 case TREE_LIST:
11373 WALK_SUBTREE (TREE_VALUE (*tp));
11374 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11376 case TREE_VEC:
11378 int len = TREE_VEC_LENGTH (*tp);
11380 if (len == 0)
11381 break;
11383 /* Walk all elements but the last. */
11384 for (int i = 0; i < len - 1; ++i)
11385 WALK_SUBTREE (TREE_VEC_ELT (*tp, i));
11387 /* Now walk the last one as a tail call. */
11388 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, len - 1));
11391 case VECTOR_CST:
11393 unsigned len = vector_cst_encoded_nelts (*tp);
11394 if (len == 0)
11395 break;
11396 /* Walk all elements but the last. */
11397 for (unsigned i = 0; i < len - 1; ++i)
11398 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, i));
11399 /* Now walk the last one as a tail call. */
11400 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, len - 1));
11403 case COMPLEX_CST:
11404 WALK_SUBTREE (TREE_REALPART (*tp));
11405 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11407 case CONSTRUCTOR:
11409 unsigned HOST_WIDE_INT idx;
11410 constructor_elt *ce;
11412 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11413 idx++)
11414 WALK_SUBTREE (ce->value);
11416 break;
11418 case SAVE_EXPR:
11419 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11421 case BIND_EXPR:
11423 tree decl;
11424 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11426 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11427 into declarations that are just mentioned, rather than
11428 declared; they don't really belong to this part of the tree.
11429 And, we can see cycles: the initializer for a declaration
11430 can refer to the declaration itself. */
11431 WALK_SUBTREE (DECL_INITIAL (decl));
11432 WALK_SUBTREE (DECL_SIZE (decl));
11433 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11435 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11438 case STATEMENT_LIST:
11440 tree_stmt_iterator i;
11441 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11442 WALK_SUBTREE (*tsi_stmt_ptr (i));
11444 break;
11446 case OMP_CLAUSE:
11448 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11449 for (int i = 0; i < len; i++)
11450 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11451 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11454 case TARGET_EXPR:
11456 int i, len;
11458 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11459 But, we only want to walk once. */
11460 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11461 for (i = 0; i < len; ++i)
11462 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11463 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11466 case DECL_EXPR:
11467 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11468 defining. We only want to walk into these fields of a type in this
11469 case and not in the general case of a mere reference to the type.
11471 The criterion is as follows: if the field can be an expression, it
11472 must be walked only here. This should be in keeping with the fields
11473 that are directly gimplified in gimplify_type_sizes in order for the
11474 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11475 variable-sized types.
11477 Note that DECLs get walked as part of processing the BIND_EXPR. */
11478 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11480 /* Call the function for the decl so e.g. copy_tree_body_r can
11481 replace it with the remapped one. */
11482 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11483 if (result || !walk_subtrees)
11484 return result;
11486 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11487 if (TREE_CODE (*type_p) == ERROR_MARK)
11488 return NULL_TREE;
11490 /* Call the function for the type. See if it returns anything or
11491 doesn't want us to continue. If we are to continue, walk both
11492 the normal fields and those for the declaration case. */
11493 result = (*func) (type_p, &walk_subtrees, data);
11494 if (result || !walk_subtrees)
11495 return result;
11497 /* But do not walk a pointed-to type since it may itself need to
11498 be walked in the declaration case if it isn't anonymous. */
11499 if (!POINTER_TYPE_P (*type_p))
11501 result = walk_type_fields (*type_p, func, data, pset, lh);
11502 if (result)
11503 return result;
11506 /* If this is a record type, also walk the fields. */
11507 if (RECORD_OR_UNION_TYPE_P (*type_p))
11509 tree field;
11511 for (field = TYPE_FIELDS (*type_p); field;
11512 field = DECL_CHAIN (field))
11514 /* We'd like to look at the type of the field, but we can
11515 easily get infinite recursion. So assume it's pointed
11516 to elsewhere in the tree. Also, ignore things that
11517 aren't fields. */
11518 if (TREE_CODE (field) != FIELD_DECL)
11519 continue;
11521 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11522 WALK_SUBTREE (DECL_SIZE (field));
11523 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11524 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11525 WALK_SUBTREE (DECL_QUALIFIER (field));
11529 /* Same for scalar types. */
11530 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11531 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11532 || TREE_CODE (*type_p) == INTEGER_TYPE
11533 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11534 || TREE_CODE (*type_p) == REAL_TYPE)
11536 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11537 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11540 WALK_SUBTREE (TYPE_SIZE (*type_p));
11541 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11543 /* FALLTHRU */
11545 default:
11546 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11548 int i, len;
11550 /* Walk over all the sub-trees of this operand. */
11551 len = TREE_OPERAND_LENGTH (*tp);
11553 /* Go through the subtrees. We need to do this in forward order so
11554 that the scope of a FOR_EXPR is handled properly. */
11555 if (len)
11557 for (i = 0; i < len - 1; ++i)
11558 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11559 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11562 /* If this is a type, walk the needed fields in the type. */
11563 else if (TYPE_P (*tp))
11564 return walk_type_fields (*tp, func, data, pset, lh);
11565 break;
11568 /* We didn't find what we were looking for. */
11569 return NULL_TREE;
11571 #undef WALK_SUBTREE_TAIL
11573 #undef WALK_SUBTREE
11575 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11577 tree
11578 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11579 walk_tree_lh lh)
11581 tree result;
11583 hash_set<tree> pset;
11584 result = walk_tree_1 (tp, func, data, &pset, lh);
11585 return result;
11589 tree
11590 tree_block (tree t)
11592 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11594 if (IS_EXPR_CODE_CLASS (c))
11595 return LOCATION_BLOCK (t->exp.locus);
11596 gcc_unreachable ();
11597 return NULL;
11600 void
11601 tree_set_block (tree t, tree b)
11603 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11605 if (IS_EXPR_CODE_CLASS (c))
11607 t->exp.locus = set_block (t->exp.locus, b);
11609 else
11610 gcc_unreachable ();
11613 /* Create a nameless artificial label and put it in the current
11614 function context. The label has a location of LOC. Returns the
11615 newly created label. */
11617 tree
11618 create_artificial_label (location_t loc)
11620 tree lab = build_decl (loc,
11621 LABEL_DECL, NULL_TREE, void_type_node);
11623 DECL_ARTIFICIAL (lab) = 1;
11624 DECL_IGNORED_P (lab) = 1;
11625 DECL_CONTEXT (lab) = current_function_decl;
11626 return lab;
11629 /* Given a tree, try to return a useful variable name that we can use
11630 to prefix a temporary that is being assigned the value of the tree.
11631 I.e. given <temp> = &A, return A. */
11633 const char *
11634 get_name (tree t)
11636 tree stripped_decl;
11638 stripped_decl = t;
11639 STRIP_NOPS (stripped_decl);
11640 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11641 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11642 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11644 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11645 if (!name)
11646 return NULL;
11647 return IDENTIFIER_POINTER (name);
11649 else
11651 switch (TREE_CODE (stripped_decl))
11653 case ADDR_EXPR:
11654 return get_name (TREE_OPERAND (stripped_decl, 0));
11655 default:
11656 return NULL;
11661 /* Return true if FNTYPE has a variable argument list. */
11663 bool
11664 stdarg_p (const_tree fntype)
11666 function_args_iterator args_iter;
11667 tree n = NULL_TREE, t;
11669 if (!fntype)
11670 return false;
11672 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11673 return true;
11675 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11677 n = t;
11680 return n != NULL_TREE && n != void_type_node;
11683 /* Return true if TYPE has a prototype. */
11685 bool
11686 prototype_p (const_tree fntype)
11688 tree t;
11690 gcc_assert (fntype != NULL_TREE);
11692 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11693 return true;
11695 t = TYPE_ARG_TYPES (fntype);
11696 return (t != NULL_TREE);
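/* Editorial note (illustrative, not part of the original sources): for the
   C declarations

     int f (int, ...);   // stdarg_p: true,  prototype_p: true
     int g (int);        // stdarg_p: false, prototype_p: true
     int h ();           // pre-C23: stdarg_p: false, prototype_p: false

   stdarg_p returns true when the TYPE_ARG_TYPES list is not terminated by
   void_type_node, while prototype_p only requires that the list exists.
   The TYPE_NO_NAMED_ARGS_STDARG_P checks handle the C23-style
   "int f (...)" with no named parameters.  */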
11699 /* If BLOCK is inlined from an __attribute__((__artificial__))
11700 routine, return a pointer to the location from which it has been
11701 called. */
11702 location_t *
11703 block_nonartificial_location (tree block)
11705 location_t *ret = NULL;
11707 while (block && TREE_CODE (block) == BLOCK
11708 && BLOCK_ABSTRACT_ORIGIN (block))
11710 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11711 if (TREE_CODE (ao) == FUNCTION_DECL)
11713 /* If AO is an artificial inline, point RET to the
11714 call site locus at which it has been inlined and continue
11715 the loop, in case AO's caller is also an artificial
11716 inline. */
11717 if (DECL_DECLARED_INLINE_P (ao)
11718 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11719 ret = &BLOCK_SOURCE_LOCATION (block);
11720 else
11721 break;
11723 else if (TREE_CODE (ao) != BLOCK)
11724 break;
11726 block = BLOCK_SUPERCONTEXT (block);
11728 return ret;
11732 /* If EXP is inlined from an __attribute__((__artificial__))
11733 function, return the location of the original call expression. */
11735 location_t
11736 tree_nonartificial_location (tree exp)
11738 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11740 if (loc)
11741 return *loc;
11742 else
11743 return EXPR_LOCATION (exp);
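/* Editorial example (illustrative; my_memcpy is a hypothetical name): the
   usual producer of such blocks is an always-inline wrapper marked
   artificial, e.g.

     extern inline __attribute__ ((always_inline, artificial))
     void *my_memcpy (void *d, const void *s, __SIZE_TYPE__ n)
     { return __builtin_memcpy (d, s, n); }

   Warnings for code inlined from such a wrapper read better when issued
   at the call site of the wrapper, which is the location the two
   functions above recover.  */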
11746 /* Return the location into which EXP has been inlined. Analogous
11747 to tree_nonartificial_location() above but not limited to artificial
11748 functions declared inline. If SYSTEM_HEADER is true, return
11749 the macro expansion point of the location if it's in a system header. */
11751 location_t
11752 tree_inlined_location (tree exp, bool system_header /* = true */)
11754 location_t loc = UNKNOWN_LOCATION;
11756 tree block = TREE_BLOCK (exp);
11758 while (block && TREE_CODE (block) == BLOCK
11759 && BLOCK_ABSTRACT_ORIGIN (block))
11761 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11762 if (TREE_CODE (ao) == FUNCTION_DECL)
11763 loc = BLOCK_SOURCE_LOCATION (block);
11764 else if (TREE_CODE (ao) != BLOCK)
11765 break;
11767 block = BLOCK_SUPERCONTEXT (block);
11770 if (loc == UNKNOWN_LOCATION)
11772 loc = EXPR_LOCATION (exp);
11773 if (system_header)
11774 /* Only consider macro expansion when the block traversal failed
11775 to find a location. Otherwise it's not relevant. */
11776 return expansion_point_location_if_in_system_header (loc);
11779 return loc;
11782 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11783 nodes. */
11785 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11787 hashval_t
11788 cl_option_hasher::hash (tree x)
11790 const_tree const t = x;
11792 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11793 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11794 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11795 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11796 else
11797 gcc_unreachable ();
11800 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11801 TARGET_OPTION tree node) is the same as that given by *Y, another
11802 node of the same kind. */
11804 bool
11805 cl_option_hasher::equal (tree x, tree y)
11807 const_tree const xt = x;
11808 const_tree const yt = y;
11810 if (TREE_CODE (xt) != TREE_CODE (yt))
11811 return 0;
11813 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11814 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11815 TREE_OPTIMIZATION (yt));
11816 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11817 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11818 TREE_TARGET_OPTION (yt));
11819 else
11820 gcc_unreachable ();
11823 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11825 tree
11826 build_optimization_node (struct gcc_options *opts,
11827 struct gcc_options *opts_set)
11829 tree t;
11831 /* Use the cache of optimization nodes. */
11833 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11834 opts, opts_set);
11836 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11837 t = *slot;
11838 if (!t)
11840 /* Insert this one into the hash table. */
11841 t = cl_optimization_node;
11842 *slot = t;
11844 /* Make a new node for next time round. */
11845 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11848 return t;
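/* Editorial note (a sketch of the shared pattern, not additional
   functionality): both option-node builders here hash-cons their nodes.
   The current options are saved into a scratch node, the scratch node is
   used as the hash table key, and only when no equal node already exists
   is the scratch node published and a new scratch allocated:

     save (TREE_* (scratch), opts, opts_set);
     slot = cl_option_hash_table->find_slot (scratch, INSERT);
     if (!*slot)
       { *slot = scratch; scratch = make_node (...); }
     return *slot;

   Equal option sets therefore share a single tree node, so comparing
   option nodes elsewhere reduces to a pointer comparison.  */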
11851 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11853 tree
11854 build_target_option_node (struct gcc_options *opts,
11855 struct gcc_options *opts_set)
11857 tree t;
11859 /* Use the cache of target option nodes. */
11861 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11862 opts, opts_set);
11864 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11865 t = *slot;
11866 if (!t)
11868 /* Insert this one into the hash table. */
11869 t = cl_target_option_node;
11870 *slot = t;
11872 /* Make a new node for next time round. */
11873 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11876 return t;
11879 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11880 so that they aren't saved during PCH writing. */
11882 void
11883 prepare_target_option_nodes_for_pch (void)
11885 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11886 for (; iter != cl_option_hash_table->end (); ++iter)
11887 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11888 TREE_TARGET_GLOBALS (*iter) = NULL;
11891 /* Determine the "ultimate origin" of a block. */
11893 tree
11894 block_ultimate_origin (const_tree block)
11896 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11898 if (origin == NULL_TREE)
11899 return NULL_TREE;
11900 else
11902 gcc_checking_assert ((DECL_P (origin)
11903 && DECL_ORIGIN (origin) == origin)
11904 || BLOCK_ORIGIN (origin) == origin);
11905 return origin;
11909 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11910 no instruction. */
11912 bool
11913 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11915 /* Do not strip casts into or out of differing address spaces. */
11916 if (POINTER_TYPE_P (outer_type)
11917 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11919 if (!POINTER_TYPE_P (inner_type)
11920 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11921 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11922 return false;
11924 else if (POINTER_TYPE_P (inner_type)
11925 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11927 /* We already know that outer_type is not a pointer with
11928 a non-generic address space. */
11929 return false;
11932 /* Use precision rather than machine mode when we can, which gives
11933 the correct answer even for submode (bit-field) types. */
11934 if ((INTEGRAL_TYPE_P (outer_type)
11935 || POINTER_TYPE_P (outer_type)
11936 || TREE_CODE (outer_type) == OFFSET_TYPE)
11937 && (INTEGRAL_TYPE_P (inner_type)
11938 || POINTER_TYPE_P (inner_type)
11939 || TREE_CODE (inner_type) == OFFSET_TYPE))
11940 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11942 /* Otherwise fall back on comparing machine modes (e.g. for
11943 aggregate types, floats). */
11944 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
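/* Editorial examples (illustrative, assuming LP64 type sizes):
   tree_nop_conversion_p (unsigned_type_node, integer_type_node) is true
   because both have the same precision and differ only in signedness;
   tree_nop_conversion_p (long_integer_type_node, integer_type_node) is
   false (64 vs. 32 bits); for float vs. double the decision falls back
   to the machine-mode comparison and is false as well.  */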
11947 /* Return true iff conversion in EXP generates no instruction. Mark
11948 it inline so that we fully inline into the stripping functions even
11949 though we have two uses of this function. */
11951 static inline bool
11952 tree_nop_conversion (const_tree exp)
11954 tree outer_type, inner_type;
11956 if (location_wrapper_p (exp))
11957 return true;
11958 if (!CONVERT_EXPR_P (exp)
11959 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11960 return false;
11962 outer_type = TREE_TYPE (exp);
11963 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11964 if (!inner_type || inner_type == error_mark_node)
11965 return false;
11967 return tree_nop_conversion_p (outer_type, inner_type);
11970 /* Return true iff conversion in EXP generates no instruction. Don't
11971 consider conversions changing the signedness. */
11973 static bool
11974 tree_sign_nop_conversion (const_tree exp)
11976 tree outer_type, inner_type;
11978 if (!tree_nop_conversion (exp))
11979 return false;
11981 outer_type = TREE_TYPE (exp);
11982 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11984 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11985 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11988 /* Strip conversions from EXP according to tree_nop_conversion and
11989 return the resulting expression. */
11991 tree
11992 tree_strip_nop_conversions (tree exp)
11994 while (tree_nop_conversion (exp))
11995 exp = TREE_OPERAND (exp, 0);
11996 return exp;
11999 /* Strip conversions from EXP according to tree_sign_nop_conversion
12000 and return the resulting expression. */
12002 tree
12003 tree_strip_sign_nop_conversions (tree exp)
12005 while (tree_sign_nop_conversion (exp))
12006 exp = TREE_OPERAND (exp, 0);
12007 return exp;
12010 /* Avoid any floating point extensions from EXP. */
12011 tree
12012 strip_float_extensions (tree exp)
12014 tree sub, expt, subt;
12016 /* For a floating-point constant, look up the narrowest type that can hold
12017 it properly and handle it like (type)(narrowest_type)constant.
12018 This way we can optimize for instance a=a*2.0 where "a" is float
12019 but 2.0 is a double constant. */
12020 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12022 REAL_VALUE_TYPE orig;
12023 tree type = NULL;
12025 orig = TREE_REAL_CST (exp);
12026 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12027 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12028 type = float_type_node;
12029 else if (TYPE_PRECISION (TREE_TYPE (exp))
12030 > TYPE_PRECISION (double_type_node)
12031 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12032 type = double_type_node;
12033 if (type)
12034 return build_real_truncate (type, orig);
12037 if (!CONVERT_EXPR_P (exp))
12038 return exp;
12040 sub = TREE_OPERAND (exp, 0);
12041 subt = TREE_TYPE (sub);
12042 expt = TREE_TYPE (exp);
12044 if (!FLOAT_TYPE_P (subt))
12045 return exp;
12047 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12048 return exp;
12050 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12051 return exp;
12053 return strip_float_extensions (sub);
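/* Editorial example (illustrative): in C, "a * 2.0" with float "a" is
   represented as (double) a * 2.0.  strip_float_extensions applied to
   (double) a strips the widening and returns the float operand, and
   applied to the REAL_CST 2.0 it returns the constant truncated to
   float, since 2.0 is exactly representable there; that is what lets
   later folding perform the multiplication in float rather than
   double, as the comment above describes.  */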
12056 /* Strip out all handled components that produce invariant
12057 offsets. */
12059 const_tree
12060 strip_invariant_refs (const_tree op)
12062 while (handled_component_p (op))
12064 switch (TREE_CODE (op))
12066 case ARRAY_REF:
12067 case ARRAY_RANGE_REF:
12068 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12069 || TREE_OPERAND (op, 2) != NULL_TREE
12070 || TREE_OPERAND (op, 3) != NULL_TREE)
12071 return NULL;
12072 break;
12074 case COMPONENT_REF:
12075 if (TREE_OPERAND (op, 2) != NULL_TREE)
12076 return NULL;
12077 break;
12079 default:;
12081 op = TREE_OPERAND (op, 0);
12084 return op;
12087 /* Strip handled components with zero offset from OP. */
12089 tree
12090 strip_zero_offset_components (tree op)
12092 while (TREE_CODE (op) == COMPONENT_REF
12093 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12094 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12095 op = TREE_OPERAND (op, 0);
12096 return op;
12099 static GTY(()) tree gcc_eh_personality_decl;
12101 /* Return the GCC personality function decl. */
12103 tree
12104 lhd_gcc_personality (void)
12106 if (!gcc_eh_personality_decl)
12107 gcc_eh_personality_decl = build_personality_function ("gcc");
12108 return gcc_eh_personality_decl;
12111 /* TARGET is a call target of a GIMPLE call statement
12112 (obtained by gimple_call_fn). Return true if it is an
12113 OBJ_TYPE_REF representing a virtual call of a C++ method.
12114 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
12115 through a cast where middle-end devirtualization machinery
12116 can't apply.) FOR_DUMP_P is true when being called from
12117 the dump routines. */
12119 bool
12120 virtual_method_call_p (const_tree target, bool for_dump_p)
12122 if (TREE_CODE (target) != OBJ_TYPE_REF)
12123 return false;
12124 tree t = TREE_TYPE (target);
12125 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12126 t = TREE_TYPE (t);
12127 if (TREE_CODE (t) == FUNCTION_TYPE)
12128 return false;
12129 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12130 /* If we do not have BINFO associated, it means that type was built
12131 without devirtualization enabled. Do not consider this a virtual
12132 call. */
12133 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12134 return false;
12135 return true;
12138 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12140 static tree
12141 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12143 unsigned int i;
12144 tree base_binfo, b;
12146 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12147 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12148 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12149 return base_binfo;
12150 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12151 return b;
12152 return NULL;
12155 /* Try to find a base info of BINFO that would have its field decl at offset
12156 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12157 found, return it; otherwise return NULL_TREE. */
12159 tree
12160 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12162 tree type = BINFO_TYPE (binfo);
12164 while (true)
12166 HOST_WIDE_INT pos, size;
12167 tree fld;
12168 int i;
12170 if (types_same_for_odr (type, expected_type))
12171 return binfo;
12172 if (maybe_lt (offset, 0))
12173 return NULL_TREE;
12175 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12177 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12178 continue;
12180 pos = int_bit_position (fld);
12181 size = tree_to_uhwi (DECL_SIZE (fld));
12182 if (known_in_range_p (offset, pos, size))
12183 break;
12185 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12186 return NULL_TREE;
12188 /* Offset 0 indicates the primary base, whose vtable contents are
12189 represented in the binfo for the derived class. */
12190 else if (maybe_ne (offset, 0))
12192 tree found_binfo = NULL, base_binfo;
12193 /* Offsets in BINFO are in bytes relative to the whole structure
12194 while POS is in bits relative to the containing field. */
12195 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12196 / BITS_PER_UNIT);
12198 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12199 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12200 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12202 found_binfo = base_binfo;
12203 break;
12205 if (found_binfo)
12206 binfo = found_binfo;
12207 else
12208 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12209 binfo_offset);
12212 type = TREE_TYPE (fld);
12213 offset -= pos;
12217 /* Returns true if X is a typedef decl. */
12219 bool
12220 is_typedef_decl (const_tree x)
12222 return (x && TREE_CODE (x) == TYPE_DECL
12223 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12226 /* Returns true iff TYPE is a type variant created for a typedef. */
12228 bool
12229 typedef_variant_p (const_tree type)
12231 return is_typedef_decl (TYPE_NAME (type));
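/* Editorial example (illustrative): after

     typedef int myint;

   the front end creates a TYPE_DECL "myint" whose DECL_ORIGINAL_TYPE is
   int, so is_typedef_decl returns true for that decl; the variant of int
   built for the typedef has this TYPE_DECL as its TYPE_NAME, so
   typedef_variant_p returns true for the variant but not for plain int,
   whose TYPE_DECL has no DECL_ORIGINAL_TYPE.  */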
12234 /* PR 84195: Replace control characters in "unescaped" with their
12235 escaped equivalents. Allow newlines if -fmessage-length has
12236 been set to a non-zero value. This is done here, rather than
12237 where the attribute is recorded, as the message length can
12238 change between these two locations. */
12240 void
12241 escaped_string::escape (const char *unescaped)
12243 char *escaped;
12244 size_t i, new_i, len;
12246 if (m_owned)
12247 free (m_str);
12249 m_str = const_cast<char *> (unescaped);
12250 m_owned = false;
12252 if (unescaped == NULL || *unescaped == 0)
12253 return;
12255 len = strlen (unescaped);
12256 escaped = NULL;
12257 new_i = 0;
12259 for (i = 0; i < len; i++)
12261 char c = unescaped[i];
12263 if (!ISCNTRL (c))
12265 if (escaped)
12266 escaped[new_i++] = c;
12267 continue;
12270 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12272 if (escaped == NULL)
12274 /* We only allocate space for a new string if we
12275 actually encounter a control character that
12276 needs replacing. */
12277 escaped = (char *) xmalloc (len * 2 + 1);
12278 strncpy (escaped, unescaped, i);
12279 new_i = i;
12282 escaped[new_i++] = '\\';
12284 switch (c)
12286 case '\a': escaped[new_i++] = 'a'; break;
12287 case '\b': escaped[new_i++] = 'b'; break;
12288 case '\f': escaped[new_i++] = 'f'; break;
12289 case '\n': escaped[new_i++] = 'n'; break;
12290 case '\r': escaped[new_i++] = 'r'; break;
12291 case '\t': escaped[new_i++] = 't'; break;
12292 case '\v': escaped[new_i++] = 'v'; break;
12293 default: escaped[new_i++] = '?'; break;
12296 else if (escaped)
12297 escaped[new_i++] = c;
12300 if (escaped)
12302 escaped[new_i] = 0;
12303 m_str = escaped;
12304 m_owned = true;
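/* Editorial example (illustrative): for an attribute message such as
   "line1\nend\ttab", escape () produces "line1\\nend\\ttab" when the
   pretty-printer is not wrapping lines, so control characters cannot
   corrupt the diagnostic (PR 84195); when -fmessage-length is non-zero
   the newline is left in place, as the comment above explains.  */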
12308 /* Warn about a use of an identifier which was marked deprecated. Returns
12309 whether a warning was given. */
12311 bool
12312 warn_deprecated_use (tree node, tree attr)
12314 escaped_string msg;
12316 if (node == 0 || !warn_deprecated_decl)
12317 return false;
12319 if (!attr)
12321 if (DECL_P (node))
12322 attr = DECL_ATTRIBUTES (node);
12323 else if (TYPE_P (node))
12325 tree decl = TYPE_STUB_DECL (node);
12326 if (decl)
12327 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12328 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12329 != NULL_TREE)
12331 node = TREE_TYPE (decl);
12332 attr = TYPE_ATTRIBUTES (node);
12337 if (attr)
12338 attr = lookup_attribute ("deprecated", attr);
12340 if (attr)
12341 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12343 bool w = false;
12344 if (DECL_P (node))
12346 auto_diagnostic_group d;
12347 if (msg)
12348 w = warning (OPT_Wdeprecated_declarations,
12349 "%qD is deprecated: %s", node, (const char *) msg);
12350 else
12351 w = warning (OPT_Wdeprecated_declarations,
12352 "%qD is deprecated", node);
12353 if (w)
12354 inform (DECL_SOURCE_LOCATION (node), "declared here");
12356 else if (TYPE_P (node))
12358 tree what = NULL_TREE;
12359 tree decl = TYPE_STUB_DECL (node);
12361 if (TYPE_NAME (node))
12363 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12364 what = TYPE_NAME (node);
12365 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12366 && DECL_NAME (TYPE_NAME (node)))
12367 what = DECL_NAME (TYPE_NAME (node));
12370 auto_diagnostic_group d;
12371 if (what)
12373 if (msg)
12374 w = warning (OPT_Wdeprecated_declarations,
12375 "%qE is deprecated: %s", what, (const char *) msg);
12376 else
12377 w = warning (OPT_Wdeprecated_declarations,
12378 "%qE is deprecated", what);
12380 else
12382 if (msg)
12383 w = warning (OPT_Wdeprecated_declarations,
12384 "type is deprecated: %s", (const char *) msg);
12385 else
12386 w = warning (OPT_Wdeprecated_declarations,
12387 "type is deprecated");
12390 if (w && decl)
12391 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12394 return w;
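/* Editorial example (illustrative; old_api/new_api are hypothetical
   names): a declaration such as

     int old_api (void) __attribute__ ((deprecated ("use new_api")));

   routes every use of old_api through this function, producing a
   -Wdeprecated-declarations warning of the form
   "'old_api' is deprecated: use new_api" followed by a "declared here"
   note at the declaration.  */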
12397 /* Error out with an identifier which was marked 'unavailable'. */
12398 void
12399 error_unavailable_use (tree node, tree attr)
12401 escaped_string msg;
12403 if (node == 0)
12404 return;
12406 if (!attr)
12408 if (DECL_P (node))
12409 attr = DECL_ATTRIBUTES (node);
12410 else if (TYPE_P (node))
12412 tree decl = TYPE_STUB_DECL (node);
12413 if (decl)
12414 attr = lookup_attribute ("unavailable",
12415 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12419 if (attr)
12420 attr = lookup_attribute ("unavailable", attr);
12422 if (attr)
12423 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12425 if (DECL_P (node))
12427 auto_diagnostic_group d;
12428 if (msg)
12429 error ("%qD is unavailable: %s", node, (const char *) msg);
12430 else
12431 error ("%qD is unavailable", node);
12432 inform (DECL_SOURCE_LOCATION (node), "declared here");
12434 else if (TYPE_P (node))
12436 tree what = NULL_TREE;
12437 tree decl = TYPE_STUB_DECL (node);
12439 if (TYPE_NAME (node))
12441 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12442 what = TYPE_NAME (node);
12443 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12444 && DECL_NAME (TYPE_NAME (node)))
12445 what = DECL_NAME (TYPE_NAME (node));
12448 auto_diagnostic_group d;
12449 if (what)
12451 if (msg)
12452 error ("%qE is unavailable: %s", what, (const char *) msg);
12453 else
12454 error ("%qE is unavailable", what);
12456 else
12458 if (msg)
12459 error ("type is unavailable: %s", (const char *) msg);
12460 else
12461 error ("type is unavailable");
12464 if (decl)
12465 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12469 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12470 somewhere in it. */
12472 bool
12473 contains_bitfld_component_ref_p (const_tree ref)
12475 while (handled_component_p (ref))
12477 if (TREE_CODE (ref) == COMPONENT_REF
12478 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12479 return true;
12480 ref = TREE_OPERAND (ref, 0);
12483 return false;
12486 /* Try to determine whether a TRY_CATCH expression can fall through.
12487 This is a subroutine of block_may_fallthru. */
12489 static bool
12490 try_catch_may_fallthru (const_tree stmt)
12492 tree_stmt_iterator i;
12494 /* If the TRY block can fall through, the whole TRY_CATCH can
12495 fall through. */
12496 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12497 return true;
12499 i = tsi_start (TREE_OPERAND (stmt, 1));
12500 switch (TREE_CODE (tsi_stmt (i)))
12502 case CATCH_EXPR:
12503 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12504 catch expression and a body. The whole TRY_CATCH may fall
12505 through iff any of the catch bodies falls through. */
12506 for (; !tsi_end_p (i); tsi_next (&i))
12508 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12509 return true;
12511 return false;
12513 case EH_FILTER_EXPR:
12514 /* The exception filter expression only matters if there is an
12515 exception. If the exception does not match EH_FILTER_TYPES,
12516 we will execute EH_FILTER_FAILURE, and we will fall through
12517 if that falls through. If the exception does match
12518 EH_FILTER_TYPES, the stack unwinder will continue up the
12519 stack, so we will not fall through. We don't know whether we
12520 will throw an exception which matches EH_FILTER_TYPES or not,
12521 so we just ignore EH_FILTER_TYPES and assume that we might
12522 throw an exception which doesn't match. */
12523 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12525 default:
12526 /* This case represents statements to be executed when an
12527 exception occurs. Those statements are implicitly followed
12528 by a RESX statement to resume execution after the exception.
12529 So in this case the TRY_CATCH never falls through. */
12530 return false;
12534 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12535 need not be 100% accurate; simply be conservative and return true if we
12536 don't know. This is used only to avoid stupidly generating extra code.
12537 If we're wrong, we'll just delete the extra code later. */
12539 bool
12540 block_may_fallthru (const_tree block)
12542 /* This CONST_CAST is okay because expr_last returns its argument
12543 unmodified and we assign it to a const_tree. */
12544 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12546 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12548 case GOTO_EXPR:
12549 case RETURN_EXPR:
12550 /* Easy cases. If the last statement of the block implies
12551 control transfer, then we can't fall through. */
12552 return false;
12554 case SWITCH_EXPR:
12555 /* If there is a default: label or case labels cover all possible
12556 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12557 to some case label in all cases and all we care is whether the
12558 SWITCH_BODY falls through. */
12559 if (SWITCH_ALL_CASES_P (stmt))
12560 return block_may_fallthru (SWITCH_BODY (stmt));
12561 return true;
12563 case COND_EXPR:
12564 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12565 return true;
12566 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12568 case BIND_EXPR:
12569 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12571 case TRY_CATCH_EXPR:
12572 return try_catch_may_fallthru (stmt);
12574 case TRY_FINALLY_EXPR:
12575 /* The finally clause is always executed after the try clause,
12576 so if it does not fall through, then the try-finally will not
12577 fall through. Otherwise, if the try clause does not fall
12578 through, then when the finally clause falls through it will
12579 resume execution wherever the try clause was going. So the
12580 whole try-finally will only fall through if both the try
12581 clause and the finally clause fall through. */
12582 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12583 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12585 case EH_ELSE_EXPR:
12586 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12588 case MODIFY_EXPR:
12589 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12590 stmt = TREE_OPERAND (stmt, 1);
12591 else
12592 return true;
12593 /* FALLTHRU */
12595 case CALL_EXPR:
12596 /* Functions that do not return do not fall through. */
12597 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12599 case CLEANUP_POINT_EXPR:
12600 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12602 case TARGET_EXPR:
12603 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12605 case ERROR_MARK:
12606 return true;
12608 default:
12609 return lang_hooks.block_may_fallthru (stmt);
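/* Editorial examples (illustrative): a statement list ending in
   "return 0;" (a RETURN_EXPR) cannot fall through, so block_may_fallthru
   returns false; one ending in "if (c) x = 1;" (a COND_EXPR with an
   empty else arm) may fall through, so it returns true; a trailing call
   to a noreturn function (ECF_NORETURN) again yields false.  Unknown
   cases conservatively report true, as the comment above notes.  */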
12613 /* True if we are using EH to handle cleanups. */
12614 static bool using_eh_for_cleanups_flag = false;
12616 /* This routine is called from front ends to indicate that EH should be used for
12617 cleanups. */
12618 void
12619 using_eh_for_cleanups (void)
12621 using_eh_for_cleanups_flag = true;
12624 /* Query whether EH is used for cleanups. */
12625 bool
12626 using_eh_for_cleanups_p (void)
12628 return using_eh_for_cleanups_flag;
12631 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12632 const char *
12633 get_tree_code_name (enum tree_code code)
12635 const char *invalid = "<invalid tree code>";
12637 /* The tree_code enum promotes to signed, but we could be getting
12638 invalid values, so force an unsigned comparison. */
12639 if (unsigned (code) >= MAX_TREE_CODES)
12641 if ((unsigned)code == 0xa5a5)
12642 return "ggc_freed";
12643 return invalid;
12646 return tree_code_name[code];
12649 /* Drops the TREE_OVERFLOW flag from T. */
12651 tree
12652 drop_tree_overflow (tree t)
12654 gcc_checking_assert (TREE_OVERFLOW (t));
12656 /* For tree codes with a sharing machinery re-build the result. */
12657 if (poly_int_tree_p (t))
12658 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12660 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12661 and canonicalize the result. */
12662 if (TREE_CODE (t) == VECTOR_CST)
12664 tree_vector_builder builder;
12665 builder.new_unary_operation (TREE_TYPE (t), t, true);
12666 unsigned int count = builder.encoded_nelts ();
12667 for (unsigned int i = 0; i < count; ++i)
12669 tree elt = VECTOR_CST_ELT (t, i);
12670 if (TREE_OVERFLOW (elt))
12671 elt = drop_tree_overflow (elt);
12672 builder.quick_push (elt);
12674 return builder.build ();
12677 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12678 and drop the flag. */
12679 t = copy_node (t);
12680 TREE_OVERFLOW (t) = 0;
12682 /* For constants that contain nested constants, drop the flag
12683 from those as well. */
12684 if (TREE_CODE (t) == COMPLEX_CST)
12686 if (TREE_OVERFLOW (TREE_REALPART (t)))
12687 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12688 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12689 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12692 return t;
12695 /* Given a memory reference expression T, return its base address.
12696 The base address of a memory reference expression is the main
12697 object being referenced. For instance, the base address for
12698 'array[i].fld[j]' is 'array'. You can think of this as stripping
12699 away the offset part from a memory address.
12701 This function calls handled_component_p to strip away all the inner
12702 parts of the memory reference until it reaches the base object. */
12704 tree
12705 get_base_address (tree t)
12707 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12708 t = TREE_OPERAND (t, 0);
12709 while (handled_component_p (t))
12710 t = TREE_OPERAND (t, 0);
12712 if ((TREE_CODE (t) == MEM_REF
12713 || TREE_CODE (t) == TARGET_MEM_REF)
12714 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12715 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12717 return t;
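/* Editorial note (illustrative): besides stripping handled components,
   the MEM_REF/TARGET_MEM_REF case above looks through an ADDR_EXPR
   address operand.  So for an access the dumps would print as
   MEM[(char *)&s + 4B] the result is the underlying declaration "s",
   while for MEM[p_1] with only an SSA pointer the MEM_REF itself is
   returned, since no declaration is visible.  */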
12720 /* Return a tree of sizetype representing the size, in bytes, of the element
12721 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12723 tree
12724 array_ref_element_size (tree exp)
12726 tree aligned_size = TREE_OPERAND (exp, 3);
12727 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12728 location_t loc = EXPR_LOCATION (exp);
12730 /* If a size was specified in the ARRAY_REF, it's the size measured
12731 in alignment units of the element type. So multiply by that value. */
12732 if (aligned_size)
12734 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12735 sizetype from another type of the same width and signedness. */
12736 if (TREE_TYPE (aligned_size) != sizetype)
12737 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12738 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12739 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12742 /* Otherwise, take the size from that of the element type. Substitute
12743 any PLACEHOLDER_EXPR that we have. */
12744 else
12745 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12748 /* Return a tree representing the lower bound of the array mentioned in
12749 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12751 tree
12752 array_ref_low_bound (tree exp)
12754 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12756 /* If a lower bound is specified in EXP, use it. */
12757 if (TREE_OPERAND (exp, 2))
12758 return TREE_OPERAND (exp, 2);
12760 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12761 substituting for a PLACEHOLDER_EXPR as needed. */
12762 if (domain_type && TYPE_MIN_VALUE (domain_type))
12763 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12765 /* Otherwise, return a zero of the appropriate type. */
12766 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12767 return (idxtype == error_mark_node
12768 ? integer_zero_node : build_int_cst (idxtype, 0));
12771 /* Return a tree representing the upper bound of the array mentioned in
12772 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12774 tree
12775 array_ref_up_bound (tree exp)
12777 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12779 /* If there is a domain type and it has an upper bound, use it, substituting
12780 for a PLACEHOLDER_EXPR as needed. */
12781 if (domain_type && TYPE_MAX_VALUE (domain_type))
12782 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12784 /* Otherwise fail. */
12785 return NULL_TREE;
12788 /* Returns true if REF is an array reference, a component reference,
12789 or a memory reference to an array whose actual size might be larger
12790 than its upper bound implies. There are multiple cases:
12791 A. a ref to a flexible array member at the end of a structure;
12792 B. a ref to an array with a different type against the original decl;
12793 for example:
12795 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12796 (*((char(*)[16])&a[0]))[i+8]
12798 C. a ref to an array that was passed as a parameter;
12799 for example:
12801 int test (uint8_t *p, uint32_t t[1][1], int n) {
12802 for (int i = 0; i < 4; i++, p++)
12803 t[i][0] = ...;
12805 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12808 bool
12809 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12811 /* The TYPE for this array reference. */
12812 tree atype = NULL_TREE;
12813 /* The FIELD_DECL for the array field in the containing structure. */
12814 tree afield_decl = NULL_TREE;
12815 /* Whether this array is the trailing array of a structure. */
12816 bool is_trailing_array_tmp = false;
12817 if (!is_trailing_array)
12818 is_trailing_array = &is_trailing_array_tmp;
12820 if (TREE_CODE (ref) == ARRAY_REF
12821 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12823 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12824 ref = TREE_OPERAND (ref, 0);
12826 else if (TREE_CODE (ref) == COMPONENT_REF
12827 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12829 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12830 afield_decl = TREE_OPERAND (ref, 1);
12832 else if (TREE_CODE (ref) == MEM_REF)
12834 tree arg = TREE_OPERAND (ref, 0);
12835 if (TREE_CODE (arg) == ADDR_EXPR)
12836 arg = TREE_OPERAND (arg, 0);
12837 tree argtype = TREE_TYPE (arg);
12838 if (TREE_CODE (argtype) == RECORD_TYPE)
12840 if (tree fld = last_field (argtype))
12842 atype = TREE_TYPE (fld);
12843 afield_decl = fld;
12844 if (TREE_CODE (atype) != ARRAY_TYPE)
12845 return false;
12846 if (VAR_P (arg) && DECL_SIZE (fld))
12847 return false;
12849 else
12850 return false;
12852 else
12853 return false;
12855 else
12856 return false;
12858 if (TREE_CODE (ref) == STRING_CST)
12859 return false;
12861 tree ref_to_array = ref;
12862 while (handled_component_p (ref))
12864 /* If the reference chain contains a component reference to a
12865 non-union type and another field follows, the reference
12866 is not at the end of a structure. */
12867 if (TREE_CODE (ref) == COMPONENT_REF)
12869 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12871 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12872 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12873 nextf = DECL_CHAIN (nextf);
12874 if (nextf)
12875 return false;
12878 /* If we have a multi-dimensional array we do not consider
12879 a non-innermost dimension a flexible array, even when the whole
12880 multi-dimensional array is at the end of the struct.
12881 The same holds for an array of aggregates with a trailing array
12882 member. */
12883 else if (TREE_CODE (ref) == ARRAY_REF)
12884 return false;
12885 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12887 /* If we view an underlying object as something else, then what we
12888 have gathered up to now is all we have to rely on. */
12889 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12890 break;
12891 else
12892 gcc_unreachable ();
12894 ref = TREE_OPERAND (ref, 0);
12897 gcc_assert (!afield_decl
12898 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12900 /* The array is now at the end of the struct. Treat a flexible array
12901 member as always subject to extension, even into mere padding
12902 constrained by an underlying decl. */
12903 if (! TYPE_SIZE (atype)
12904 || ! TYPE_DOMAIN (atype)
12905 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12907 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12908 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12911 /* If the reference is based on a declared entity, the size of the array
12912 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12913 ref = get_base_address (ref);
12914 if (ref
12915 && DECL_P (ref)
12916 && !(flag_unconstrained_commons
12917 && VAR_P (ref) && DECL_COMMON (ref))
12918 && DECL_SIZE_UNIT (ref)
12919 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12921 /* If the object itself is the array, it is not at the end of a struct. */
12922 if (DECL_P (ref_to_array))
12923 return false;
12925 /* Check whether the array domain covers all of the available
12926 padding. */
12927 poly_int64 offset;
12928 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12929 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12930 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12932 *is_trailing_array
12933 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12934 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12936 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12938 *is_trailing_array
12939 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12940 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12943 /* If at least one extra element fits, it is a flexible array. */
12944 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12945 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12946 + 2)
12947 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12948 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12950 *is_trailing_array
12951 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12952 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12955 return false;
12958 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12959 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
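/* Editorial example (illustrative): given

     struct msg { int len; char data[]; };     // flexible array member
     struct buf { int len; char data[4]; };    // fixed-size trailing array

   a reference p->data[i] with p of type struct msg * has flexible size
   (case A above) and sets *IS_TRAILING_ARRAY.  For struct buf the same
   access on a declared "struct buf" variable is rejected by the domain
   check above, while through a pointer it is still treated as
   flexible-sized unless the field's DECL_NOT_FLEXARRAY was set by
   -fstrict-flex-arrays.  */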
12963 /* Return a tree representing the offset, in bytes, of the field referenced
12964 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12966 tree
12967 component_ref_field_offset (tree exp)
12969 tree aligned_offset = TREE_OPERAND (exp, 2);
12970 tree field = TREE_OPERAND (exp, 1);
12971 location_t loc = EXPR_LOCATION (exp);
12973 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12974 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12975 value. */
12976 if (aligned_offset)
12978 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12979 sizetype from another type of the same width and signedness. */
12980 if (TREE_TYPE (aligned_offset) != sizetype)
12981 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12982 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12983 size_int (DECL_OFFSET_ALIGN (field)
12984 / BITS_PER_UNIT));
12987 /* Otherwise, take the offset from that of the field. Substitute
12988 any PLACEHOLDER_EXPR that we have. */
12989 else
12990 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12993 /* Given the initializer INIT, return the initializer for the field
12994 DECL if it exists, otherwise null. Used to obtain the initializer
12995 for a flexible array member and determine its size. */
12997 static tree
12998 get_initializer_for (tree init, tree decl)
13000 STRIP_NOPS (init);
13002 tree fld, fld_init;
13003 unsigned HOST_WIDE_INT i;
13004 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13006 if (decl == fld)
13007 return fld_init;
13009 if (TREE_CODE (fld) == CONSTRUCTOR)
13011 fld_init = get_initializer_for (fld_init, decl);
13012 if (fld_init)
13013 return fld_init;
13017 return NULL_TREE;
13020 /* Determines the special array member type for the array reference REF. */
13021 special_array_member
13022 component_ref_sam_type (tree ref)
13024 special_array_member sam_type = special_array_member::none;
13026 tree member = TREE_OPERAND (ref, 1);
13027 tree memsize = DECL_SIZE_UNIT (member);
13028 if (memsize)
13030 tree memtype = TREE_TYPE (member);
13031 if (TREE_CODE (memtype) != ARRAY_TYPE)
13032 return sam_type;
13034 bool trailing = false;
13035 (void) array_ref_flexible_size_p (ref, &trailing);
13036 bool zero_elts = integer_zerop (memsize);
13037 if (zero_elts && integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype))))
13039 /* If the array element has zero size, check whether it is a flexible
13040 array member or a zero-length array. Clear zero_elts if
13041 it has one or more elements or is a VLA member. */
13042 if (tree dom = TYPE_DOMAIN (memtype))
13043 if (tree min = TYPE_MIN_VALUE (dom))
13044 if (tree max = TYPE_MAX_VALUE (dom))
13045 if (TREE_CODE (min) != INTEGER_CST
13046 || TREE_CODE (max) != INTEGER_CST
13047 || !((integer_zerop (min) && integer_all_onesp (max))
13048 || tree_int_cst_lt (max, min)))
13049 zero_elts = false;
13051 if (!trailing && !zero_elts)
13052 /* MEMBER is an interior array with more than one element. */
13053 return special_array_member::int_n;
13055 if (zero_elts)
13057 if (trailing)
13058 return special_array_member::trail_0;
13059 else
13060 return special_array_member::int_0;
13063 if (!zero_elts)
13064 if (tree dom = TYPE_DOMAIN (memtype))
13065 if (tree min = TYPE_MIN_VALUE (dom))
13066 if (tree max = TYPE_MAX_VALUE (dom))
13067 if (TREE_CODE (min) == INTEGER_CST
13068 && TREE_CODE (max) == INTEGER_CST)
13070 offset_int minidx = wi::to_offset (min);
13071 offset_int maxidx = wi::to_offset (max);
13072 offset_int neltsm1 = maxidx - minidx;
13073 if (neltsm1 > 0)
13074 /* MEMBER is a trailing array with more than
13075 one element. */
13076 return special_array_member::trail_n;
13078 if (neltsm1 == 0)
13079 return special_array_member::trail_1;
13083 return sam_type;
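/* Editorial summary (illustrative) of the classification above for a
   member "char a[N]" of a struct:

     a[0] as the last member            -> trail_0
     a[1] as the last member            -> trail_1
     a[4] as the last member            -> trail_n
     a[0] followed by other members     -> int_0
     a[4] followed by other members     -> int_n

   A true flexible array member "char a[]" has no DECL_SIZE_UNIT and is
   reported as special_array_member::none.  */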
13086 /* Determines the size of the member referenced by the COMPONENT_REF
13087 REF, using its initializer expression if necessary in order to
13088 determine the size of an initialized flexible array member.
13089 If non-null, set *SAM to the type of special array member.
13090 Returns the size as sizetype (which might be zero for an object
13091 with an uninitialized flexible array member) or null if the size
13092 cannot be determined. */
13094 tree
13095 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13097 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13099 special_array_member sambuf;
13100 if (!sam)
13101 sam = &sambuf;
13102 *sam = component_ref_sam_type (ref);
13104 /* The object/argument referenced by the COMPONENT_REF and its type. */
13105 tree arg = TREE_OPERAND (ref, 0);
13106 tree argtype = TREE_TYPE (arg);
13107 /* The referenced member. */
13108 tree member = TREE_OPERAND (ref, 1);
13110 tree memsize = DECL_SIZE_UNIT (member);
13111 if (memsize)
13113 tree memtype = TREE_TYPE (member);
13114 if (TREE_CODE (memtype) != ARRAY_TYPE)
13115 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13116 to the type of a class with a virtual base which doesn't
13117 reflect the size of the virtual's members (see pr97595).
13118 If that's the case fail for now and implement something
13119 more robust in the future. */
13120 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13121 ? memsize : NULL_TREE);
13123 /* Arrays of two or more elements are treated as normal arrays by default. */
13124 if (*sam == special_array_member::int_n
13125 || *sam == special_array_member::trail_n)
13126 return memsize;
13128 tree afield_decl = TREE_OPERAND (ref, 1);
13129 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13130 /* If the trailing array is not a flexible array member, treat it as
13131 a normal array. */
13132 if (DECL_NOT_FLEXARRAY (afield_decl)
13133 && *sam != special_array_member::int_0)
13134 return memsize;
13136 if (*sam == special_array_member::int_0)
13137 memsize = NULL_TREE;
13139 /* For a reference to a flexible array member of a union
13140 use the size of the union instead of the size of the member. */
13141 if (TREE_CODE (argtype) == UNION_TYPE)
13142 memsize = TYPE_SIZE_UNIT (argtype);
13145 /* MEMBER is either a bona fide flexible array member, or a zero-element
13146 array member, or an array of length one treated as such. */
13148 /* If the reference is to a declared object and the member a true
13149 flexible array, try to determine its size from its initializer. */
13150 poly_int64 baseoff = 0;
13151 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13152 if (!base || !VAR_P (base))
13154 if (*sam != special_array_member::int_0)
13155 return NULL_TREE;
13157 if (TREE_CODE (arg) != COMPONENT_REF)
13158 return NULL_TREE;
13160 base = arg;
13161 while (TREE_CODE (base) == COMPONENT_REF)
13162 base = TREE_OPERAND (base, 0);
13163 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13166 /* BASE is the declared object of which MEMBER is either a member
13167 or that is cast to ARGTYPE (e.g., a char buffer used to store
13168 an ARGTYPE object). */
13169 tree basetype = TREE_TYPE (base);
13171 /* Determine the base type of the referenced object. If it's
13172 the same as ARGTYPE and MEMBER has a known size, return it. */
13173 tree bt = basetype;
13174 if (*sam != special_array_member::int_0)
13175 while (TREE_CODE (bt) == ARRAY_TYPE)
13176 bt = TREE_TYPE (bt);
13177 bool typematch = useless_type_conversion_p (argtype, bt);
13178 if (memsize && typematch)
13179 return memsize;
13181 memsize = NULL_TREE;
13183 if (typematch)
13184 /* MEMBER is a true flexible array member. Compute its size from
13185 the initializer of the BASE object if it has one. */
13186 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13187 if (init != error_mark_node)
13189 init = get_initializer_for (init, member);
13190 if (init)
13192 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13193 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13195 /* Use the larger of the initializer size and the tail
13196 padding in the enclosing struct. */
13197 poly_int64 rsz = tree_to_poly_int64 (refsize);
13198 rsz -= baseoff;
13199 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13200 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13203 baseoff = 0;
13207 if (!memsize)
13209 if (typematch)
13211 if (DECL_P (base)
13212 && DECL_EXTERNAL (base)
13213 && bt == basetype
13214 && *sam != special_array_member::int_0)
13215 /* The size of a flexible array member of an extern struct
13216 with no initializer cannot be determined (it's defined
13217 in another translation unit and can have an initializer
13218 with an arbitrary number of elements). */
13219 return NULL_TREE;
13221 /* Use the size of the base struct or, for interior zero-length
13222 arrays, the size of the enclosing type. */
13223 memsize = TYPE_SIZE_UNIT (bt);
13225 else if (DECL_P (base))
13226 /* Use the size of the BASE object (possibly an array of some
13227 other type such as char used to store the struct). */
13228 memsize = DECL_SIZE_UNIT (base);
13229 else
13230 return NULL_TREE;
13233 /* If the flexible array member has a known size use the greater
13234 of it and the tail padding in the enclosing struct.
13235 Otherwise, when the size of the flexible array member is unknown
13236 and the referenced object is not a struct, use the size of its
13237 type when known. This detects sizes of array buffers when cast
13238 to struct types with flexible array members. */
13239 if (memsize)
13241 if (!tree_fits_poly_int64_p (memsize))
13242 return NULL_TREE;
13243 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13244 if (known_lt (baseoff, memsz64))
13246 memsz64 -= baseoff;
13247 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13249 return size_zero_node;
13252 /* Return "don't know" for an external non-array object since its
13253 flexible array member can be initialized to have any number of
13254 elements. Otherwise, return zero because the flexible array
13255 member has no elements. */
13256 return (DECL_P (base)
13257 && DECL_EXTERNAL (base)
13258 && (!typematch
13259 || TREE_CODE (basetype) != ARRAY_TYPE)
13260 ? NULL_TREE : size_zero_node);
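/* Editorial example (illustrative): for

     struct S { int n; char a[]; };
     static struct S s = { 3, "ab" };

   component_ref_size on s.a consults the initializer and yields 3 (the
   string initializer including its terminating NUL), or the tail padding
   of S if that happens to be larger; for

     extern struct S x;

   it yields NULL_TREE, because the flexible array of an external object
   may be defined with any number of elements in another TU.  */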
13263 /* Return the machine mode of T. For vectors, returns the mode of the
13264 inner type. The main use case is to feed the result to HONOR_NANS,
13265 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13267 machine_mode
13268 element_mode (const_tree t)
13270 if (!TYPE_P (t))
13271 t = TREE_TYPE (t);
13272 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13273 t = TREE_TYPE (t);
13274 return TYPE_MODE (t);
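/* Editorial examples (illustrative): element_mode of a four-element
   float vector type is SFmode and element_mode of _Complex double is
   DFmode, so a test such as HONOR_NANS (element_mode (type)) works
   uniformly for scalar, complex and (possibly BLKmode) vector types.  */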
13277 /* Vector types need to re-check the target flags each time we report
13278 the machine mode. We need to do this because attribute target can
13279 change the result of vector_mode_supported_p and have_regs_of_mode
13280 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13281 change on a per-function basis. */
13282 /* ??? Possibly a better solution is to run through all the types
13283 referenced by a function and re-compute the TYPE_MODE once, rather
13284 than make the TYPE_MODE macro call a function. */
13286 machine_mode
13287 vector_type_mode (const_tree t)
13289 machine_mode mode;
13291 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13293 mode = t->type_common.mode;
13294 if (VECTOR_MODE_P (mode)
13295 && (!targetm.vector_mode_supported_p (mode)
13296 || !have_regs_of_mode[mode]))
13298 scalar_int_mode innermode;
13300 /* For integers, try mapping it to a same-sized scalar mode. */
13301 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13303 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13304 * GET_MODE_BITSIZE (innermode));
13305 scalar_int_mode mode;
13306 if (int_mode_for_size (size, 0).exists (&mode)
13307 && have_regs_of_mode[mode])
13308 return mode;
13311 return BLKmode;
13314 return mode;
13317 /* Return the size in bits of each element of vector type TYPE. */
13319 unsigned int
13320 vector_element_bits (const_tree type)
13322 gcc_checking_assert (VECTOR_TYPE_P (type));
13323 if (VECTOR_BOOLEAN_TYPE_P (type))
13324 return TYPE_PRECISION (TREE_TYPE (type));
13325 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13328 /* Calculate the size in bits of each element of vector type TYPE
13329 and return the result as a tree of type bitsizetype. */
13331 tree
13332 vector_element_bits_tree (const_tree type)
13334 gcc_checking_assert (VECTOR_TYPE_P (type));
13335 if (VECTOR_BOOLEAN_TYPE_P (type))
13336 return bitsize_int (vector_element_bits (type));
13337 return TYPE_SIZE (TREE_TYPE (type));
13340 /* Verify that basic properties of T match TV and thus T can be a variant of
13341 TV. TV should be the more specified variant (i.e. the main variant). */
13343 static bool
13344 verify_type_variant (const_tree t, tree tv)
13346 /* Type variant can differ by:
13348 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13349 ENCODE_QUAL_ADDR_SPACE.
13350 - main variant may be TYPE_COMPLETE_P while variant types are !TYPE_COMPLETE_P;
13351 in this case some values may not be set in the variant types
13352 (see TYPE_COMPLETE_P checks).
13353 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13354 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13355 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13356 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13357 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
13358 this is necessary to make it possible to merge types from different TUs
13359 - arrays, pointers and references may have TREE_TYPE that is a variant
13360 of TREE_TYPE of their main variants.
13361 - aggregates may have a new TYPE_FIELDS list that lists variants of
13362 the main variant's TYPE_FIELDS.
13363 - vector types may differ by TYPE_VECTOR_OPAQUE
13366 /* Convenience macro for matching individual fields. */
13367 #define verify_variant_match(flag) \
13368 do { \
13369 if (flag (tv) != flag (t)) \
13371 error ("type variant differs by %s", #flag); \
13372 debug_tree (tv); \
13373 return false; \
13375 } while (false)
13377 /* tree_base checks. */
13379 verify_variant_match (TREE_CODE);
13380 /* FIXME: Ada builds non-artificial variants of artificial types. */
13381 #if 0
13382 if (TYPE_ARTIFICIAL (tv))
13383 verify_variant_match (TYPE_ARTIFICIAL);
13384 #endif
13385 if (POINTER_TYPE_P (tv))
13386 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13387 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13388 verify_variant_match (TYPE_UNSIGNED);
13389 verify_variant_match (TYPE_PACKED);
13390 if (TREE_CODE (t) == REFERENCE_TYPE)
13391 verify_variant_match (TYPE_REF_IS_RVALUE);
13392 if (AGGREGATE_TYPE_P (t))
13393 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13394 else
13395 verify_variant_match (TYPE_SATURATING);
13396 /* FIXME: This check triggers during libstdc++ builds. */
13397 #if 0
13398 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13399 verify_variant_match (TYPE_FINAL_P);
13400 #endif
13402 /* tree_type_common checks. */
13404 if (COMPLETE_TYPE_P (t))
13406 verify_variant_match (TYPE_MODE);
13407 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13408 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13409 verify_variant_match (TYPE_SIZE);
13410 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13411 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13412 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13414 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13415 TYPE_SIZE_UNIT (tv), 0));
13416 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13417 debug_tree (tv);
13418 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13419 debug_tree (TYPE_SIZE_UNIT (tv));
13420 error ("type%'s %<TYPE_SIZE_UNIT%>");
13421 debug_tree (TYPE_SIZE_UNIT (t));
13422 return false;
13424 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13426 verify_variant_match (TYPE_PRECISION);
13427 if (RECORD_OR_UNION_TYPE_P (t))
13428 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13429 else if (TREE_CODE (t) == ARRAY_TYPE)
13430 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13431 /* During LTO we merge variant lists from different translation units
13432 that may differ by TYPE_CONTEXT, which in turn may point
13433 to TRANSLATION_UNIT_DECL.
13434 Ada also builds variants of types with different TYPE_CONTEXT. */
13435 #if 0
13436 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13437 verify_variant_match (TYPE_CONTEXT);
13438 #endif
13439 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13440 verify_variant_match (TYPE_STRING_FLAG);
13441 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13442 verify_variant_match (TYPE_CXX_ODR_P);
13443 if (TYPE_ALIAS_SET_KNOWN_P (t))
13445 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13446 debug_tree (tv);
13447 return false;
13450 /* tree_type_non_common checks. */
13452 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13453 and dangles the pointer from time to time. */
13454 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13455 && (in_lto_p || !TYPE_VFIELD (tv)
13456 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13458 error ("type variant has different %<TYPE_VFIELD%>");
13459 debug_tree (tv);
13460 return false;
13462 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13463 || TREE_CODE (t) == INTEGER_TYPE
13464 || TREE_CODE (t) == BOOLEAN_TYPE
13465 || TREE_CODE (t) == REAL_TYPE
13466 || TREE_CODE (t) == FIXED_POINT_TYPE)
13468 verify_variant_match (TYPE_MAX_VALUE);
13469 verify_variant_match (TYPE_MIN_VALUE);
13471 if (TREE_CODE (t) == METHOD_TYPE)
13472 verify_variant_match (TYPE_METHOD_BASETYPE);
13473 if (TREE_CODE (t) == OFFSET_TYPE)
13474 verify_variant_match (TYPE_OFFSET_BASETYPE);
13475 if (TREE_CODE (t) == ARRAY_TYPE)
13476 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13477 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13478 or even type's main variant. This is needed to make bootstrap pass
13479 and the bug seems new in GCC 5.
13480 C++ FE should be updated to make this consistent and we should check
13481 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13482 is a match with main variant.
13484 Also disable the check for Java for now because of a parser hack that builds
13485 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
13486 of the copies. */
13487 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13488 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13489 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13490 Since there is no cheap way to tell C++ and Java types apart w/o LTO, do checking
13491 at LTO time only. */
13492 && (in_lto_p && odr_type_p (t)))
13494 error ("type variant has different %<TYPE_BINFO%>");
13495 debug_tree (tv);
13496 error ("type variant%'s %<TYPE_BINFO%>");
13497 debug_tree (TYPE_BINFO (tv));
13498 error ("type%'s %<TYPE_BINFO%>");
13499 debug_tree (TYPE_BINFO (t));
13500 return false;
13503 /* Check various uses of TYPE_VALUES_RAW. */
13504 if (TREE_CODE (t) == ENUMERAL_TYPE
13505 && TYPE_VALUES (t))
13506 verify_variant_match (TYPE_VALUES);
13507 else if (TREE_CODE (t) == ARRAY_TYPE)
13508 verify_variant_match (TYPE_DOMAIN);
13509 /* Permit incomplete variants of complete type. While FEs may complete
13510 all variants, this does not happen for C++ templates in all cases. */
13511 else if (RECORD_OR_UNION_TYPE_P (t)
13512 && COMPLETE_TYPE_P (t)
13513 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13515 tree f1, f2;
13517 /* Fortran builds qualified variants as new records with items of
13518 qualified type. Verify that they look the same. */
13519 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13520 f1 && f2;
13521 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13522 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13523 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13524 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13525 /* FIXME: gfc_nonrestricted_type builds all types as variants
13526 with the exception of pointer types. It deeply copies the type,
13527 which means that we may end up with a variant type
13528 referring to a non-variant pointer. We may change it to
13529 produce types as variants, too, like
13530 objc_get_protocol_qualified_type does. */
13531 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13532 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13533 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13534 break;
13535 if (f1 || f2)
13537 error ("type variant has different %<TYPE_FIELDS%>");
13538 debug_tree (tv);
13539 error ("first mismatch is field");
13540 debug_tree (f1);
13541 error ("and field");
13542 debug_tree (f2);
13543 return false;
13546 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13547 verify_variant_match (TYPE_ARG_TYPES);
13548 /* For C++ the qualified variant of an array type is really an array type
13549 of the qualified TREE_TYPE.
13550 ObjC builds variants of pointers where the pointed-to type is a variant,
13551 too, in objc_get_protocol_qualified_type. */
13552 if (TREE_TYPE (t) != TREE_TYPE (tv)
13553 && ((TREE_CODE (t) != ARRAY_TYPE
13554 && !POINTER_TYPE_P (t))
13555 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13556 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13558 error ("type variant has different %<TREE_TYPE%>");
13559 debug_tree (tv);
13560 error ("type variant%'s %<TREE_TYPE%>");
13561 debug_tree (TREE_TYPE (tv));
13562 error ("type%'s %<TREE_TYPE%>");
13563 debug_tree (TREE_TYPE (t));
13564 return false;
13566 if (type_with_alias_set_p (t)
13567 && !gimple_canonical_types_compatible_p (t, tv, false))
13569 error ("type is not compatible with its variant");
13570 debug_tree (tv);
13571 error ("type variant%'s %<TREE_TYPE%>");
13572 debug_tree (TREE_TYPE (tv));
13573 error ("type%'s %<TREE_TYPE%>");
13574 debug_tree (TREE_TYPE (t));
13575 return false;
13577 return true;
13578 #undef verify_variant_match
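/* For instance, given

     const int ci = 0;

   a typical C/C++ FE builds `const int' as a variant whose
   TYPE_MAIN_VARIANT is plain `int'; the checks above then require the
   two types to agree on size, alignment, mode, TREE_TYPE and the other
   listed properties, leaving them to differ essentially only in
   qualifiers and variant-specific flags.  */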
13582 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13583 the middle-end types_compatible_p function. It needs to avoid
13584 claiming types are different for types that should be treated
13585 the same with respect to TBAA. Canonical types are also used
13586 for IL consistency checks via the useless_type_conversion_p
13587 predicate which does not handle all type kinds itself but falls
13588 back to pointer-comparison of TYPE_CANONICAL for aggregates
13589 for example. */
13591 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13592 type calculation because we need to allow inter-operability between signed
13593 and unsigned variants. */
13595 bool
13596 type_with_interoperable_signedness (const_tree type)
13598 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13599 signed char and unsigned char. Similarly the Fortran FE builds
13600 C_SIZE_T as a signed type, while C defines it as unsigned. */
13602 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13603 == INTEGER_TYPE
13604 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13605 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
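/* For example, on an LP64 target (8-bit chars, 64-bit size_t):

     signed char vs. unsigned char: both have the precision of
       signed_char_type_node, so the signedness mismatch is ignored;
     int vs. unsigned int: 32-bit precision matches neither case above,
       so their signedness must still agree.

   This is how gimple_canonical_types_compatible_p below consults the
   predicate before rejecting a TYPE_UNSIGNED mismatch.  */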
13608 /* Return true iff T1 and T2 are structurally identical as far as
13609 TBAA is concerned.
13610 This function is used both by lto.cc canonical type merging and by the
13611 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of types
13612 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13613 only for LTO because only in these cases does TYPE_CANONICAL equivalence
13614 correspond to the one defined by gimple_canonical_types_compatible_p. */
13616 bool
13617 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13618 bool trust_type_canonical)
13620 /* Type variants should be the same as the main variant. When not doing sanity
13621 checking to verify this fact, go to main variants and save some work. */
13622 if (trust_type_canonical)
13624 t1 = TYPE_MAIN_VARIANT (t1);
13625 t2 = TYPE_MAIN_VARIANT (t2);
13628 /* Check first for the obvious case of pointer identity. */
13629 if (t1 == t2)
13630 return true;
13632 /* Check that we have two types to compare. */
13633 if (t1 == NULL_TREE || t2 == NULL_TREE)
13634 return false;
13636 /* We consider complete types always compatible with incomplete types.
13637 This does not make sense for canonical type calculation and thus we
13638 need to ensure that we are never called on them.
13640 FIXME: For more correctness the function probably should have three modes
13641 1) a mode assuming that types are complete and matching their structure
13642 2) a mode allowing incomplete types but producing equivalence classes
13643 and thus ignoring all info from complete types
13644 3) a mode allowing incomplete types to match complete ones but checking
13645 compatibility between complete types.
13647 1 and 2 can be used for canonical type calculation. 3 is the real
13648 definition of type compatibility that can be used e.g. for warnings during
13649 declaration merging. */
13651 gcc_assert (!trust_type_canonical
13652 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13654 /* If the types have been previously registered and found equal
13655 they still are. */
13657 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13658 && trust_type_canonical)
13660 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13661 they are always NULL, but they are set to non-NULL for types
13662 constructed by build_pointer_type and variants. In this case the
13663 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13664 all pointers are considered equal). Be sure not to return false
13665 negatives. */
13666 gcc_checking_assert (canonical_type_used_p (t1)
13667 && canonical_type_used_p (t2));
13668 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13671 /* For types where we do ODR based TBAA the canonical type is always
13672 set correctly, so we know that types are different if their
13673 canonical types do not match. */
13674 if (trust_type_canonical
13675 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13676 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13677 return false;
13679 /* Can't be the same type if the types don't have the same code. */
13680 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13681 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13682 return false;
13684 /* Qualifiers do not matter for canonical type comparison purposes. */
13686 /* Void types and nullptr types are always the same. */
13687 if (TREE_CODE (t1) == VOID_TYPE
13688 || TREE_CODE (t1) == NULLPTR_TYPE)
13689 return true;
13691 /* Can't be the same type if they have different mode. */
13692 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13693 return false;
13695 /* Non-aggregate types can be handled cheaply. */
13696 if (INTEGRAL_TYPE_P (t1)
13697 || SCALAR_FLOAT_TYPE_P (t1)
13698 || FIXED_POINT_TYPE_P (t1)
13699 || TREE_CODE (t1) == VECTOR_TYPE
13700 || TREE_CODE (t1) == COMPLEX_TYPE
13701 || TREE_CODE (t1) == OFFSET_TYPE
13702 || POINTER_TYPE_P (t1))
13704 /* Can't be the same type if they have different precision. */
13705 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13706 return false;
13708 /* In some cases the signed and unsigned types are required to be
13709 inter-operable. */
13710 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13711 && !type_with_interoperable_signedness (t1))
13712 return false;
13714 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13715 interoperable with "signed char". Unless all frontends are revisited
13716 to agree on these types, we must ignore the flag completely. */
13718 /* The Fortran standard defines the C_PTR type to be compatible with every
13719 C pointer. For this reason we need to glob all pointers into one.
13720 Still, pointers in different address spaces are not compatible. */
13721 if (POINTER_TYPE_P (t1))
13723 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13724 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13725 return false;
13728 /* Tail-recurse to components. */
13729 if (TREE_CODE (t1) == VECTOR_TYPE
13730 || TREE_CODE (t1) == COMPLEX_TYPE)
13731 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13732 TREE_TYPE (t2),
13733 trust_type_canonical);
13735 return true;
13738 /* Do type-specific comparisons. */
13739 switch (TREE_CODE (t1))
13741 case ARRAY_TYPE:
13742 /* Array types are the same if the element types are the same and
13743 the number of elements is the same. */
13744 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13745 trust_type_canonical)
13746 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13747 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13748 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13749 return false;
13750 else
13752 tree i1 = TYPE_DOMAIN (t1);
13753 tree i2 = TYPE_DOMAIN (t2);
13755 /* For an incomplete external array, the type domain can be
13756 NULL_TREE. Check this condition also. */
13757 if (i1 == NULL_TREE && i2 == NULL_TREE)
13758 return true;
13759 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13760 return false;
13761 else
13763 tree min1 = TYPE_MIN_VALUE (i1);
13764 tree min2 = TYPE_MIN_VALUE (i2);
13765 tree max1 = TYPE_MAX_VALUE (i1);
13766 tree max2 = TYPE_MAX_VALUE (i2);
13768 /* The minimum/maximum values have to be the same. */
13769 if ((min1 == min2
13770 || (min1 && min2
13771 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13772 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13773 || operand_equal_p (min1, min2, 0))))
13774 && (max1 == max2
13775 || (max1 && max2
13776 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13777 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13778 || operand_equal_p (max1, max2, 0)))))
13779 return true;
13780 else
13781 return false;
13785 case METHOD_TYPE:
13786 case FUNCTION_TYPE:
13787 /* Function types are the same if the return type and argument types
13788 are the same. */
13789 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13790 trust_type_canonical))
13791 return false;
13793 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13794 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13795 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13796 return true;
13797 else
13799 tree parms1, parms2;
13801 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13802 parms1 && parms2;
13803 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13805 if (!gimple_canonical_types_compatible_p
13806 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13807 trust_type_canonical))
13808 return false;
13811 if (parms1 || parms2)
13812 return false;
13814 return true;
13817 case RECORD_TYPE:
13818 case UNION_TYPE:
13819 case QUAL_UNION_TYPE:
13821 tree f1, f2;
13823 /* Don't try to compare variants of an incomplete type, before
13824 TYPE_FIELDS has been copied around. */
13825 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13826 return true;
13829 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13830 return false;
13832 /* For aggregate types, all the fields must be the same. */
13833 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13834 f1 || f2;
13835 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13837 /* Skip non-fields and zero-sized fields. */
13838 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13839 || (DECL_SIZE (f1)
13840 && integer_zerop (DECL_SIZE (f1)))))
13841 f1 = TREE_CHAIN (f1);
13842 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13843 || (DECL_SIZE (f2)
13844 && integer_zerop (DECL_SIZE (f2)))))
13845 f2 = TREE_CHAIN (f2);
13846 if (!f1 || !f2)
13847 break;
13848 /* The fields must have the same offset and type. */
13849 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13850 || !gimple_compare_field_offset (f1, f2)
13851 || !gimple_canonical_types_compatible_p
13852 (TREE_TYPE (f1), TREE_TYPE (f2),
13853 trust_type_canonical))
13854 return false;
13857 /* If one aggregate has more fields than the other, they
13858 are not the same. */
13859 if (f1 || f2)
13860 return false;
13862 return true;
13865 default:
13866 /* Consider all types with language specific trees in them mutually
13867 compatible. This is executed only from verify_type and false
13868 positives can be tolerated. */
13869 gcc_assert (!in_lto_p);
13870 return true;
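/* Two illustrative consequences of the rules above: a pair of
   RECORD_TYPEs streamed in from different translation units compare
   compatible when their fields line up structurally (offsets and field
   types, checked recursively), and `int *' vs. `float *' also compare
   compatible, because all pointers within one address space are globbed
   together for TBAA, which is what Fortran's C_PTR interoperability
   requires.  */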
13874 /* An OPAQUE_TYPE T should have only size and alignment information,
13875 and its mode should be of class MODE_OPAQUE. This function verifies
13876 that these properties of T match TV, which is the main variant of T, and TC,
13877 which is the canonical type of T. */
13879 static void
13880 verify_opaque_type (const_tree t, tree tv, tree tc)
13882 gcc_assert (OPAQUE_TYPE_P (t));
13883 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13884 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13886 /* For an opaque type T1, check whether its properties match
13887 the corresponding ones of the other opaque type T2, and emit
13888 error messages for any that are inconsistent. */
13889 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13890 const char *kind_msg)
13892 if (!OPAQUE_TYPE_P (t2))
13894 error ("type %s is not an opaque type", kind_msg);
13895 debug_tree (t2);
13896 return;
13898 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13900 error ("type %s does not have an opaque mode", kind_msg);
13901 debug_tree (t2);
13902 return;
13904 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13906 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13907 debug_tree (t2);
13908 return;
13910 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13911 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13912 if (maybe_ne (t1_size, t2_size))
13914 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13915 debug_tree (t2);
13916 return;
13918 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13920 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13921 debug_tree (t2);
13922 return;
13924 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13926 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13927 debug_tree (t2);
13928 return;
13932 if (t != tv)
13933 check_properties_for_opaque_type (t, tv, "variant");
13935 if (t != tc)
13936 check_properties_for_opaque_type (t, tc, "canonical");
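/* Opaque types carry nothing beyond size and alignment (for example the
   PowerPC MMA accumulator types such as __vector_quad, on targets that
   provide them), so the lambda above can only cross-check mode, size and
   alignment between T, its main variant and its canonical type.  */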
13939 /* Verify type T. */
13941 void
13942 verify_type (const_tree t)
13944 bool error_found = false;
13945 tree mv = TYPE_MAIN_VARIANT (t);
13946 tree ct = TYPE_CANONICAL (t);
13948 if (OPAQUE_TYPE_P (t))
13950 verify_opaque_type (t, mv, ct);
13951 return;
13954 if (!mv)
13956 error ("main variant is not defined");
13957 error_found = true;
13959 else if (mv != TYPE_MAIN_VARIANT (mv))
13961 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13962 debug_tree (mv);
13963 error_found = true;
13965 else if (t != mv && !verify_type_variant (t, mv))
13966 error_found = true;
13968 if (!ct)
13970 else if (TYPE_CANONICAL (ct) != ct)
13972 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13973 debug_tree (ct);
13974 error_found = true;
13976 /* Method and function types cannot be used to address memory and thus
13977 TYPE_CANONICAL really matters only for determining useless conversions.
13979 FIXME: The C++ FE produces declarations of builtin functions that are not
13980 compatible with main variants. */
13981 else if (TREE_CODE (t) == FUNCTION_TYPE)
13983 else if (t != ct
13984 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13985 with variably sized arrays because their sizes may have been
13986 gimplified to different variables. */
13987 && !variably_modified_type_p (ct, NULL)
13988 && !gimple_canonical_types_compatible_p (t, ct, false)
13989 && COMPLETE_TYPE_P (t))
13991 error ("%<TYPE_CANONICAL%> is not compatible");
13992 debug_tree (ct);
13993 error_found = true;
13996 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13997 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13999 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14000 debug_tree (ct);
14001 error_found = true;
14003 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14005 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14006 debug_tree (ct);
14007 debug_tree (TYPE_MAIN_VARIANT (ct));
14008 error_found = true;
14012 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14013 if (RECORD_OR_UNION_TYPE_P (t))
14015 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14016 and dangles the pointer from time to time. */
14017 if (TYPE_VFIELD (t)
14018 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14019 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14021 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14022 debug_tree (TYPE_VFIELD (t));
14023 error_found = true;
14026 else if (TREE_CODE (t) == POINTER_TYPE)
14028 if (TYPE_NEXT_PTR_TO (t)
14029 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14031 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14032 debug_tree (TYPE_NEXT_PTR_TO (t));
14033 error_found = true;
14036 else if (TREE_CODE (t) == REFERENCE_TYPE)
14038 if (TYPE_NEXT_REF_TO (t)
14039 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14041 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14042 debug_tree (TYPE_NEXT_REF_TO (t));
14043 error_found = true;
14046 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14047 || TREE_CODE (t) == FIXED_POINT_TYPE)
14049 /* FIXME: The following check should pass:
14050 useless_type_conversion_p (const_cast <tree> (t),
14051 TREE_TYPE (TYPE_MIN_VALUE (t)))
14052 but does not for C sizetypes in LTO. */
14055 /* Check various uses of TYPE_MAXVAL_RAW. */
14056 if (RECORD_OR_UNION_TYPE_P (t))
14058 if (!TYPE_BINFO (t))
14060 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14062 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14063 debug_tree (TYPE_BINFO (t));
14064 error_found = true;
14066 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14068 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14069 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14070 error_found = true;
14073 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14075 if (TYPE_METHOD_BASETYPE (t)
14076 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14077 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14079 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14080 debug_tree (TYPE_METHOD_BASETYPE (t));
14081 error_found = true;
14084 else if (TREE_CODE (t) == OFFSET_TYPE)
14086 if (TYPE_OFFSET_BASETYPE (t)
14087 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14088 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14090 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14091 debug_tree (TYPE_OFFSET_BASETYPE (t));
14092 error_found = true;
14095 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14096 || TREE_CODE (t) == FIXED_POINT_TYPE)
14098 /* FIXME: The following check should pass:
14099 useless_type_conversion_p (const_cast <tree> (t),
14100 TREE_TYPE (TYPE_MAX_VALUE (t)))
14101 but does not for C sizetypes in LTO. */
14103 else if (TREE_CODE (t) == ARRAY_TYPE)
14105 if (TYPE_ARRAY_MAX_SIZE (t)
14106 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14108 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14109 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14110 error_found = true;
14113 else if (TYPE_MAX_VALUE_RAW (t))
14115 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14116 debug_tree (TYPE_MAX_VALUE_RAW (t));
14117 error_found = true;
14120 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14122 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14123 debug_tree (TYPE_LANG_SLOT_1 (t));
14124 error_found = true;
14127 /* Check various uses of TYPE_VALUES_RAW. */
14128 if (TREE_CODE (t) == ENUMERAL_TYPE)
14129 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14131 tree value = TREE_VALUE (l);
14132 tree name = TREE_PURPOSE (l);
14134 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
14135 CONST_DECLs of ENUMERAL_TYPE. */
14136 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14138 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14139 debug_tree (value);
14140 debug_tree (name);
14141 error_found = true;
14143 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14144 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14145 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14147 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14148 "to the enum");
14149 debug_tree (value);
14150 debug_tree (name);
14151 error_found = true;
14153 if (TREE_CODE (name) != IDENTIFIER_NODE)
14155 error ("enum value name is not %<IDENTIFIER_NODE%>");
14156 debug_tree (value);
14157 debug_tree (name);
14158 error_found = true;
14161 else if (TREE_CODE (t) == ARRAY_TYPE)
14163 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14165 error ("array %<TYPE_DOMAIN%> is not integer type");
14166 debug_tree (TYPE_DOMAIN (t));
14167 error_found = true;
14170 else if (RECORD_OR_UNION_TYPE_P (t))
14172 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14174 error ("%<TYPE_FIELDS%> defined in incomplete type");
14175 error_found = true;
14177 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14179 /* TODO: verify properties of decls. */
14180 if (TREE_CODE (fld) == FIELD_DECL)
14182 else if (TREE_CODE (fld) == TYPE_DECL)
14184 else if (TREE_CODE (fld) == CONST_DECL)
14186 else if (VAR_P (fld))
14188 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14190 else if (TREE_CODE (fld) == USING_DECL)
14192 else if (TREE_CODE (fld) == FUNCTION_DECL)
14194 else
14196 error ("wrong tree in %<TYPE_FIELDS%> list");
14197 debug_tree (fld);
14198 error_found = true;
14202 else if (TREE_CODE (t) == INTEGER_TYPE
14203 || TREE_CODE (t) == BOOLEAN_TYPE
14204 || TREE_CODE (t) == OFFSET_TYPE
14205 || TREE_CODE (t) == REFERENCE_TYPE
14206 || TREE_CODE (t) == NULLPTR_TYPE
14207 || TREE_CODE (t) == POINTER_TYPE)
14209 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14211 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14212 "is %p",
14213 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14214 error_found = true;
14216 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14218 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14219 debug_tree (TYPE_CACHED_VALUES (t));
14220 error_found = true;
14222 /* Verify just enough of the cache to ensure that no one copied it to a new type.
14223 All copying should go through copy_node, which should clear it. */
14224 else if (TYPE_CACHED_VALUES_P (t))
14226 int i;
14227 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14228 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14229 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14231 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14232 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14233 error_found = true;
14234 break;
14238 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14239 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14241 /* C++ FE uses TREE_PURPOSE to store initial values. */
14242 if (TREE_PURPOSE (l) && in_lto_p)
14244 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14245 debug_tree (l);
14246 error_found = true;
14248 if (!TYPE_P (TREE_VALUE (l)))
14250 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14251 debug_tree (l);
14252 error_found = true;
14255 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14257 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14258 debug_tree (TYPE_VALUES_RAW (t));
14259 error_found = true;
14261 if (TREE_CODE (t) != INTEGER_TYPE
14262 && TREE_CODE (t) != BOOLEAN_TYPE
14263 && TREE_CODE (t) != OFFSET_TYPE
14264 && TREE_CODE (t) != REFERENCE_TYPE
14265 && TREE_CODE (t) != NULLPTR_TYPE
14266 && TREE_CODE (t) != POINTER_TYPE
14267 && TYPE_CACHED_VALUES_P (t))
14269 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14270 error_found = true;
14273 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14274 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14275 of a type. */
14276 if (TREE_CODE (t) == METHOD_TYPE
14277 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14279 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14280 error_found = true;
14283 if (error_found)
14285 debug_tree (const_cast <tree> (t));
14286 internal_error ("%qs failed", __func__);
14291 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
14292 always positive, 2 if ARG is known to be always negative, or 3 if
14293 ARG may be positive or negative. */
14296 get_range_pos_neg (tree arg)
14298 if (arg == error_mark_node)
14299 return 3;
14301 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14302 int cnt = 0;
14303 if (TREE_CODE (arg) == INTEGER_CST)
14305 wide_int w = wi::sext (wi::to_wide (arg), prec);
14306 if (wi::neg_p (w))
14307 return 2;
14308 else
14309 return 1;
14311 while (CONVERT_EXPR_P (arg)
14312 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14313 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14315 arg = TREE_OPERAND (arg, 0);
14316 /* Narrower value zero extended into wider type
14317 will always result in positive values. */
14318 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14319 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14320 return 1;
14321 prec = TYPE_PRECISION (TREE_TYPE (arg));
14322 if (++cnt > 30)
14323 return 3;
14326 if (TREE_CODE (arg) != SSA_NAME)
14327 return 3;
14328 value_range r;
14329 while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
14331 gimple *g = SSA_NAME_DEF_STMT (arg);
14332 if (is_gimple_assign (g)
14333 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14335 tree t = gimple_assign_rhs1 (g);
14336 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14337 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14339 if (TYPE_UNSIGNED (TREE_TYPE (t))
14340 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14341 return 1;
14342 prec = TYPE_PRECISION (TREE_TYPE (t));
14343 arg = t;
14344 if (++cnt > 30)
14345 return 3;
14346 continue;
14349 return 3;
14351 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14353 /* For unsigned values, the "positive" range comes
14354 below the "negative" range. */
14355 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14356 return 1;
14357 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14358 return 2;
14360 else
14362 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14363 return 1;
14364 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14365 return 2;
14367 return 3;
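/* A few examples of the classification above: an INTEGER_CST of -5
   yields 2; a value zero-extended from a narrower unsigned type, as in

     int i = (int) (unsigned short) x;

   yields 1 without consulting ranges; an SSA_NAME whose recorded global
   range is, say, [10, 200] also yields 1; and anything that cannot be
   proven either way yields 3.  */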
14373 /* Return true if ARG is marked with the nonnull attribute in the
14374 current function signature. */
14376 bool
14377 nonnull_arg_p (const_tree arg)
14379 tree t, attrs, fntype;
14380 unsigned HOST_WIDE_INT arg_num;
14382 gcc_assert (TREE_CODE (arg) == PARM_DECL
14383 && (POINTER_TYPE_P (TREE_TYPE (arg))
14384 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14386 /* The static chain decl is always non-null. */
14387 if (arg == cfun->static_chain_decl)
14388 return true;
14390 /* The THIS argument of a method is always non-NULL. */
14391 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14392 && arg == DECL_ARGUMENTS (cfun->decl)
14393 && flag_delete_null_pointer_checks)
14394 return true;
14396 /* Values passed by reference are always non-NULL. */
14397 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14398 && flag_delete_null_pointer_checks)
14399 return true;
14401 fntype = TREE_TYPE (cfun->decl);
14402 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14404 attrs = lookup_attribute ("nonnull", attrs);
14406 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14407 if (attrs == NULL_TREE)
14408 return false;
14410 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14411 if (TREE_VALUE (attrs) == NULL_TREE)
14412 return true;
14414 /* Get the position number for ARG in the function signature. */
14415 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14417 t = DECL_CHAIN (t), arg_num++)
14419 if (t == arg)
14420 break;
14423 gcc_assert (t == arg);
14425 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14426 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14428 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14429 return true;
14433 return false;
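/* For example, with

     void f (void *a, int b, void *c) __attribute__ ((nonnull (1, 3)));

   nonnull_arg_p is true for the PARM_DECLs of A and C; with a bare
   "nonnull" (no argument list) every pointer parameter is considered
   non-null, and the implicit THIS of a C++ member function is non-null
   whenever -fdelete-null-pointer-checks is in effect.  */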
14436 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14437 information. */
14439 location_t
14440 set_block (location_t loc, tree block)
14442 location_t pure_loc = get_pure_location (loc);
14443 source_range src_range = get_range_from_loc (line_table, loc);
14444 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14445 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block, discriminator);
14448 location_t
14449 set_source_range (tree expr, location_t start, location_t finish)
14451 source_range src_range;
14452 src_range.m_start = start;
14453 src_range.m_finish = finish;
14454 return set_source_range (expr, src_range);
14457 location_t
14458 set_source_range (tree expr, source_range src_range)
14460 if (!EXPR_P (expr))
14461 return UNKNOWN_LOCATION;
14463 location_t expr_location = EXPR_LOCATION (expr);
14464 location_t pure_loc = get_pure_location (expr_location);
14465 unsigned discriminator = get_discriminator_from_loc (expr_location);
14466 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14467 pure_loc,
14468 src_range,
14469 NULL,
14470 discriminator);
14471 SET_EXPR_LOCATION (expr, adhoc);
14472 return adhoc;
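/* For example, a front end that has parsed `a + b' can record the full
   extent of the expression while keeping the caret on the operator:

     set_source_range (expr, start_of_a, end_of_b);

   (with START_OF_A and END_OF_B standing for the operand locations), or
   it can build the combined location up front with
   make_location (caret, start, finish); either way the range is carried
   by the ad-hoc location.  */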
14475 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
14476 if !CAN_HAVE_LOCATION_P (expr).
14478 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14479 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14481 Wrapper nodes can be identified using location_wrapper_p. */
14483 tree
14484 maybe_wrap_with_location (tree expr, location_t loc)
14486 if (expr == NULL)
14487 return NULL;
14488 if (loc == UNKNOWN_LOCATION)
14489 return expr;
14490 if (CAN_HAVE_LOCATION_P (expr))
14491 return expr;
14492 /* We should only be adding wrappers for constants and for decls,
14493 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14494 gcc_assert (CONSTANT_CLASS_P (expr)
14495 || DECL_P (expr)
14496 || EXCEPTIONAL_CLASS_P (expr));
14498 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14499 any impact of the wrapper nodes. */
14500 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14501 return expr;
14503 /* Compiler-generated temporary variables don't need a wrapper. */
14504 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14505 return expr;
14507 /* If any auto_suppress_location_wrappers are active, don't create
14508 wrappers. */
14509 if (suppress_location_wrappers > 0)
14510 return expr;
14512 tree_code code
14513 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14514 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14515 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14516 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14517 /* Mark this node as being a wrapper. */
14518 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14519 return wrapper;
14522 int suppress_location_wrappers;
14524 /* Return the name of combined function FN, for debugging purposes. */
14526 const char *
14527 combined_fn_name (combined_fn fn)
14529 if (builtin_fn_p (fn))
14531 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14532 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14534 else
14535 return internal_fn_name (as_internal_fn (fn));
14538 /* Return a bitmap with a bit set corresponding to each argument in
14539 a function call type FNTYPE declared with attribute nonnull,
14540 or null if none of the function's arguments are nonnull. The caller
14541 must free the bitmap. */
14543 bitmap
14544 get_nonnull_args (const_tree fntype)
14546 if (fntype == NULL_TREE)
14547 return NULL;
14549 bitmap argmap = NULL;
14550 if (TREE_CODE (fntype) == METHOD_TYPE)
14552 /* The this pointer in C++ non-static member functions is
14553 implicitly nonnull whether or not it's declared as such. */
14554 argmap = BITMAP_ALLOC (NULL);
14555 bitmap_set_bit (argmap, 0);
14558 tree attrs = TYPE_ATTRIBUTES (fntype);
14559 if (!attrs)
14560 return argmap;
14562 /* A function declaration can specify multiple attribute nonnull,
14563 each with zero or more arguments. The loop below creates a bitmap
14564 representing a union of all the arguments. An empty (but non-null)
14565 bitmap means that all arguments have been declared nonnull. */
14566 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14568 attrs = lookup_attribute ("nonnull", attrs);
14569 if (!attrs)
14570 break;
14572 if (!argmap)
14573 argmap = BITMAP_ALLOC (NULL);
14575 if (!TREE_VALUE (attrs))
14577 /* Clear the bitmap in case a previous attribute nonnull
14578 set it and this one overrides it for all arguments. */
14579 bitmap_clear (argmap);
14580 return argmap;
14583 /* Iterate over the indices of the arguments declared nonnull
14584 and set a bit for each. */
14585 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14587 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14588 bitmap_set_bit (argmap, val);
14592 return argmap;
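/* For instance, for

     void g (void *, void *, void *) __attribute__ ((nonnull (1, 3)));

   the returned bitmap has bits 0 and 2 set; for a METHOD_TYPE bit 0 is
   set for the implicit THIS pointer; and a bare __attribute__ ((nonnull))
   produces an empty (but non-null) bitmap meaning "all arguments".
   Callers are expected to BITMAP_FREE the result.  */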
14595 /* Return true if TYPE is an empty type, i.e. if it and all of its
14596 subobjects (recursively) are structures, unions, or arrays containing no data. */
14598 bool
14599 is_empty_type (const_tree type)
14601 if (RECORD_OR_UNION_TYPE_P (type))
14603 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14604 if (TREE_CODE (field) == FIELD_DECL
14605 && !DECL_PADDING_P (field)
14606 && !is_empty_type (TREE_TYPE (field)))
14607 return false;
14608 return true;
14610 else if (TREE_CODE (type) == ARRAY_TYPE)
14611 return (integer_minus_onep (array_type_nelts (type))
14612 || TYPE_DOMAIN (type) == NULL_TREE
14613 || is_empty_type (TREE_TYPE (type)));
14614 return false;
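/* Under this definition, in C++

     struct E {};
     struct D { E e; E a[4]; };

   both E and D are empty, and so is any zero-length array or array
   without a domain, whereas adding a single non-padding scalar field
   makes the enclosing type non-empty.  */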
14617 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14618 that shouldn't be passed via stack. */
14620 bool
14621 default_is_empty_record (const_tree type)
14623 if (!abi_version_at_least (12))
14624 return false;
14626 if (type == error_mark_node)
14627 return false;
14629 if (TREE_ADDRESSABLE (type))
14630 return false;
14632 return is_empty_type (TYPE_MAIN_VARIANT (type));
14635 /* Determine whether TYPE is a structure with a flexible array member,
14636 or a union containing such a structure (possibly recursively). */
14638 bool
14639 flexible_array_type_p (const_tree type)
14641 tree x, last;
14642 switch (TREE_CODE (type))
14644 case RECORD_TYPE:
14645 last = NULL_TREE;
14646 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14647 if (TREE_CODE (x) == FIELD_DECL)
14648 last = x;
14649 if (last == NULL_TREE)
14650 return false;
14651 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14652 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14653 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14654 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14655 return true;
14656 return false;
14657 case UNION_TYPE:
14658 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14660 if (TREE_CODE (x) == FIELD_DECL
14661 && flexible_array_type_p (TREE_TYPE (x)))
14662 return true;
14664 return false;
14665 default:
14666 return false;
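/* For example,

     struct S { int n; char data[]; };
     union U { struct S s; int i; };

   are both detected here: S because its trailing field is an incomplete
   array, and U because one of its members recursively contains such a
   structure.  Only the last FIELD_DECL of a RECORD_TYPE is considered.  */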
14670 /* Like int_size_in_bytes, but handle empty records specially. */
14672 HOST_WIDE_INT
14673 arg_int_size_in_bytes (const_tree type)
14675 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14678 /* Like size_in_bytes, but handle empty records specially. */
14680 tree
14681 arg_size_in_bytes (const_tree type)
14683 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14686 /* Return true if an expression with CODE has to have the same result type as
14687 its first operand. */
14689 bool
14690 expr_type_first_operand_type_p (tree_code code)
14692 switch (code)
14694 case NEGATE_EXPR:
14695 case ABS_EXPR:
14696 case BIT_NOT_EXPR:
14697 case PAREN_EXPR:
14698 case CONJ_EXPR:
14700 case PLUS_EXPR:
14701 case MINUS_EXPR:
14702 case MULT_EXPR:
14703 case TRUNC_DIV_EXPR:
14704 case CEIL_DIV_EXPR:
14705 case FLOOR_DIV_EXPR:
14706 case ROUND_DIV_EXPR:
14707 case TRUNC_MOD_EXPR:
14708 case CEIL_MOD_EXPR:
14709 case FLOOR_MOD_EXPR:
14710 case ROUND_MOD_EXPR:
14711 case RDIV_EXPR:
14712 case EXACT_DIV_EXPR:
14713 case MIN_EXPR:
14714 case MAX_EXPR:
14715 case BIT_IOR_EXPR:
14716 case BIT_XOR_EXPR:
14717 case BIT_AND_EXPR:
14719 case LSHIFT_EXPR:
14720 case RSHIFT_EXPR:
14721 case LROTATE_EXPR:
14722 case RROTATE_EXPR:
14723 return true;
14725 default:
14726 return false;
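/* E.g. PLUS_EXPR, BIT_NOT_EXPR and the shift/rotate codes all produce a
   value of the same type as their first operand (for shifts the second
   operand's type does not affect the result type), whereas comparison
   codes such as LT_EXPR are deliberately absent because they produce a
   boolean-typed result.  */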
14730 /* Return a typenode for the "standard" C type with a given name. */
14731 tree
14732 get_typenode_from_name (const char *name)
14734 if (name == NULL || *name == '\0')
14735 return NULL_TREE;
14737 if (strcmp (name, "char") == 0)
14738 return char_type_node;
14739 if (strcmp (name, "unsigned char") == 0)
14740 return unsigned_char_type_node;
14741 if (strcmp (name, "signed char") == 0)
14742 return signed_char_type_node;
14744 if (strcmp (name, "short int") == 0)
14745 return short_integer_type_node;
14746 if (strcmp (name, "short unsigned int") == 0)
14747 return short_unsigned_type_node;
14749 if (strcmp (name, "int") == 0)
14750 return integer_type_node;
14751 if (strcmp (name, "unsigned int") == 0)
14752 return unsigned_type_node;
14754 if (strcmp (name, "long int") == 0)
14755 return long_integer_type_node;
14756 if (strcmp (name, "long unsigned int") == 0)
14757 return long_unsigned_type_node;
14759 if (strcmp (name, "long long int") == 0)
14760 return long_long_integer_type_node;
14761 if (strcmp (name, "long long unsigned int") == 0)
14762 return long_long_unsigned_type_node;
14764 gcc_unreachable ();
14767 /* List of pointer types used to declare builtins before we have seen their
14768 real declaration.
14770 Keep the size up to date in tree.h ! */
14771 const builtin_structptr_type builtin_structptr_types[6] =
14773 { fileptr_type_node, ptr_type_node, "FILE" },
14774 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14775 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14776 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14777 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14778 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14781 /* Return the maximum object size. */
14783 tree
14784 max_object_size (void)
14786 /* To do: Make this a configurable parameter. */
14787 return TYPE_MAX_VALUE (ptrdiff_type_node);
14790 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14791 parameter default to false and that weeds out error_mark_node. */
14793 bool
14794 verify_type_context (location_t loc, type_context_kind context,
14795 const_tree type, bool silent_p)
14797 if (type == error_mark_node)
14798 return true;
14800 gcc_assert (TYPE_P (type));
14801 return (!targetm.verify_type_context
14802 || targetm.verify_type_context (loc, context, type, silent_p));
14805 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14806 delete operators. Return false if they may or may not name such
14807 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14808 do not. */
14810 bool
14811 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14812 bool *pcertain /* = NULL */)
14814 bool certain;
14815 if (!pcertain)
14816 pcertain = &certain;
14818 const char *new_name = IDENTIFIER_POINTER (new_asm);
14819 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14820 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14821 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14823 /* The following failures are due to invalid names so they're not
14824 considered certain mismatches. */
14825 *pcertain = false;
14827 if (new_len < 5 || delete_len < 6)
14828 return false;
14829 if (new_name[0] == '_')
14830 ++new_name, --new_len;
14831 if (new_name[0] == '_')
14832 ++new_name, --new_len;
14833 if (delete_name[0] == '_')
14834 ++delete_name, --delete_len;
14835 if (delete_name[0] == '_')
14836 ++delete_name, --delete_len;
14837 if (new_len < 4 || delete_len < 5)
14838 return false;
14840 /* The following failures are due to names of user-defined operators
14841 so they're also not considered certain mismatches. */
14843 /* *_len is now just the length after initial underscores. */
14844 if (new_name[0] != 'Z' || new_name[1] != 'n')
14845 return false;
14846 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14847 return false;
14849 /* The following failures are certain mismatches. */
14850 *pcertain = true;
14852 /* _Znw must match _Zdl, _Zna must match _Zda. */
14853 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14854 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14855 return false;
14856 /* 'j', 'm' and 'y' correspond to size_t. */
14857 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14858 return false;
14859 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14860 return false;
14861 if (new_len == 4
14862 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14864 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14865 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14866 if (delete_len == 5)
14867 return true;
14868 if (delete_len == 6 && delete_name[5] == new_name[3])
14869 return true;
14870 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14871 return true;
14873 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14874 || (new_len == 33
14875 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14877 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14878 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14879 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14880 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14881 return true;
14882 if (delete_len == 21
14883 && delete_name[5] == new_name[3]
14884 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14885 return true;
14886 if (delete_len == 34
14887 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14888 return true;
14891 /* The negative result is conservative. */
14892 *pcertain = false;
14893 return false;
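/* Concretely, on an LP64 target where size_t mangles as 'm':

     operator new (size_t)   -> _Znwm, which pairs with _ZdlPv,
       _ZdlPvm (sized delete) and _ZdlPvRKSt9nothrow_t;
     operator new[] (size_t) -> _Znam, which pairs with _ZdaPv and friends;
     aligned operator new    -> _ZnwmSt11align_val_t, which pairs with
       _ZdlPvSt11align_val_t;

   whereas mixing the array and non-array forms (say _Znwm with _ZdaPv)
   is reported as a certain mismatch through *PCERTAIN.  */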
14896 /* Return the zero-based number corresponding to the argument being
14897 deallocated if FNDECL is a deallocation function or an out-of-bounds
14898 value if it isn't. */
14900 unsigned
14901 fndecl_dealloc_argno (tree fndecl)
14903 /* A call to operator delete isn't recognized as one to a built-in. */
14904 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14906 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14907 return 0;
14909 /* Avoid placement delete that's not been inlined. */
14910 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14911 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14912 || id_equal (fname, "_ZdaPvS_")) // array form
14913 return UINT_MAX;
14914 return 0;
14917 /* TODO: Handle user-defined functions with attribute malloc? Handle
14918 known non-built-ins like fopen? */
14919 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14921 switch (DECL_FUNCTION_CODE (fndecl))
14923 case BUILT_IN_FREE:
14924 case BUILT_IN_REALLOC:
14925 return 0;
14926 default:
14927 break;
14929 return UINT_MAX;
14932 tree attrs = DECL_ATTRIBUTES (fndecl);
14933 if (!attrs)
14934 return UINT_MAX;
14936 for (tree atfree = attrs;
14937 (atfree = lookup_attribute ("*dealloc", atfree));
14938 atfree = TREE_CHAIN (atfree))
14940 tree alloc = TREE_VALUE (atfree);
14941 if (!alloc)
14942 continue;
14944 tree pos = TREE_CHAIN (alloc);
14945 if (!pos)
14946 return 0;
14948 pos = TREE_VALUE (pos);
14949 return TREE_INT_CST_LOW (pos) - 1;
14952 return UINT_MAX;
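/* For example, free, realloc and any replaceable operator delete
   deallocate their first argument, so the function returns 0 for them.
   A user-declared pair such as

     void my_release (int, void *);
     void *my_acquire (size_t)
       __attribute__ ((malloc (my_release, 2)));

   (hypothetical names) records the association through the internal
   "*dealloc" attribute looked up above, so the function returns 1 for
   MY_RELEASE, while a function not known to deallocate anything yields
   UINT_MAX.  */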
14955 /* If EXPR refers to a character array or pointer declared with attribute
14956 nonstring, return a decl for that array or pointer and set *REF
14957 to the referenced enclosing object or pointer. Otherwise return
14958 null. */
14960 tree
14961 get_attr_nonstring_decl (tree expr, tree *ref)
14963 tree decl = expr;
14964 tree var = NULL_TREE;
14965 if (TREE_CODE (decl) == SSA_NAME)
14967 gimple *def = SSA_NAME_DEF_STMT (decl);
14969 if (is_gimple_assign (def))
14971 tree_code code = gimple_assign_rhs_code (def);
14972 if (code == ADDR_EXPR
14973 || code == COMPONENT_REF
14974 || code == VAR_DECL)
14975 decl = gimple_assign_rhs1 (def);
14977 else
14978 var = SSA_NAME_VAR (decl);
14981 if (TREE_CODE (decl) == ADDR_EXPR)
14982 decl = TREE_OPERAND (decl, 0);
14984 /* To simplify calling code, store the referenced DECL regardless of
14985 the attribute determined below, but avoid storing the SSA_NAME_VAR
14986 obtained above (it's not useful for dataflow purposes). */
14987 if (ref)
14988 *ref = decl;
14990 /* Use the SSA_NAME_VAR that was determined above to see if it's
14991 declared nonstring. Otherwise drill down into the referenced
14992 DECL. */
14993 if (var)
14994 decl = var;
14995 else if (TREE_CODE (decl) == ARRAY_REF)
14996 decl = TREE_OPERAND (decl, 0);
14997 else if (TREE_CODE (decl) == COMPONENT_REF)
14998 decl = TREE_OPERAND (decl, 1);
14999 else if (TREE_CODE (decl) == MEM_REF)
15000 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15002 if (DECL_P (decl)
15003 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15004 return decl;
15006 return NULL_TREE;
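/* For example, given

     char buf[32] __attribute__ ((nonstring));

   passing BUF to a string function lets the string-length warning code
   call this routine to recover the declaration of BUF (and, through
   *REF, the enclosing reference it was reached from) and to see its
   "nonstring" attribute when deciding whether to warn.  */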
15009 /* Return the length of the attribute names string
15010 if the arglist chain has more than one entry, -1 otherwise. */
15013 get_target_clone_attr_len (tree arglist)
15015 tree arg;
15016 int str_len_sum = 0;
15017 int argnum = 0;
15019 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15021 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15022 size_t len = strlen (str);
15023 str_len_sum += len + 1;
15024 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15025 argnum++;
15026 argnum++;
15028 if (argnum <= 1)
15029 return -1;
15030 return str_len_sum;
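/* For instance, for

     __attribute__ ((target_clones ("avx2", "arch=slm", "default")))

   the argument list carries three version names, so the function returns
   5 + 9 + 8 = 22 (each name's length plus one for a separator); a
   target_clones attribute naming only a single version returns -1.  */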
15033 void
15034 tree_cc_finalize (void)
15036 clear_nonstandard_integer_type_cache ();
15039 #if CHECKING_P
15041 namespace selftest {
15043 /* Selftests for tree. */
15045 /* Verify that integer constants are sane. */
15047 static void
15048 test_integer_constants ()
15050 ASSERT_TRUE (integer_type_node != NULL);
15051 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15053 tree type = integer_type_node;
15055 tree zero = build_zero_cst (type);
15056 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15057 ASSERT_EQ (type, TREE_TYPE (zero));
15059 tree one = build_int_cst (type, 1);
15060 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15061 ASSERT_EQ (type, TREE_TYPE (one));
15064 /* Verify identifiers. */
15066 static void
15067 test_identifiers ()
15069 tree identifier = get_identifier ("foo");
15070 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15071 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15074 /* Verify LABEL_DECL. */
15076 static void
15077 test_labels ()
15079 tree identifier = get_identifier ("err");
15080 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15081 identifier, void_type_node);
15082 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15083 ASSERT_FALSE (FORCED_LABEL (label_decl));
15086 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15087 are given by VALS. */
15089 static tree
15090 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15092 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15093 tree_vector_builder builder (type, vals.length (), 1);
15094 builder.splice (vals);
15095 return builder.build ();
15098 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15100 static void
15101 check_vector_cst (const vec<tree> &expected, tree actual)
15103 ASSERT_KNOWN_EQ (expected.length (),
15104 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15105 for (unsigned int i = 0; i < expected.length (); ++i)
15106 ASSERT_EQ (wi::to_wide (expected[i]),
15107 wi::to_wide (vector_cst_elt (actual, i)));
15110 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15111 and that its elements match EXPECTED. */
15113 static void
15114 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15115 unsigned int npatterns)
15117 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15118 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15119 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15120 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15121 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15122 check_vector_cst (expected, actual);
15125 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15126 and NPATTERNS background elements, and that its elements match
15127 EXPECTED. */
15129 static void
15130 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15131 unsigned int npatterns)
15133 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15134 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15135 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15136 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15137 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15138 check_vector_cst (expected, actual);
15141 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15142 and that its elements match EXPECTED. */
15144 static void
15145 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15146 unsigned int npatterns)
15148 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15149 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15150 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15151 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15152 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15153 check_vector_cst (expected, actual);
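/* The three checkers above correspond to the compressed VECTOR_CST
   encoding: a constant is stored as NPATTERNS interleaved patterns of
   1, 2 or 3 encoded elements each.  For example { 0, 1, 2, 3, 4, 5, 6, 7 }
   needs one pattern with 3 encoded elements (0, 1 and 2; the step is
   implied by the last two), { 100, 100, ..., 100 } is one pattern with a
   single encoded element, and { 41, 97, 100, 55, 100, 55, ... } uses two
   patterns of two encoded elements each.  */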
15156 /* Test the creation of VECTOR_CSTs. */
15158 static void
15159 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15161 auto_vec<tree, 8> elements (8);
15162 elements.quick_grow (8);
15163 tree element_type = build_nonstandard_integer_type (16, true);
15164 tree vector_type = build_vector_type (element_type, 8);
15166 /* Test a simple linear series with a base of 0 and a step of 1:
15167 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15168 for (unsigned int i = 0; i < 8; ++i)
15169 elements[i] = build_int_cst (element_type, i);
15170 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15171 check_vector_cst_stepped (elements, vector, 1);
15173 /* Try the same with the first element replaced by 100:
15174 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15175 elements[0] = build_int_cst (element_type, 100);
15176 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15177 check_vector_cst_stepped (elements, vector, 1);
15179 /* Try a series that wraps around.
15180 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15181 for (unsigned int i = 1; i < 8; ++i)
15182 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15183 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15184 check_vector_cst_stepped (elements, vector, 1);
15186 /* Try a downward series:
15187 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15188 for (unsigned int i = 1; i < 8; ++i)
15189 elements[i] = build_int_cst (element_type, 80 - i);
15190 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15191 check_vector_cst_stepped (elements, vector, 1);
15193 /* Try two interleaved series with different bases and steps:
15194 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15195 elements[1] = build_int_cst (element_type, 53);
15196 for (unsigned int i = 2; i < 8; i += 2)
15198 elements[i] = build_int_cst (element_type, 70 - i * 2);
15199 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15201 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15202 check_vector_cst_stepped (elements, vector, 2);
15204 /* Try a duplicated value:
15205 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15206 for (unsigned int i = 1; i < 8; ++i)
15207 elements[i] = elements[0];
15208 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15209 check_vector_cst_duplicate (elements, vector, 1);
15211 /* Try an interleaved duplicated value:
15212 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15213 elements[1] = build_int_cst (element_type, 55);
15214 for (unsigned int i = 2; i < 8; ++i)
15215 elements[i] = elements[i - 2];
15216 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15217 check_vector_cst_duplicate (elements, vector, 2);
15219 /* Try a duplicated value with 2 exceptions
15220 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15221 elements[0] = build_int_cst (element_type, 41);
15222 elements[1] = build_int_cst (element_type, 97);
15223 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15224 check_vector_cst_fill (elements, vector, 2);
15226 /* Try with and without a step
15227 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15228 for (unsigned int i = 3; i < 8; i += 2)
15229 elements[i] = build_int_cst (element_type, i * 7);
15230 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15231 check_vector_cst_stepped (elements, vector, 2);
15233 /* Try a fully-general constant:
15234 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15235 elements[5] = build_int_cst (element_type, 9990);
15236 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15237 check_vector_cst_fill (elements, vector, 4);
15240 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15241 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15242 modifying its argument in-place. */
15244 static void
15245 check_strip_nops (tree node, tree expected)
15247 STRIP_NOPS (node);
15248 ASSERT_EQ (expected, node);
15251 /* Verify location wrappers. */
15253 static void
15254 test_location_wrappers ()
15256 location_t loc = BUILTINS_LOCATION;
15258 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15260 /* Wrapping a constant. */
15261 tree int_cst = build_int_cst (integer_type_node, 42);
15262 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15263 ASSERT_FALSE (location_wrapper_p (int_cst));
15265 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15266 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15267 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15268 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15270 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15271 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15273 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15274 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15275 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15276 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15278 /* Wrapping a STRING_CST. */
15279 tree string_cst = build_string (4, "foo");
15280 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15281 ASSERT_FALSE (location_wrapper_p (string_cst));
15283 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15284 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15285 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15286 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15287 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15290 /* Wrapping a variable. */
15291 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15292 get_identifier ("some_int_var"),
15293 integer_type_node);
15294 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15295 ASSERT_FALSE (location_wrapper_p (int_var));
15297 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15298 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15299 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15300 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15302 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15303 wrapper. */
15304 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15305 ASSERT_FALSE (location_wrapper_p (r_cast));
15306 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}

/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */
  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));
  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));
  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
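  /* (There is no "nonnegative" ordering for complex values, so
     tree_expr_nonnegative_p conservatively returns false for all of the
     COMPLEX_CSTs above, even those whose value is 0 or 1.)  */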
  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}

/* Check that string escaping works correctly.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
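  /* (With a non-zero message length the pretty-printer wraps lines itself,
     so escaped_string::escape leaves '\n' alone and only the remaining
     control characters are escaped, as the two assertions above show.)  */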
  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}

/* Run all of the selftests within this file.  */
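/* (These selftests are compiled only when CHECKING_P is set; they are run
   through the selftest framework, e.g. via the -fself-test option used
   during GCC's own build.  See selftest-run-tests.cc.)  */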
void
tree_cc_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"