gcc/tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 /* Names of tree components.
78 Used for printing out the tree and error messages. */
79 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
80 #define END_OF_BASE_TREE_CODES "@dummy",
82 static const char *const tree_code_name[] = {
83 #include "all-tree.def"
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
89 /* Each tree code class has an associated string representation.
90 These must correspond to the tree_code_class entries. */
92 const char *const tree_code_class_strings[] =
94 "exceptional",
95 "constant",
96 "type",
97 "declaration",
98 "reference",
99 "comparison",
100 "unary",
101 "binary",
102 "statement",
103 "vl_exp",
104 "expression"
107 /* obstack.[ch] explicitly declined to prototype this. */
108 extern int _obstack_allocated_p (struct obstack *h, void *obj);
110 /* Statistics-gathering stuff. */
112 static uint64_t tree_code_counts[MAX_TREE_CODES];
113 uint64_t tree_node_counts[(int) all_kinds];
114 uint64_t tree_node_sizes[(int) all_kinds];
116 /* Keep in sync with tree.h:enum tree_node_kind. */
117 static const char * const tree_node_kind_names[] = {
118 "decls",
119 "types",
120 "blocks",
121 "stmts",
122 "refs",
123 "exprs",
124 "constants",
125 "identifiers",
126 "vecs",
127 "binfos",
128 "ssa names",
129 "constructors",
130 "random kinds",
131 "lang_decl kinds",
132 "lang_type kinds",
133 "omp clauses",
136 /* Unique id for next decl created. */
137 static GTY(()) int next_decl_uid;
138 /* Unique id for next type created. */
139 static GTY(()) unsigned next_type_uid = 1;
140 /* Unique id for next debug decl created. Use negative numbers,
141 to catch erroneous uses. */
142 static GTY(()) int next_debug_decl_uid;
144 /* Since we cannot rehash a type after it is in the table, we have to
145 keep the hash code. */
147 struct GTY((for_user)) type_hash {
148 unsigned long hash;
149 tree type;
152 /* Initial size of the hash table (rounded to next prime). */
153 #define TYPE_HASH_INITIAL_SIZE 1000
155 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
157 static hashval_t hash (type_hash *t) { return t->hash; }
158 static bool equal (type_hash *a, type_hash *b);
160 static int
161 keep_cache_entry (type_hash *&t)
163 return ggc_marked_p (t->type);
167 /* Now here is the hash table. When recording a type, it is added to
168 the slot whose index is the hash code. Note that the hash table is
169 used for several kinds of types (function types, array types and
170 array index range types, for now). While all these live in the
171 same table, they are completely independent, and the hash code is
172 computed differently for each of these. */
174 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
176 /* Hash table and temporary node for larger integer const values. */
177 static GTY (()) tree int_cst_node;
179 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
181 static hashval_t hash (tree t);
182 static bool equal (tree x, tree y);
185 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
187 /* Class and variable for making sure that there is a single POLY_INT_CST
188 for a given value. */
189 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
191 typedef std::pair<tree, const poly_wide_int *> compare_type;
192 static hashval_t hash (tree t);
193 static bool equal (tree x, const compare_type &y);
196 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
198 /* Hash table for optimization flags and target option flags. Use the same
199 hash table for both sets of options. Nodes for building the current
200 optimization and target option nodes. The assumption is most of the time
201 the options created will already be in the hash table, so we avoid
202 allocating and freeing up a node repeatedly. */
203 static GTY (()) tree cl_optimization_node;
204 static GTY (()) tree cl_target_option_node;
206 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
212 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
214 /* General tree->tree mapping structure for use in hash tables. */
217 static GTY ((cache))
218 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
223 static GTY ((cache))
224 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
226 static void set_type_quals (tree, int);
227 static void print_type_hash_statistics (void);
228 static void print_debug_expr_statistics (void);
229 static void print_value_expr_statistics (void);
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
234 bool int_n_enabled_p[NUM_INT_N_ENTS];
235 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
237 bool tree_contains_struct[MAX_TREE_CODES][64];
239 /* Number of operands for each OMP clause. */
240 unsigned const char omp_clause_num_ops[] =
242 0, /* OMP_CLAUSE_ERROR */
243 1, /* OMP_CLAUSE_PRIVATE */
244 1, /* OMP_CLAUSE_SHARED */
245 1, /* OMP_CLAUSE_FIRSTPRIVATE */
246 2, /* OMP_CLAUSE_LASTPRIVATE */
247 5, /* OMP_CLAUSE_REDUCTION */
248 5, /* OMP_CLAUSE_TASK_REDUCTION */
249 5, /* OMP_CLAUSE_IN_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 1, /* OMP_CLAUSE_AFFINITY */
254 2, /* OMP_CLAUSE_ALIGNED */
255 3, /* OMP_CLAUSE_ALLOCATE */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_NONTEMPORAL */
258 1, /* OMP_CLAUSE_UNIFORM */
259 1, /* OMP_CLAUSE_ENTER */
260 1, /* OMP_CLAUSE_LINK */
261 1, /* OMP_CLAUSE_DETACH */
262 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
263 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
264 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
265 1, /* OMP_CLAUSE_INCLUSIVE */
266 1, /* OMP_CLAUSE_EXCLUSIVE */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
271 1, /* OMP_CLAUSE_DOACROSS */
272 2, /* OMP_CLAUSE__CACHE_ */
273 2, /* OMP_CLAUSE_GANG */
274 1, /* OMP_CLAUSE_ASYNC */
275 1, /* OMP_CLAUSE_WAIT */
276 0, /* OMP_CLAUSE_AUTO */
277 0, /* OMP_CLAUSE_SEQ */
278 1, /* OMP_CLAUSE__LOOPTEMP_ */
279 1, /* OMP_CLAUSE__REDUCTEMP_ */
280 1, /* OMP_CLAUSE__CONDTEMP_ */
281 1, /* OMP_CLAUSE__SCANTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_NUM_THREADS */
284 1, /* OMP_CLAUSE_SCHEDULE */
285 0, /* OMP_CLAUSE_NOWAIT */
286 1, /* OMP_CLAUSE_ORDERED */
287 0, /* OMP_CLAUSE_DEFAULT */
288 3, /* OMP_CLAUSE_COLLAPSE */
289 0, /* OMP_CLAUSE_UNTIED */
290 1, /* OMP_CLAUSE_FINAL */
291 0, /* OMP_CLAUSE_MERGEABLE */
292 1, /* OMP_CLAUSE_DEVICE */
293 1, /* OMP_CLAUSE_DIST_SCHEDULE */
294 0, /* OMP_CLAUSE_INBRANCH */
295 0, /* OMP_CLAUSE_NOTINBRANCH */
296 2, /* OMP_CLAUSE_NUM_TEAMS */
297 1, /* OMP_CLAUSE_THREAD_LIMIT */
298 0, /* OMP_CLAUSE_PROC_BIND */
299 1, /* OMP_CLAUSE_SAFELEN */
300 1, /* OMP_CLAUSE_SIMDLEN */
301 0, /* OMP_CLAUSE_DEVICE_TYPE */
302 0, /* OMP_CLAUSE_FOR */
303 0, /* OMP_CLAUSE_PARALLEL */
304 0, /* OMP_CLAUSE_SECTIONS */
305 0, /* OMP_CLAUSE_TASKGROUP */
306 1, /* OMP_CLAUSE_PRIORITY */
307 1, /* OMP_CLAUSE_GRAINSIZE */
308 1, /* OMP_CLAUSE_NUM_TASKS */
309 0, /* OMP_CLAUSE_NOGROUP */
310 0, /* OMP_CLAUSE_THREADS */
311 0, /* OMP_CLAUSE_SIMD */
312 1, /* OMP_CLAUSE_HINT */
313 0, /* OMP_CLAUSE_DEFAULTMAP */
314 0, /* OMP_CLAUSE_ORDER */
315 0, /* OMP_CLAUSE_BIND */
316 1, /* OMP_CLAUSE_FILTER */
317 1, /* OMP_CLAUSE__SIMDUID_ */
318 0, /* OMP_CLAUSE__SIMT_ */
319 0, /* OMP_CLAUSE_INDEPENDENT */
320 1, /* OMP_CLAUSE_WORKER */
321 1, /* OMP_CLAUSE_VECTOR */
322 1, /* OMP_CLAUSE_NUM_GANGS */
323 1, /* OMP_CLAUSE_NUM_WORKERS */
324 1, /* OMP_CLAUSE_VECTOR_LENGTH */
325 3, /* OMP_CLAUSE_TILE */
326 0, /* OMP_CLAUSE_IF_PRESENT */
327 0, /* OMP_CLAUSE_FINALIZE */
328 0, /* OMP_CLAUSE_NOHOST */
331 const char * const omp_clause_code_name[] =
333 "error_clause",
334 "private",
335 "shared",
336 "firstprivate",
337 "lastprivate",
338 "reduction",
339 "task_reduction",
340 "in_reduction",
341 "copyin",
342 "copyprivate",
343 "linear",
344 "affinity",
345 "aligned",
346 "allocate",
347 "depend",
348 "nontemporal",
349 "uniform",
350 "enter",
351 "link",
352 "detach",
353 "use_device_ptr",
354 "use_device_addr",
355 "is_device_ptr",
356 "inclusive",
357 "exclusive",
358 "from",
359 "to",
360 "map",
361 "has_device_addr",
362 "doacross",
363 "_cache_",
364 "gang",
365 "async",
366 "wait",
367 "auto",
368 "seq",
369 "_looptemp_",
370 "_reductemp_",
371 "_condtemp_",
372 "_scantemp_",
373 "if",
374 "num_threads",
375 "schedule",
376 "nowait",
377 "ordered",
378 "default",
379 "collapse",
380 "untied",
381 "final",
382 "mergeable",
383 "device",
384 "dist_schedule",
385 "inbranch",
386 "notinbranch",
387 "num_teams",
388 "thread_limit",
389 "proc_bind",
390 "safelen",
391 "simdlen",
392 "device_type",
393 "for",
394 "parallel",
395 "sections",
396 "taskgroup",
397 "priority",
398 "grainsize",
399 "num_tasks",
400 "nogroup",
401 "threads",
402 "simd",
403 "hint",
404 "defaultmap",
405 "order",
406 "bind",
407 "filter",
408 "_simduid_",
409 "_simt_",
410 "independent",
411 "worker",
412 "vector",
413 "num_gangs",
414 "num_workers",
415 "vector_length",
416 "tile",
417 "if_present",
418 "finalize",
419 "nohost",
422 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
423 clause names, but for use in diagnostics etc. we would like to use the "user"
424 clause names. */
426 const char *
427 user_omp_clause_code_name (tree clause, bool oacc)
429 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
430 distinguish clauses as seen by the user. See also where front ends do
431 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
432 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
433 switch (OMP_CLAUSE_MAP_KIND (clause))
435 case GOMP_MAP_FORCE_ALLOC:
436 case GOMP_MAP_ALLOC: return "create";
437 case GOMP_MAP_FORCE_TO:
438 case GOMP_MAP_TO: return "copyin";
439 case GOMP_MAP_FORCE_FROM:
440 case GOMP_MAP_FROM: return "copyout";
441 case GOMP_MAP_FORCE_TOFROM:
442 case GOMP_MAP_TOFROM: return "copy";
443 case GOMP_MAP_RELEASE: return "delete";
444 case GOMP_MAP_FORCE_PRESENT: return "present";
445 case GOMP_MAP_ATTACH: return "attach";
446 case GOMP_MAP_FORCE_DETACH:
447 case GOMP_MAP_DETACH: return "detach";
448 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
449 case GOMP_MAP_LINK: return "link";
450 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
451 default: break;
454 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
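/* Example (illustrative sketch; CLAUSE is a hypothetical OMP_CLAUSE_MAP built
   the way an OpenACC front end would build it):

     tree clause = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
     OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
     const char *name = user_omp_clause_code_name (clause, true);

   Here NAME is "copyin", the OpenACC spelling, whereas
   omp_clause_code_name[OMP_CLAUSE_MAP] would print the internal name "map".  */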
458 /* Return the tree node structure used by tree code CODE. */
460 static inline enum tree_node_structure_enum
461 tree_node_structure_for_code (enum tree_code code)
463 switch (TREE_CODE_CLASS (code))
465 case tcc_declaration:
466 switch (code)
468 case CONST_DECL: return TS_CONST_DECL;
469 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
470 case FIELD_DECL: return TS_FIELD_DECL;
471 case FUNCTION_DECL: return TS_FUNCTION_DECL;
472 case LABEL_DECL: return TS_LABEL_DECL;
473 case PARM_DECL: return TS_PARM_DECL;
474 case RESULT_DECL: return TS_RESULT_DECL;
475 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
476 case TYPE_DECL: return TS_TYPE_DECL;
477 case VAR_DECL: return TS_VAR_DECL;
478 default: return TS_DECL_NON_COMMON;
481 case tcc_type: return TS_TYPE_NON_COMMON;
483 case tcc_binary:
484 case tcc_comparison:
485 case tcc_expression:
486 case tcc_reference:
487 case tcc_statement:
488 case tcc_unary:
489 case tcc_vl_exp: return TS_EXP;
491 default: /* tcc_constant and tcc_exceptional */
492 break;
495 switch (code)
497 /* tcc_constant cases. */
498 case COMPLEX_CST: return TS_COMPLEX;
499 case FIXED_CST: return TS_FIXED_CST;
500 case INTEGER_CST: return TS_INT_CST;
501 case POLY_INT_CST: return TS_POLY_INT_CST;
502 case REAL_CST: return TS_REAL_CST;
503 case STRING_CST: return TS_STRING;
504 case VECTOR_CST: return TS_VECTOR;
505 case VOID_CST: return TS_TYPED;
507 /* tcc_exceptional cases. */
508 case BLOCK: return TS_BLOCK;
509 case CONSTRUCTOR: return TS_CONSTRUCTOR;
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case OMP_CLAUSE: return TS_OMP_CLAUSE;
513 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
514 case PLACEHOLDER_EXPR: return TS_COMMON;
515 case SSA_NAME: return TS_SSA_NAME;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
518 case TREE_BINFO: return TS_BINFO;
519 case TREE_LIST: return TS_LIST;
520 case TREE_VEC: return TS_VEC;
522 default:
523 gcc_unreachable ();
528 /* Initialize tree_contains_struct to describe the hierarchy of tree
529 nodes. */
531 static void
532 initialize_tree_contains_struct (void)
534 unsigned i;
536 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
538 enum tree_code code;
539 enum tree_node_structure_enum ts_code;
541 code = (enum tree_code) i;
542 ts_code = tree_node_structure_for_code (code);
544 /* Mark the TS structure itself. */
545 tree_contains_struct[code][ts_code] = 1;
547 /* Mark all the structures that TS is derived from. */
548 switch (ts_code)
550 case TS_TYPED:
551 case TS_BLOCK:
552 case TS_OPTIMIZATION:
553 case TS_TARGET_OPTION:
554 MARK_TS_BASE (code);
555 break;
557 case TS_COMMON:
558 case TS_INT_CST:
559 case TS_POLY_INT_CST:
560 case TS_REAL_CST:
561 case TS_FIXED_CST:
562 case TS_VECTOR:
563 case TS_STRING:
564 case TS_COMPLEX:
565 case TS_SSA_NAME:
566 case TS_CONSTRUCTOR:
567 case TS_EXP:
568 case TS_STATEMENT_LIST:
569 MARK_TS_TYPED (code);
570 break;
572 case TS_IDENTIFIER:
573 case TS_DECL_MINIMAL:
574 case TS_TYPE_COMMON:
575 case TS_LIST:
576 case TS_VEC:
577 case TS_BINFO:
578 case TS_OMP_CLAUSE:
579 MARK_TS_COMMON (code);
580 break;
582 case TS_TYPE_WITH_LANG_SPECIFIC:
583 MARK_TS_TYPE_COMMON (code);
584 break;
586 case TS_TYPE_NON_COMMON:
587 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
588 break;
590 case TS_DECL_COMMON:
591 MARK_TS_DECL_MINIMAL (code);
592 break;
594 case TS_DECL_WRTL:
595 case TS_CONST_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 case TS_DECL_NON_COMMON:
600 MARK_TS_DECL_WITH_VIS (code);
601 break;
603 case TS_DECL_WITH_VIS:
604 case TS_PARM_DECL:
605 case TS_LABEL_DECL:
606 case TS_RESULT_DECL:
607 MARK_TS_DECL_WRTL (code);
608 break;
610 case TS_FIELD_DECL:
611 MARK_TS_DECL_COMMON (code);
612 break;
614 case TS_VAR_DECL:
615 MARK_TS_DECL_WITH_VIS (code);
616 break;
618 case TS_TYPE_DECL:
619 case TS_FUNCTION_DECL:
620 MARK_TS_DECL_NON_COMMON (code);
621 break;
623 case TS_TRANSLATION_UNIT_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
627 default:
628 gcc_unreachable ();
632 /* Basic consistency checks for attributes used in fold. */
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
634 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
646 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
662 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
663 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
665 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
666 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
667 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
669 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
670 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
676 /* Init tree.cc. */
678 void
679 init_ttree (void)
681 /* Initialize the hash table of types. */
682 type_hash_table
683 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
685 debug_expr_for_decl
686 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
688 value_expr_for_decl
689 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
691 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
693 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
695 int_cst_node = make_int_cst (1, 1);
697 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
699 cl_optimization_node = make_node (OPTIMIZATION_NODE);
700 cl_target_option_node = make_node (TARGET_OPTION_NODE);
702 /* Initialize the tree_contains_struct array. */
703 initialize_tree_contains_struct ();
704 lang_hooks.init_ts ();
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
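/* Usage sketch (illustrative; DECL stands for some FUNCTION_DECL or VAR_DECL
   obtained from a front end):

     tree name = DECL_ASSEMBLER_NAME (decl);   // first use: computed and cached
     gcc_checking_assert (DECL_ASSEMBLER_NAME_SET_P (decl));
     tree raw = DECL_ASSEMBLER_NAME_RAW (decl);  // later uses: cache only

   DECL_ASSEMBLER_NAME funnels into decl_assembler_name above, so the first
   query triggers lang_hooks.set_decl_assembler_name and subsequent queries
   just return the cached IDENTIFIER_NODE.  */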
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE, if this changes the
721 name. */
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 /* Return true if DECL may need an assembler name to be set. */
732 static inline bool
733 need_assembler_name_p (tree decl)
735 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
736 Rule merging. This makes type_odr_p return true on those types during
737 LTO and, by comparing the mangled name, we can say what types are intended
738 to be equivalent across compilation units.
740 We do not store names of types for which type_in_anonymous_namespace_p holds.
742 Record, union and enumeration types have linkage that allows us
743 to check type_in_anonymous_namespace_p. We do not mangle compound types
744 that always can be compared structurally.
746 Similarly for builtin types, we compare properties of their main variant.
747 A special case is integer types, where mangling does make a difference
748 between char/signed char/unsigned char etc. Storing names for these lets
749 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
750 See cp/mangle.cc:write_builtin_type for details. */
752 if (TREE_CODE (decl) == TYPE_DECL)
754 if (DECL_NAME (decl)
755 && decl == TYPE_NAME (TREE_TYPE (decl))
756 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
757 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
758 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
759 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
760 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
761 && (type_with_linkage_p (TREE_TYPE (decl))
762 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
763 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
764 return !DECL_ASSEMBLER_NAME_SET_P (decl);
765 return false;
767 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
768 if (!VAR_OR_FUNCTION_DECL_P (decl))
769 return false;
771 /* If DECL already has its assembler name set, it does not need a
772 new one. */
773 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
774 || DECL_ASSEMBLER_NAME_SET_P (decl))
775 return false;
777 /* Abstract decls do not need an assembler name. */
778 if (DECL_ABSTRACT_P (decl))
779 return false;
781 /* For VAR_DECLs, only static, public and external symbols need an
782 assembler name. */
783 if (VAR_P (decl)
784 && !TREE_STATIC (decl)
785 && !TREE_PUBLIC (decl)
786 && !DECL_EXTERNAL (decl))
787 return false;
789 if (TREE_CODE (decl) == FUNCTION_DECL)
791 /* Do not set assembler name on builtins. Allow RTL expansion to
792 decide whether to expand inline or via a regular call. */
793 if (fndecl_built_in_p (decl)
794 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
795 return false;
797 /* Functions represented in the callgraph need an assembler name. */
798 if (cgraph_node::get (decl) != NULL)
799 return true;
801 /* Unused and not public functions don't need an assembler name. */
802 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
803 return false;
806 return true;
809 /* If T needs an assembler name, have one created for it. */
811 void
812 assign_assembler_name_if_needed (tree t)
814 if (need_assembler_name_p (t))
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
830 decl_assembler_name (t);
832 input_location = saved_location;
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
858 /* When the target supports named sections, return the section name of NODE
859 or NULL if it is in no section. */
860 const char *
861 decl_section_name (const_tree node)
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
869 /* Set section name of NODE to VALUE (a C string), or clear the section
870 when VALUE is NULL. */
871 void
872 set_decl_section_name (tree node, const char *value)
874 struct symtab_node *snode;
876 if (value == NULL)
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
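/* Illustrative use (DECL stands for a global VAR_DECL; the section name is
   only an example):

     set_decl_section_name (decl, ".mydata");

   This creates the varpool node for DECL if it does not exist yet and records
   the section on it; passing a NULL name instead only clears the section of an
   already existing symtab node and never creates one.  */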
889 /* Set section name of NODE to match the section name of OTHER.
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
907 else
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
930 struct varpool_node *vnode;
932 if (model == TLS_MODEL_NONE)
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
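/* Illustrative use (VAR stands for a thread-local global VAR_DECL):

     set_decl_tls_model (var, TLS_MODEL_INITIAL_EXEC);
     gcc_checking_assert (decl_tls_model (var) == TLS_MODEL_INITIAL_EXEC);

   As with section names, the model lives on the varpool node, so setting any
   model other than TLS_MODEL_NONE creates that node on demand.  */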
943 /* Compute the number of bytes occupied by a tree with code CODE.
944 This function cannot be used for nodes that have variable sizes,
945 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
946 size_t
947 tree_code_size (enum tree_code code)
949 switch (TREE_CODE_CLASS (code))
951 case tcc_declaration: /* A decl node */
952 switch (code)
954 case FIELD_DECL: return sizeof (tree_field_decl);
955 case PARM_DECL: return sizeof (tree_parm_decl);
956 case VAR_DECL: return sizeof (tree_var_decl);
957 case LABEL_DECL: return sizeof (tree_label_decl);
958 case RESULT_DECL: return sizeof (tree_result_decl);
959 case CONST_DECL: return sizeof (tree_const_decl);
960 case TYPE_DECL: return sizeof (tree_type_decl);
961 case FUNCTION_DECL: return sizeof (tree_function_decl);
962 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
963 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
964 case NAMESPACE_DECL:
965 case IMPORTED_DECL:
966 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
967 default:
968 gcc_checking_assert (code >= NUM_TREE_CODES);
969 return lang_hooks.tree_size (code);
972 case tcc_type: /* a type node */
973 switch (code)
975 case OFFSET_TYPE:
976 case ENUMERAL_TYPE:
977 case BOOLEAN_TYPE:
978 case INTEGER_TYPE:
979 case REAL_TYPE:
980 case OPAQUE_TYPE:
981 case POINTER_TYPE:
982 case REFERENCE_TYPE:
983 case NULLPTR_TYPE:
984 case FIXED_POINT_TYPE:
985 case COMPLEX_TYPE:
986 case VECTOR_TYPE:
987 case ARRAY_TYPE:
988 case RECORD_TYPE:
989 case UNION_TYPE:
990 case QUAL_UNION_TYPE:
991 case VOID_TYPE:
992 case FUNCTION_TYPE:
993 case METHOD_TYPE:
994 case BITINT_TYPE:
995 case LANG_TYPE: return sizeof (tree_type_non_common);
996 default:
997 gcc_checking_assert (code >= NUM_TREE_CODES);
998 return lang_hooks.tree_size (code);
1001 case tcc_reference: /* a reference */
1002 case tcc_expression: /* an expression */
1003 case tcc_statement: /* an expression with side effects */
1004 case tcc_comparison: /* a comparison expression */
1005 case tcc_unary: /* a unary arithmetic expression */
1006 case tcc_binary: /* a binary arithmetic expression */
1007 return (sizeof (struct tree_exp)
1008 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1010 case tcc_constant: /* a constant */
1011 switch (code)
1013 case VOID_CST: return sizeof (tree_typed);
1014 case INTEGER_CST: gcc_unreachable ();
1015 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1016 case REAL_CST: return sizeof (tree_real_cst);
1017 case FIXED_CST: return sizeof (tree_fixed_cst);
1018 case COMPLEX_CST: return sizeof (tree_complex);
1019 case VECTOR_CST: gcc_unreachable ();
1020 case STRING_CST: gcc_unreachable ();
1021 default:
1022 gcc_checking_assert (code >= NUM_TREE_CODES);
1023 return lang_hooks.tree_size (code);
1026 case tcc_exceptional: /* something random, like an identifier. */
1027 switch (code)
1029 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1030 case TREE_LIST: return sizeof (tree_list);
1032 case ERROR_MARK:
1033 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1035 case TREE_VEC: gcc_unreachable ();
1036 case OMP_CLAUSE: gcc_unreachable ();
1038 case SSA_NAME: return sizeof (tree_ssa_name);
1040 case STATEMENT_LIST: return sizeof (tree_statement_list);
1041 case BLOCK: return sizeof (struct tree_block);
1042 case CONSTRUCTOR: return sizeof (tree_constructor);
1043 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1044 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1046 default:
1047 gcc_checking_assert (code >= NUM_TREE_CODES);
1048 return lang_hooks.tree_size (code);
1051 default:
1052 gcc_unreachable ();
1056 /* Compute the number of bytes occupied by NODE. This routine only
1057 looks at TREE_CODE, except for those nodes that have variable sizes. */
1058 size_t
1059 tree_size (const_tree node)
1061 const enum tree_code code = TREE_CODE (node);
1062 switch (code)
1064 case INTEGER_CST:
1065 return (sizeof (struct tree_int_cst)
1066 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1068 case TREE_BINFO:
1069 return (offsetof (struct tree_binfo, base_binfos)
1070 + vec<tree, va_gc>
1071 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1073 case TREE_VEC:
1074 return (sizeof (struct tree_vec)
1075 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1077 case VECTOR_CST:
1078 return (sizeof (struct tree_vector)
1079 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1081 case STRING_CST:
1082 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1084 case OMP_CLAUSE:
1085 return (sizeof (struct tree_omp_clause)
1086 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1087 * sizeof (tree));
1089 default:
1090 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1091 return (sizeof (struct tree_exp)
1092 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1093 else
1094 return tree_code_size (code);
1098 /* Return tree node kind based on tree CODE. */
1100 static tree_node_kind
1101 get_stats_node_kind (enum tree_code code)
1103 enum tree_code_class type = TREE_CODE_CLASS (code);
1105 switch (type)
1107 case tcc_declaration: /* A decl node */
1108 return d_kind;
1109 case tcc_type: /* a type node */
1110 return t_kind;
1111 case tcc_statement: /* an expression with side effects */
1112 return s_kind;
1113 case tcc_reference: /* a reference */
1114 return r_kind;
1115 case tcc_expression: /* an expression */
1116 case tcc_comparison: /* a comparison expression */
1117 case tcc_unary: /* a unary arithmetic expression */
1118 case tcc_binary: /* a binary arithmetic expression */
1119 return e_kind;
1120 case tcc_constant: /* a constant */
1121 return c_kind;
1122 case tcc_exceptional: /* something random, like an identifier. */
1123 switch (code)
1125 case IDENTIFIER_NODE:
1126 return id_kind;
1127 case TREE_VEC:
1128 return vec_kind;
1129 case TREE_BINFO:
1130 return binfo_kind;
1131 case SSA_NAME:
1132 return ssa_name_kind;
1133 case BLOCK:
1134 return b_kind;
1135 case CONSTRUCTOR:
1136 return constr_kind;
1137 case OMP_CLAUSE:
1138 return omp_clause_kind;
1139 default:
1140 return x_kind;
1142 break;
1143 case tcc_vl_exp:
1144 return e_kind;
1145 default:
1146 gcc_unreachable ();
1150 /* Record interesting allocation statistics for a tree node with CODE
1151 and LENGTH. */
1153 static void
1154 record_node_allocation_statistics (enum tree_code code, size_t length)
1156 if (!GATHER_STATISTICS)
1157 return;
1159 tree_node_kind kind = get_stats_node_kind (code);
1161 tree_code_counts[(int) code]++;
1162 tree_node_counts[(int) kind]++;
1163 tree_node_sizes[(int) kind] += length;
1166 /* Allocate and return a new UID from the DECL_UID namespace. */
1168 int
1169 allocate_decl_uid (void)
1171 return next_decl_uid++;
1174 /* Return a newly allocated node of code CODE. For decl and type
1175 nodes, some other fields are initialized. The rest of the node is
1176 initialized to zero. This function cannot be used for TREE_VEC,
1177 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1178 tree_code_size.
1180 Achoo! I got a code in the node. */
1182 tree
1183 make_node (enum tree_code code MEM_STAT_DECL)
1185 tree t;
1186 enum tree_code_class type = TREE_CODE_CLASS (code);
1187 size_t length = tree_code_size (code);
1189 record_node_allocation_statistics (code, length);
1191 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1192 TREE_SET_CODE (t, code);
1194 switch (type)
1196 case tcc_statement:
1197 if (code != DEBUG_BEGIN_STMT)
1198 TREE_SIDE_EFFECTS (t) = 1;
1199 break;
1201 case tcc_declaration:
1202 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1204 if (code == FUNCTION_DECL)
1206 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1207 SET_DECL_MODE (t, FUNCTION_MODE);
1209 else
1210 SET_DECL_ALIGN (t, 1);
1212 DECL_SOURCE_LOCATION (t) = input_location;
1213 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1214 DECL_UID (t) = --next_debug_decl_uid;
1215 else
1217 DECL_UID (t) = allocate_decl_uid ();
1218 SET_DECL_PT_UID (t, -1);
1220 if (TREE_CODE (t) == LABEL_DECL)
1221 LABEL_DECL_UID (t) = -1;
1223 break;
1225 case tcc_type:
1226 TYPE_UID (t) = next_type_uid++;
1227 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1228 TYPE_USER_ALIGN (t) = 0;
1229 TYPE_MAIN_VARIANT (t) = t;
1230 TYPE_CANONICAL (t) = t;
1232 /* Default to no attributes for type, but let target change that. */
1233 TYPE_ATTRIBUTES (t) = NULL_TREE;
1234 targetm.set_default_type_attributes (t);
1236 /* We have not yet computed the alias set for this type. */
1237 TYPE_ALIAS_SET (t) = -1;
1238 break;
1240 case tcc_constant:
1241 TREE_CONSTANT (t) = 1;
1242 break;
1244 case tcc_expression:
1245 switch (code)
1247 case INIT_EXPR:
1248 case MODIFY_EXPR:
1249 case VA_ARG_EXPR:
1250 case PREDECREMENT_EXPR:
1251 case PREINCREMENT_EXPR:
1252 case POSTDECREMENT_EXPR:
1253 case POSTINCREMENT_EXPR:
1254 /* All of these have side-effects, no matter what their
1255 operands are. */
1256 TREE_SIDE_EFFECTS (t) = 1;
1257 break;
1259 default:
1260 break;
1262 break;
1264 case tcc_exceptional:
1265 switch (code)
1267 case TARGET_OPTION_NODE:
1268 TREE_TARGET_OPTION(t)
1269 = ggc_cleared_alloc<struct cl_target_option> ();
1270 break;
1272 case OPTIMIZATION_NODE:
1273 TREE_OPTIMIZATION (t)
1274 = ggc_cleared_alloc<struct cl_optimization> ();
1275 break;
1277 default:
1278 break;
1280 break;
1282 default:
1283 /* Other classes need no special treatment. */
1284 break;
1287 return t;
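/* Rough usage sketch (illustrative only):

     tree type = make_node (RECORD_TYPE);
     // TYPE_UID is freshly allocated, TYPE_ALIGN is BITS_PER_UNIT, and
     // TYPE_MAIN_VARIANT / TYPE_CANONICAL point back at the node itself,
     // per the tcc_type case above.

     tree decl = make_node (VAR_DECL);
     // DECL_UID comes from allocate_decl_uid and DECL_SOURCE_LOCATION is the
     // current input_location.

   Variable-sized codes (TREE_VEC, INTEGER_CST, OMP_CLAUSE, ...) must use
   their dedicated constructors instead.  */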
1290 /* Free tree node. */
1292 void
1293 free_node (tree node)
1295 enum tree_code code = TREE_CODE (node);
1296 if (GATHER_STATISTICS)
1298 enum tree_node_kind kind = get_stats_node_kind (code);
1300 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1301 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1302 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1304 tree_code_counts[(int) TREE_CODE (node)]--;
1305 tree_node_counts[(int) kind]--;
1306 tree_node_sizes[(int) kind] -= tree_size (node);
1308 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1309 vec_free (CONSTRUCTOR_ELTS (node));
1310 else if (code == BLOCK)
1311 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1312 else if (code == TREE_BINFO)
1313 vec_free (BINFO_BASE_ACCESSES (node));
1314 else if (code == OPTIMIZATION_NODE)
1315 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1316 else if (code == TARGET_OPTION_NODE)
1317 cl_target_option_free (TREE_TARGET_OPTION (node));
1318 ggc_free (node);
1321 /* Return a new node with the same contents as NODE except that its
1322 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1324 tree
1325 copy_node (tree node MEM_STAT_DECL)
1327 tree t;
1328 enum tree_code code = TREE_CODE (node);
1329 size_t length;
1331 gcc_assert (code != STATEMENT_LIST);
1333 length = tree_size (node);
1334 record_node_allocation_statistics (code, length);
1335 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1336 memcpy (t, node, length);
1338 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1339 TREE_CHAIN (t) = 0;
1340 TREE_ASM_WRITTEN (t) = 0;
1341 TREE_VISITED (t) = 0;
1343 if (TREE_CODE_CLASS (code) == tcc_declaration)
1345 if (code == DEBUG_EXPR_DECL)
1346 DECL_UID (t) = --next_debug_decl_uid;
1347 else
1349 DECL_UID (t) = allocate_decl_uid ();
1350 if (DECL_PT_UID_SET_P (node))
1351 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1353 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1354 && DECL_HAS_VALUE_EXPR_P (node))
1356 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1357 DECL_HAS_VALUE_EXPR_P (t) = 1;
1359 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1360 if (VAR_P (node))
1362 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1363 t->decl_with_vis.symtab_node = NULL;
1365 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1367 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1368 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1370 if (TREE_CODE (node) == FUNCTION_DECL)
1372 DECL_STRUCT_FUNCTION (t) = NULL;
1373 t->decl_with_vis.symtab_node = NULL;
1376 else if (TREE_CODE_CLASS (code) == tcc_type)
1378 TYPE_UID (t) = next_type_uid++;
1379 /* The following is so that the debug code for
1380 the copy is different from the original type.
1381 The two statements usually duplicate each other
1382 (because they clear fields of the same union),
1383 but the optimizer should catch that. */
1384 TYPE_SYMTAB_ADDRESS (t) = 0;
1385 TYPE_SYMTAB_DIE (t) = 0;
1387 /* Do not copy the values cache. */
1388 if (TYPE_CACHED_VALUES_P (t))
1390 TYPE_CACHED_VALUES_P (t) = 0;
1391 TYPE_CACHED_VALUES (t) = NULL_TREE;
1394 else if (code == TARGET_OPTION_NODE)
1396 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1397 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1398 sizeof (struct cl_target_option));
1400 else if (code == OPTIMIZATION_NODE)
1402 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1403 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1404 sizeof (struct cl_optimization));
1407 return t;
1410 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1411 For example, this can copy a list made of TREE_LIST nodes. */
1413 tree
1414 copy_list (tree list)
1416 tree head;
1417 tree prev, next;
1419 if (list == 0)
1420 return 0;
1422 head = prev = copy_node (list);
1423 next = TREE_CHAIN (list);
1424 while (next)
1426 TREE_CHAIN (prev) = copy_node (next);
1427 prev = TREE_CHAIN (prev);
1428 next = TREE_CHAIN (next);
1430 return head;
1434 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1435 INTEGER_CST with value CST and type TYPE. */
1437 static unsigned int
1438 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1440 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1441 /* We need extra HWIs if CST is an unsigned integer with its
1442 upper bit set. */
1443 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1444 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1445 return cst.get_len ();
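/* Worked example (illustrative): for a 64-bit unsigned type and
   CST == 0xffffffffffffffff, the wide_int is stored as a single
   HOST_WIDE_INT holding -1. Because the type is unsigned and the top bit is
   set, one extra element is requested, so the resulting INTEGER_CST has
   TREE_INT_CST_NUNITS == 1 but TREE_INT_CST_EXT_NUNITS == 2, the second
   element holding the zero extension (0).  */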
1448 /* Return a new INTEGER_CST with value CST and type TYPE. */
1450 static tree
1451 build_new_int_cst (tree type, const wide_int &cst)
1453 unsigned int len = cst.get_len ();
1454 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1455 tree nt = make_int_cst (len, ext_len);
1457 if (len < ext_len)
1459 --ext_len;
1460 TREE_INT_CST_ELT (nt, ext_len)
1461 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1462 for (unsigned int i = len; i < ext_len; ++i)
1463 TREE_INT_CST_ELT (nt, i) = -1;
1465 else if (TYPE_UNSIGNED (type)
1466 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1468 len--;
1469 TREE_INT_CST_ELT (nt, len)
1470 = zext_hwi (cst.elt (len),
1471 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1474 for (unsigned int i = 0; i < len; i++)
1475 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1476 TREE_TYPE (nt) = type;
1477 return nt;
1480 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1482 static tree
1483 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1484 CXX_MEM_STAT_INFO)
1486 size_t length = sizeof (struct tree_poly_int_cst);
1487 record_node_allocation_statistics (POLY_INT_CST, length);
1489 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1491 TREE_SET_CODE (t, POLY_INT_CST);
1492 TREE_CONSTANT (t) = 1;
1493 TREE_TYPE (t) = type;
1494 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1495 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1496 return t;
1499 /* Create a constant tree that contains CST sign-extended to TYPE. */
1501 tree
1502 build_int_cst (tree type, poly_int64 cst)
1504 /* Support legacy code. */
1505 if (!type)
1506 type = integer_type_node;
1508 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1511 /* Create a constant tree that contains CST zero-extended to TYPE. */
1513 tree
1514 build_int_cstu (tree type, poly_uint64 cst)
1516 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1519 /* Create a constant tree that contains CST sign-extended to TYPE. */
1521 tree
1522 build_int_cst_type (tree type, poly_int64 cst)
1524 gcc_assert (type);
1525 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
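/* Typical calls (illustrative):

     tree a = build_int_cst (integer_type_node, 42);     // signed 42
     tree b = build_int_cst (integer_type_node, -1);     // sign-extended
     tree c = build_int_cstu (size_type_node, 7);        // zero-extended
     tree d = build_int_cst_type (char_type_node, 300);  // reduced to char's
                                                         // precision, i.e. 44
                                                         // with 8-bit chars

   All of these funnel into wide_int_to_tree, so small values come back as
   shared, cached nodes.  */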
1528 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1529 of CST is assumed to be the same as the signedness of TYPE. */
1531 tree
1532 double_int_to_tree (tree type, double_int cst)
1534 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1537 /* We force the wide_int CST to the range of the type TYPE by sign or
1538 zero extending it. OVERFLOWABLE indicates whether we are interested in
1539 overflow of the value: when >0 we are only interested in signed
1540 overflow, for <0 we are interested in any overflow. OVERFLOWED
1541 indicates whether overflow has already occurred. We force the
1542 result's value to be within the range of TYPE (by setting to 0 or 1 all
1543 the bits outside the type's range). We set TREE_OVERFLOW if
1544 OVERFLOWED is nonzero,
1545 or OVERFLOWABLE is >0 and signed overflow occurs,
1546 or OVERFLOWABLE is <0 and any overflow occurs.
1547 We return a new tree node for the extended wide_int. The node
1548 is shared if no overflow flags are set. */
1552 tree
1553 force_fit_type (tree type, const poly_wide_int_ref &cst,
1554 int overflowable, bool overflowed)
1556 signop sign = TYPE_SIGN (type);
1558 /* If we need to set overflow flags, return a new unshared node. */
1559 if (overflowed || !wi::fits_to_tree_p (cst, type))
1561 if (overflowed
1562 || overflowable < 0
1563 || (overflowable > 0 && sign == SIGNED))
1565 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1566 sign);
1567 tree t;
1568 if (tmp.is_constant ())
1569 t = build_new_int_cst (type, tmp.coeffs[0]);
1570 else
1572 tree coeffs[NUM_POLY_INT_COEFFS];
1573 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1575 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1576 TREE_OVERFLOW (coeffs[i]) = 1;
1578 t = build_new_poly_int_cst (type, coeffs);
1580 TREE_OVERFLOW (t) = 1;
1581 return t;
1585 /* Else build a shared node. */
1586 return wide_int_to_tree (type, cst);
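/* Illustrative example (assumes the usual 8-bit unsigned_char_type_node):

     wide_int w = wi::uhwi (300, TYPE_PRECISION (integer_type_node));
     tree t = force_fit_type (unsigned_char_type_node, w, -1, false);

   300 does not fit in 8 bits and OVERFLOWABLE < 0 means any overflow counts,
   so T is a fresh, unshared node with value 44 and TREE_OVERFLOW set. With
   OVERFLOWABLE == 1 the same call on this unsigned type would instead return
   the shared constant 44 without TREE_OVERFLOW.  */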
1589 /* These are the hash table functions for the hash table of INTEGER_CST
1590 nodes of a sizetype. */
1592 /* Return the hash code X, an INTEGER_CST. */
1594 hashval_t
1595 int_cst_hasher::hash (tree x)
1597 const_tree const t = x;
1598 hashval_t code = TYPE_UID (TREE_TYPE (t));
1599 int i;
1601 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1602 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1604 return code;
1607 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1608 is the same as the value represented by *Y. */
1610 bool
1611 int_cst_hasher::equal (tree x, tree y)
1613 const_tree const xt = x;
1614 const_tree const yt = y;
1616 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1617 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1618 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1619 return false;
1621 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1622 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1623 return false;
1625 return true;
1628 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1629 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1630 number of slots that can be cached for the type. */
1632 static inline tree
1633 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1634 int slot, int max_slots)
1636 gcc_checking_assert (slot >= 0);
1637 /* Initialize cache. */
1638 if (!TYPE_CACHED_VALUES_P (type))
1640 TYPE_CACHED_VALUES_P (type) = 1;
1641 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1643 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1644 if (!t)
1646 /* Create a new shared int. */
1647 t = build_new_int_cst (type, cst);
1648 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1650 return t;
1653 /* Create an INT_CST node of TYPE and value CST.
1654 The returned node is always shared. For small integers we use a
1655 per-type vector cache, for larger ones we use a single hash table.
1656 The value is extended from its precision according to the sign of
1657 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1658 the upper bits and ensures that hashing and value equality based
1659 upon the underlying HOST_WIDE_INTs works without masking. */
1661 static tree
1662 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1664 tree t;
1665 int ix = -1;
1666 int limit = 0;
1668 gcc_assert (type);
1669 unsigned int prec = TYPE_PRECISION (type);
1670 signop sgn = TYPE_SIGN (type);
1672 /* Verify that everything is canonical. */
1673 int l = pcst.get_len ();
1674 if (l > 1)
1676 if (pcst.elt (l - 1) == 0)
1677 gcc_checking_assert (pcst.elt (l - 2) < 0);
1678 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1679 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1682 wide_int cst = wide_int::from (pcst, prec, sgn);
1683 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1685 enum tree_code code = TREE_CODE (type);
1686 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1688 /* Cache NULL pointer and zero bounds. */
1689 if (cst == 0)
1690 ix = 0;
1691 /* Cache upper bounds of pointers. */
1692 else if (cst == wi::max_value (prec, sgn))
1693 ix = 1;
1694 /* Cache 1 which is used for a non-zero range. */
1695 else if (cst == 1)
1696 ix = 2;
1698 if (ix >= 0)
1700 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1701 /* Make sure no one is clobbering the shared constant. */
1702 gcc_checking_assert (TREE_TYPE (t) == type
1703 && cst == wi::to_wide (t));
1704 return t;
1707 if (ext_len == 1)
1709 /* We just need to store a single HOST_WIDE_INT. */
1710 HOST_WIDE_INT hwi;
1711 if (TYPE_UNSIGNED (type))
1712 hwi = cst.to_uhwi ();
1713 else
1714 hwi = cst.to_shwi ();
1716 switch (code)
1718 case NULLPTR_TYPE:
1719 gcc_assert (hwi == 0);
1720 /* Fallthru. */
1722 case POINTER_TYPE:
1723 case REFERENCE_TYPE:
1724 /* Ignore pointers, as they were already handled above. */
1725 break;
1727 case BOOLEAN_TYPE:
1728 /* Cache false or true. */
1729 limit = 2;
1730 if (IN_RANGE (hwi, 0, 1))
1731 ix = hwi;
1732 break;
1734 case INTEGER_TYPE:
1735 case OFFSET_TYPE:
1736 case BITINT_TYPE:
1737 if (TYPE_SIGN (type) == UNSIGNED)
1739 /* Cache [0, N). */
1740 limit = param_integer_share_limit;
1741 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1742 ix = hwi;
1744 else
1746 /* Cache [-1, N). */
1747 limit = param_integer_share_limit + 1;
1748 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1749 ix = hwi + 1;
1751 break;
1753 case ENUMERAL_TYPE:
1754 break;
1756 default:
1757 gcc_unreachable ();
1760 if (ix >= 0)
1762 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1763 /* Make sure no one is clobbering the shared constant. */
1764 gcc_checking_assert (TREE_TYPE (t) == type
1765 && TREE_INT_CST_NUNITS (t) == 1
1766 && TREE_INT_CST_EXT_NUNITS (t) == 1
1767 && TREE_INT_CST_ELT (t, 0) == hwi);
1768 return t;
1770 else
1772 /* Use the cache of larger shared ints, using int_cst_node as
1773 a temporary. */
1775 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1776 TREE_TYPE (int_cst_node) = type;
1778 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1779 t = *slot;
1780 if (!t)
1782 /* Insert this one into the hash table. */
1783 t = int_cst_node;
1784 *slot = t;
1785 /* Make a new node for next time round. */
1786 int_cst_node = make_int_cst (1, 1);
1790 else
1792 /* The value either hashes properly or we drop it on the floor
1793 for the gc to take care of. There will not be enough of them
1794 to worry about. */
1796 tree nt = build_new_int_cst (type, cst);
1797 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1798 t = *slot;
1799 if (!t)
1801 /* Insert this one into the hash table. */
1802 t = nt;
1803 *slot = t;
1805 else
1806 ggc_free (nt);
1809 return t;
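/* Net effect (illustrative, assuming the default --param integer-share-limit
   of 256):

     tree a = build_int_cst (integer_type_node, 5);
     tree b = build_int_cst (integer_type_node, 5);
     // a == b: 5 lives in integer_type_node's TYPE_CACHED_VALUES vector.

     tree c = build_int_cst (integer_type_node, 123456789);
     tree d = build_int_cst (integer_type_node, 123456789);
     // c == d too, but shared through int_cst_hash_table rather than the
     // per-type cache.  */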
1812 hashval_t
1813 poly_int_cst_hasher::hash (tree t)
1815 inchash::hash hstate;
1817 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1818 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1819 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1821 return hstate.end ();
1824 bool
1825 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1827 if (TREE_TYPE (x) != y.first)
1828 return false;
1829 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1830 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1831 return false;
1832 return true;
1835 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1836 The elements must also have type TYPE. */
1838 tree
1839 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1841 unsigned int prec = TYPE_PRECISION (type);
1842 gcc_assert (prec <= values.coeffs[0].get_precision ());
1843 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1845 inchash::hash h;
1846 h.add_int (TYPE_UID (type));
1847 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1848 h.add_wide_int (c.coeffs[i]);
1849 poly_int_cst_hasher::compare_type comp (type, &c);
1850 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1851 INSERT);
1852 if (*slot == NULL_TREE)
1854 tree coeffs[NUM_POLY_INT_COEFFS];
1855 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1856 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1857 *slot = build_new_poly_int_cst (type, coeffs);
1859 return *slot;
1862 /* Create a constant tree with value VALUE in type TYPE. */
1864 tree
1865 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1867 if (value.is_constant ())
1868 return wide_int_to_tree_1 (type, value.coeffs[0]);
1869 return build_poly_int_cst (type, value);
1872 /* Insert INTEGER_CST T into a cache of integer constants, and return
1873 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1874 is false, and T falls into the type's 'smaller values' range, there
1875 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1876 or the value is large, should an existing entry exist, it is
1877 returned (rather than inserting T). */
1879 tree
1880 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1882 tree type = TREE_TYPE (t);
1883 int ix = -1;
1884 int limit = 0;
1885 int prec = TYPE_PRECISION (type);
1887 gcc_assert (!TREE_OVERFLOW (t));
1889 /* The caching indices here must match those in
1890 wide_int_to_tree_1. */
1891 switch (TREE_CODE (type))
1893 case NULLPTR_TYPE:
1894 gcc_checking_assert (integer_zerop (t));
1895 /* Fallthru. */
1897 case POINTER_TYPE:
1898 case REFERENCE_TYPE:
1900 if (integer_zerop (t))
1901 ix = 0;
1902 else if (integer_onep (t))
1903 ix = 2;
1905 if (ix >= 0)
1906 limit = 3;
1908 break;
1910 case BOOLEAN_TYPE:
1911 /* Cache false or true. */
1912 limit = 2;
1913 if (wi::ltu_p (wi::to_wide (t), 2))
1914 ix = TREE_INT_CST_ELT (t, 0);
1915 break;
1917 case INTEGER_TYPE:
1918 case OFFSET_TYPE:
1919 case BITINT_TYPE:
1920 if (TYPE_UNSIGNED (type))
1922 /* Cache 0..N */
1923 limit = param_integer_share_limit;
1925 /* This is a little hokey, but if the prec is smaller than
1926 what is necessary to hold param_integer_share_limit, then the
1927 obvious test will not get the correct answer. */
1928 if (prec < HOST_BITS_PER_WIDE_INT)
1930 if (tree_to_uhwi (t)
1931 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1932 ix = tree_to_uhwi (t);
1934 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1935 ix = tree_to_uhwi (t);
1937 else
1939 /* Cache -1..N */
1940 limit = param_integer_share_limit + 1;
1942 if (integer_minus_onep (t))
1943 ix = 0;
1944 else if (!wi::neg_p (wi::to_wide (t)))
1946 if (prec < HOST_BITS_PER_WIDE_INT)
1948 if (tree_to_shwi (t) < param_integer_share_limit)
1949 ix = tree_to_shwi (t) + 1;
1951 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1952 ix = tree_to_shwi (t) + 1;
1955 break;
1957 case ENUMERAL_TYPE:
1958 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1959 members. */
1960 break;
1962 default:
1963 gcc_unreachable ();
1966 if (ix >= 0)
1968 /* Look for it in the type's vector of small shared ints. */
1969 if (!TYPE_CACHED_VALUES_P (type))
1971 TYPE_CACHED_VALUES_P (type) = 1;
1972 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1975 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1977 gcc_checking_assert (might_duplicate);
1978 t = r;
1980 else
1981 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1983 else
1985 /* Use the cache of larger shared ints. */
1986 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1987 if (tree r = *slot)
1989 /* If there is already an entry for the number verify it's the
1990 same value. */
1991 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1992 /* And return the cached value. */
1993 t = r;
1995 else
1996 /* Otherwise insert this one into the hash table. */
1997 *slot = t;
2000 return t;
2004 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
2005 and the rest are zeros. */
2007 tree
2008 build_low_bits_mask (tree type, unsigned bits)
2010 gcc_assert (bits <= TYPE_PRECISION (type));
2012 return wide_int_to_tree (type, wi::mask (bits, false,
2013 TYPE_PRECISION (type)));
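/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   build_low_bits_mask (uint32_type_node, 3) produces the INTEGER_CST 7,
   i.e. wi::mask (3, false, 32) converted back into a tree of that type.  */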
2016 /* Checks that X is an integer constant that can be expressed in (unsigned)
2017 HOST_WIDE_INT without loss of precision. */
2019 bool
2020 cst_and_fits_in_hwi (const_tree x)
2022 return (TREE_CODE (x) == INTEGER_CST
2023 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2026 /* Build a newly constructed VECTOR_CST with the given values of
2027 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2029 tree
2030 make_vector (unsigned log2_npatterns,
2031 unsigned int nelts_per_pattern MEM_STAT_DECL)
2033 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2034 tree t;
2035 unsigned npatterns = 1 << log2_npatterns;
2036 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2037 unsigned length = (sizeof (struct tree_vector)
2038 + (encoded_nelts - 1) * sizeof (tree));
2040 record_node_allocation_statistics (VECTOR_CST, length);
2042 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2044 TREE_SET_CODE (t, VECTOR_CST);
2045 TREE_CONSTANT (t) = 1;
2046 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2047 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2049 return t;
2052 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2053 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2055 tree
2056 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2058 if (vec_safe_length (v) == 0)
2059 return build_zero_cst (type);
2061 unsigned HOST_WIDE_INT idx, nelts;
2062 tree value;
2064 /* We can't construct a VECTOR_CST for a variable number of elements. */
2065 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2066 tree_vector_builder vec (type, nelts, 1);
2067 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2069 if (TREE_CODE (value) == VECTOR_CST)
2071 /* If NELTS is constant then this must be too. */
2072 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2073 for (unsigned i = 0; i < sub_nelts; ++i)
2074 vec.quick_push (VECTOR_CST_ELT (value, i));
2076 else
2077 vec.quick_push (value);
2079 while (vec.length () < nelts)
2080 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2082 return vec.build ();
2085 /* Build a vector of type VECTYPE where all the elements are SCs. */
2086 tree
2087 build_vector_from_val (tree vectype, tree sc)
2089 unsigned HOST_WIDE_INT i, nunits;
2091 if (sc == error_mark_node)
2092 return sc;
2094 /* Verify that the vector type is suitable for SC. Note that there
2095 is some inconsistency in the type-system with respect to restrict
2096 qualifications of pointers. Vector types always have a main-variant
2097 element type and the qualification is applied to the vector-type.
2098 So TREE_TYPE (vector-type) does not return a properly qualified
2099 vector element-type. */
2100 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2101 TREE_TYPE (vectype)));
2103 if (CONSTANT_CLASS_P (sc))
2105 tree_vector_builder v (vectype, 1, 1);
2106 v.quick_push (sc);
2107 return v.build ();
2109 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2110 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2111 else
2113 vec<constructor_elt, va_gc> *v;
2114 vec_alloc (v, nunits);
2115 for (i = 0; i < nunits; ++i)
2116 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2117 return build_constructor (vectype, v);
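/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   splatting a constant scalar across a fixed-width vector type, e.g.

     tree elt = build_int_cst (TREE_TYPE (v4si_type), 5);
     tree vec = build_vector_from_val (v4si_type, elt);

   yields the VECTOR_CST { 5, 5, 5, 5 }.  A non-constant SC instead goes
   through the VEC_DUPLICATE_EXPR or CONSTRUCTOR paths above.  Here
   V4SI_TYPE stands for any caller-provided four-element integer vector
   type.  */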
2121 /* If TYPE is not a vector type, just return SC, otherwise return
2122 build_vector_from_val (TYPE, SC). */
2124 tree
2125 build_uniform_cst (tree type, tree sc)
2127 if (!VECTOR_TYPE_P (type))
2128 return sc;
2130 return build_vector_from_val (type, sc);
2133 /* Build a vector series of type TYPE in which element I has the value
2134 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2135 and a VEC_SERIES_EXPR otherwise. */
2137 tree
2138 build_vec_series (tree type, tree base, tree step)
2140 if (integer_zerop (step))
2141 return build_vector_from_val (type, base);
2142 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2144 tree_vector_builder builder (type, 1, 3);
2145 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (base) + wi::to_wide (step));
2147 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2148 wi::to_wide (elt1) + wi::to_wide (step));
2149 builder.quick_push (base);
2150 builder.quick_push (elt1);
2151 builder.quick_push (elt2);
2152 return builder.build ();
2154 return build2 (VEC_SERIES_EXPR, type, base, step);
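/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   with constant operands, e.g. build_vec_series (v4si_type,
   integer_zero_node, integer_one_node), the three encoded elements
   { 0, 1, 2 } above describe the full series { 0, 1, 2, 3 }; a
   non-constant STEP makes the same call return a VEC_SERIES_EXPR.
   V4SI_TYPE again stands for a caller-provided integer vector type whose
   element type matches that of BASE.  */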
2157 /* Return a vector with the same number of units and number of bits
2158 as VEC_TYPE, but in which the elements are a linear series of unsigned
2159 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2161 tree
2162 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2164 tree index_vec_type = vec_type;
2165 tree index_elt_type = TREE_TYPE (vec_type);
2166 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2167 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2169 index_elt_type = build_nonstandard_integer_type
2170 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2171 index_vec_type = build_vector_type (index_elt_type, nunits);
2174 tree_vector_builder v (index_vec_type, 1, 3);
2175 for (unsigned int i = 0; i < 3; ++i)
2176 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2177 return v.build ();
2180 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2181 elements are A and the rest are B. */
2183 tree
2184 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2186 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2187 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2188 /* Optimize the constant case. */
2189 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2190 count /= 2;
2191 tree_vector_builder builder (vec_type, count, 2);
2192 for (unsigned int i = 0; i < count * 2; ++i)
2193 builder.quick_push (i < num_a ? a : b);
2194 return builder.build ();
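/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   build_vector_a_then_b (v4si_type, 1, a, b) with constant A and B builds
   the VECTOR_CST { A, B, B, B }: indices below NUM_A get A, the rest get
   B.  V4SI_TYPE, A and B are assumed to be supplied by the caller.  */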
2197 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2198 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2200 void
2201 recompute_constructor_flags (tree c)
2203 unsigned int i;
2204 tree val;
2205 bool constant_p = true;
2206 bool side_effects_p = false;
2207 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2209 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2211 /* Mostly ctors will have elts that don't have side-effects, so
2212 the usual case is to scan all the elements. Hence a single
2213 loop for both const and side effects, rather than one loop
2214 each (with early outs). */
2215 if (!TREE_CONSTANT (val))
2216 constant_p = false;
2217 if (TREE_SIDE_EFFECTS (val))
2218 side_effects_p = true;
2221 TREE_SIDE_EFFECTS (c) = side_effects_p;
2222 TREE_CONSTANT (c) = constant_p;
2225 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2226 CONSTRUCTOR C. */
2228 void
2229 verify_constructor_flags (tree c)
2231 unsigned int i;
2232 tree val;
2233 bool constant_p = TREE_CONSTANT (c);
2234 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2235 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2237 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2239 if (constant_p && !TREE_CONSTANT (val))
2240 internal_error ("non-constant element in constant CONSTRUCTOR");
2241 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2242 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2246 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2247 are in the vec pointed to by VALS. */
2248 tree
2249 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2251 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2253 TREE_TYPE (c) = type;
2254 CONSTRUCTOR_ELTS (c) = vals;
2256 recompute_constructor_flags (c);
2258 return c;
2261 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2262 INDEX and VALUE. */
2263 tree
2264 build_constructor_single (tree type, tree index, tree value)
2266 vec<constructor_elt, va_gc> *v;
2267 constructor_elt elt = {index, value};
2269 vec_alloc (v, 1);
2270 v->quick_push (elt);
2272 return build_constructor (type, v);
2276 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2277 are in a list pointed to by VALS. */
2278 tree
2279 build_constructor_from_list (tree type, tree vals)
2281 tree t;
2282 vec<constructor_elt, va_gc> *v = NULL;
2284 if (vals)
2286 vec_alloc (v, list_length (vals));
2287 for (t = vals; t; t = TREE_CHAIN (t))
2288 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2291 return build_constructor (type, v);
2294 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2295 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2296 fields in the constructor remain null. */
2298 tree
2299 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2301 vec<constructor_elt, va_gc> *v = NULL;
2303 for (tree t : vals)
2304 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2306 return build_constructor (type, v);
2309 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2310 of elements, provided as index/value pairs. */
2312 tree
2313 build_constructor_va (tree type, int nelts, ...)
2315 vec<constructor_elt, va_gc> *v = NULL;
2316 va_list p;
2318 va_start (p, nelts);
2319 vec_alloc (v, nelts);
2320 while (nelts--)
2322 tree index = va_arg (p, tree);
2323 tree value = va_arg (p, tree);
2324 CONSTRUCTOR_APPEND_ELT (v, index, value);
2326 va_end (p);
2327 return build_constructor (type, v);
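/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   building the aggregate initializer { [0] = x, [1] = y } from
   index/value pairs:

     tree ctor = build_constructor_va (array_type, 2,
                                       size_int (0), x,
                                       size_int (1), y);

   ARRAY_TYPE, X and Y are placeholders for caller-provided trees.  */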
2330 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2332 tree
2333 build_clobber (tree type, enum clobber_kind kind)
2335 tree clobber = build_constructor (type, NULL);
2336 TREE_THIS_VOLATILE (clobber) = true;
2337 CLOBBER_KIND (clobber) = kind;
2338 return clobber;
2341 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2343 tree
2344 build_fixed (tree type, FIXED_VALUE_TYPE f)
2346 tree v;
2347 FIXED_VALUE_TYPE *fp;
2349 v = make_node (FIXED_CST);
2350 fp = ggc_alloc<fixed_value> ();
2351 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2353 TREE_TYPE (v) = type;
2354 TREE_FIXED_CST_PTR (v) = fp;
2355 return v;
2358 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2360 tree
2361 build_real (tree type, REAL_VALUE_TYPE d)
2363 tree v;
2364 int overflow = 0;
2366 /* dconst{0,1,2,m1,half} are used in various places in
2367 the middle-end and optimizers; as an exception, allow
2368 them here even for decimal floating-point types by
2369 converting them to decimal. */
2370 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2371 && (d.cl == rvc_normal || d.cl == rvc_zero)
2372 && !d.decimal)
2374 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2375 decimal_real_from_string (&d, "1");
2376 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2377 decimal_real_from_string (&d, "2");
2378 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2379 decimal_real_from_string (&d, "-1");
2380 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2381 decimal_real_from_string (&d, "0.5");
2382 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2384 /* Make sure to give zero the minimum quantum exponent for
2385 the type (which corresponds to all bits zero). */
2386 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2387 char buf[16];
2388 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2389 decimal_real_from_string (&d, buf);
2391 else
2392 gcc_unreachable ();
2395 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2396 Consider doing it via real_convert now. */
2398 v = make_node (REAL_CST);
2399 TREE_TYPE (v) = type;
2400 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2401 TREE_OVERFLOW (v) = overflow;
2402 return v;
2405 /* Like build_real, but first truncate D to the type. */
2407 tree
2408 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2410 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2413 /* Return a new REAL_CST node whose type is TYPE
2414 and whose value is the integer value of the INTEGER_CST node I. */
2416 REAL_VALUE_TYPE
2417 real_value_from_int_cst (const_tree type, const_tree i)
2419 REAL_VALUE_TYPE d;
2421 /* Clear all bits of the real value type so that we can later do
2422 bitwise comparisons to see if two values are the same. */
2423 memset (&d, 0, sizeof d);
2425 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2426 TYPE_SIGN (TREE_TYPE (i)));
2427 return d;
2430 /* Given a tree representing an integer constant I, return a tree
2431 representing the same value as a floating-point constant of type TYPE. */
2433 tree
2434 build_real_from_int_cst (tree type, const_tree i)
2436 tree v;
2437 int overflow = TREE_OVERFLOW (i);
2439 v = build_real (type, real_value_from_int_cst (type, i));
2441 TREE_OVERFLOW (v) |= overflow;
2442 return v;
2445 /* Return a new REAL_CST node whose type is TYPE
2446 and whose value is the integer value I which has sign SGN. */
2448 tree
2449 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2451 REAL_VALUE_TYPE d;
2453 /* Clear all bits of the real value type so that we can later do
2454 bitwise comparisons to see if two values are the same. */
2455 memset (&d, 0, sizeof d);
2457 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2458 return build_real (type, d);
2461 /* Return a newly constructed STRING_CST node whose value is the LEN
2462 characters at STR when STR is nonnull, or all zeros otherwise.
2463 Note that for a C string literal, LEN should include the trailing NUL.
2464 The TREE_TYPE is not initialized. */
2466 tree
2467 build_string (unsigned len, const char *str /*= NULL */)
2469 /* Do not waste bytes provided by padding of struct tree_string. */
2470 unsigned size = len + offsetof (struct tree_string, str) + 1;
2472 record_node_allocation_statistics (STRING_CST, size);
2474 tree s = (tree) ggc_internal_alloc (size);
2476 memset (s, 0, sizeof (struct tree_typed));
2477 TREE_SET_CODE (s, STRING_CST);
2478 TREE_CONSTANT (s) = 1;
2479 TREE_STRING_LENGTH (s) = len;
2480 if (str)
2481 memcpy (s->string.str, str, len);
2482 else
2483 memset (s->string.str, 0, len);
2484 s->string.str[len] = '\0';
2486 return s;
2489 /* Return a newly constructed COMPLEX_CST node whose value is
2490 specified by the real and imaginary parts REAL and IMAG.
2491 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2492 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2494 tree
2495 build_complex (tree type, tree real, tree imag)
2497 gcc_assert (CONSTANT_CLASS_P (real));
2498 gcc_assert (CONSTANT_CLASS_P (imag));
2500 tree t = make_node (COMPLEX_CST);
2502 TREE_REALPART (t) = real;
2503 TREE_IMAGPART (t) = imag;
2504 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2505 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2506 return t;
2509 /* Build a complex (inf +- 0i), such as for the result of cproj.
2510 TYPE is the complex tree type of the result. If NEG is true, the
2511 imaginary zero is negative. */
2513 tree
2514 build_complex_inf (tree type, bool neg)
2516 REAL_VALUE_TYPE rzero = dconst0;
2518 rzero.sign = neg;
2519 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2520 build_real (TREE_TYPE (type), rzero));
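/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   build_complex_inf (complex_float_type_node, false) is the constant
   (+Inf + 0.0i) used when folding cproj of an infinite argument; passing
   NEG as true flips only the sign of the imaginary zero, giving
   (+Inf - 0.0i).  */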
2523 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2524 element is set to 1. In particular, this is 1 + i for complex types. */
2526 tree
2527 build_each_one_cst (tree type)
2529 if (TREE_CODE (type) == COMPLEX_TYPE)
2531 tree scalar = build_one_cst (TREE_TYPE (type));
2532 return build_complex (type, scalar, scalar);
2534 else
2535 return build_one_cst (type);
2538 /* Return a constant of arithmetic type TYPE which is the
2539 multiplicative identity of the set TYPE. */
2541 tree
2542 build_one_cst (tree type)
2544 switch (TREE_CODE (type))
2546 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2547 case POINTER_TYPE: case REFERENCE_TYPE:
2548 case OFFSET_TYPE: case BITINT_TYPE:
2549 return build_int_cst (type, 1);
2551 case REAL_TYPE:
2552 return build_real (type, dconst1);
2554 case FIXED_POINT_TYPE:
2555 /* We can only generate 1 for accum types. */
2556 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2557 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2559 case VECTOR_TYPE:
2561 tree scalar = build_one_cst (TREE_TYPE (type));
2563 return build_vector_from_val (type, scalar);
2566 case COMPLEX_TYPE:
2567 return build_complex (type,
2568 build_one_cst (TREE_TYPE (type)),
2569 build_zero_cst (TREE_TYPE (type)));
2571 default:
2572 gcc_unreachable ();
2576 /* Return an integer of type TYPE containing all 1's in as much precision as
2577 it contains, or a complex or vector whose subparts are such integers. */
2579 tree
2580 build_all_ones_cst (tree type)
2582 if (TREE_CODE (type) == COMPLEX_TYPE)
2584 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2585 return build_complex (type, scalar, scalar);
2587 else
2588 return build_minus_one_cst (type);
2591 /* Return a constant of arithmetic type TYPE which is the
2592 opposite of the multiplicative identity of the set TYPE. */
2594 tree
2595 build_minus_one_cst (tree type)
2597 switch (TREE_CODE (type))
2599 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2600 case POINTER_TYPE: case REFERENCE_TYPE:
2601 case OFFSET_TYPE: case BITINT_TYPE:
2602 return build_int_cst (type, -1);
2604 case REAL_TYPE:
2605 return build_real (type, dconstm1);
2607 case FIXED_POINT_TYPE:
2608 /* We can only generate -1 for accum types. */
2609 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2610 return build_fixed (type,
2611 fixed_from_double_int (double_int_minus_one,
2612 SCALAR_TYPE_MODE (type)));
2614 case VECTOR_TYPE:
2616 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2618 return build_vector_from_val (type, scalar);
2621 case COMPLEX_TYPE:
2622 return build_complex (type,
2623 build_minus_one_cst (TREE_TYPE (type)),
2624 build_zero_cst (TREE_TYPE (type)));
2626 default:
2627 gcc_unreachable ();
2631 /* Build 0 constant of type TYPE. This is used by constructor folding
2632 and thus the constant should be represented in memory by
2633 zero(es). */
2635 tree
2636 build_zero_cst (tree type)
2638 switch (TREE_CODE (type))
2640 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2641 case POINTER_TYPE: case REFERENCE_TYPE:
2642 case OFFSET_TYPE: case NULLPTR_TYPE: case BITINT_TYPE:
2643 return build_int_cst (type, 0);
2645 case REAL_TYPE:
2646 return build_real (type, dconst0);
2648 case FIXED_POINT_TYPE:
2649 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2651 case VECTOR_TYPE:
2653 tree scalar = build_zero_cst (TREE_TYPE (type));
2655 return build_vector_from_val (type, scalar);
2658 case COMPLEX_TYPE:
2660 tree zero = build_zero_cst (TREE_TYPE (type));
2662 return build_complex (type, zero, zero);
2665 default:
2666 if (!AGGREGATE_TYPE_P (type))
2667 return fold_convert (type, integer_zero_node);
2668 return build_constructor (type, NULL);
2672 /* Build a constant of integer type TYPE, made of VALUE's bits replicated
2673 every WIDTH bits to fit TYPE's precision. */
2675 tree
2676 build_replicated_int_cst (tree type, unsigned int width, HOST_WIDE_INT value)
2678 int n = ((TYPE_PRECISION (type) + HOST_BITS_PER_WIDE_INT - 1)
2679 / HOST_BITS_PER_WIDE_INT);
2680 unsigned HOST_WIDE_INT low, mask;
2681 HOST_WIDE_INT a[WIDE_INT_MAX_INL_ELTS];
2682 int i;
2684 gcc_assert (n && n <= WIDE_INT_MAX_INL_ELTS);
2686 if (width == HOST_BITS_PER_WIDE_INT)
2687 low = value;
2688 else
2690 mask = ((HOST_WIDE_INT)1 << width) - 1;
2691 low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask);
2694 for (i = 0; i < n; i++)
2695 a[i] = low;
2697 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
2698 return wide_int_to_tree (type, wide_int::from_array (a, n,
2699 TYPE_PRECISION (type)));
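/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   build_replicated_int_cst (uint32_type_node, 8, 0xab) yields the
   constant 0xabababab: LOW above becomes ~0 / 0xff * 0xab in every
   HOST_WIDE_INT element before the result is narrowed to the type's
   precision.  */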
2702 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2703 unsigned constant in which only the sign bit is set. Return null
2704 otherwise. */
2706 tree
2707 sign_mask_for (tree type)
2709 /* Avoid having to choose between a real-only sign and a pair of signs.
2710 This could be relaxed if the choice becomes obvious later. */
2711 if (TREE_CODE (type) == COMPLEX_TYPE)
2712 return NULL_TREE;
2714 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2715 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2716 if (!bits || !pow2p_hwi (bits))
2717 return NULL_TREE;
2719 tree inttype = unsigned_type_for (type);
2720 if (!inttype)
2721 return NULL_TREE;
2723 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2724 if (VECTOR_TYPE_P (inttype))
2726 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2727 return build_vector_from_val (inttype, elt);
2729 return wide_int_to_tree (inttype, mask);
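/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   for the 32-bit IEEE float type, sign_mask_for (float_type_node) returns
   the constant 0x80000000 in the corresponding 32-bit unsigned type;
   complex types and non-IEEE formats yield NULL_TREE as above.  */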
2732 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2734 tree
2735 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2737 tree t;
2738 size_t length = (offsetof (struct tree_binfo, base_binfos)
2739 + vec<tree, va_gc>::embedded_size (base_binfos));
2741 record_node_allocation_statistics (TREE_BINFO, length);
2743 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2745 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2747 TREE_SET_CODE (t, TREE_BINFO);
2749 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2751 return t;
2754 /* Create a CASE_LABEL_EXPR tree node and return it. */
2756 tree
2757 build_case_label (tree low_value, tree high_value, tree label_decl)
2759 tree t = make_node (CASE_LABEL_EXPR);
2761 TREE_TYPE (t) = void_type_node;
2762 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2764 CASE_LOW (t) = low_value;
2765 CASE_HIGH (t) = high_value;
2766 CASE_LABEL (t) = label_decl;
2767 CASE_CHAIN (t) = NULL_TREE;
2769 return t;
2772 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2773 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2774 The latter determines the length of the HOST_WIDE_INT vector. */
2776 tree
2777 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2779 tree t;
2780 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2781 + sizeof (struct tree_int_cst));
2783 gcc_assert (len);
2784 record_node_allocation_statistics (INTEGER_CST, length);
2786 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2788 TREE_SET_CODE (t, INTEGER_CST);
2789 TREE_INT_CST_NUNITS (t) = len;
2790 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2791 TREE_CONSTANT (t) = 1;
2793 return t;
2796 /* Build a newly constructed TREE_VEC node of length LEN. */
2798 tree
2799 make_tree_vec (int len MEM_STAT_DECL)
2801 tree t;
2802 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2804 record_node_allocation_statistics (TREE_VEC, length);
2806 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2808 TREE_SET_CODE (t, TREE_VEC);
2809 TREE_VEC_LENGTH (t) = len;
2811 return t;
2814 /* Grow a TREE_VEC node to new length LEN. */
2816 tree
2817 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2819 gcc_assert (TREE_CODE (v) == TREE_VEC);
2821 int oldlen = TREE_VEC_LENGTH (v);
2822 gcc_assert (len > oldlen);
2824 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2825 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2827 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2829 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2831 TREE_VEC_LENGTH (v) = len;
2833 return v;
2836 /* Return true if EXPR is the constant zero, whether it is integral, float or
2837 fixed, and scalar, complex or vector. */
2839 bool
2840 zerop (const_tree expr)
2842 return (integer_zerop (expr)
2843 || real_zerop (expr)
2844 || fixed_zerop (expr));
2847 /* Return true if EXPR is the integer constant zero or a complex constant
2848 of zero, or a location wrapper for such a constant. */
2850 bool
2851 integer_zerop (const_tree expr)
2853 STRIP_ANY_LOCATION_WRAPPER (expr);
2855 switch (TREE_CODE (expr))
2857 case INTEGER_CST:
2858 return wi::to_wide (expr) == 0;
2859 case COMPLEX_CST:
2860 return (integer_zerop (TREE_REALPART (expr))
2861 && integer_zerop (TREE_IMAGPART (expr)));
2862 case VECTOR_CST:
2863 return (VECTOR_CST_NPATTERNS (expr) == 1
2864 && VECTOR_CST_DUPLICATE_P (expr)
2865 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2866 default:
2867 return false;
2871 /* Return true if EXPR is the integer constant one or the corresponding
2872 complex constant, or a location wrapper for such a constant. */
2874 bool
2875 integer_onep (const_tree expr)
2877 STRIP_ANY_LOCATION_WRAPPER (expr);
2879 switch (TREE_CODE (expr))
2881 case INTEGER_CST:
2882 return wi::eq_p (wi::to_widest (expr), 1);
2883 case COMPLEX_CST:
2884 return (integer_onep (TREE_REALPART (expr))
2885 && integer_zerop (TREE_IMAGPART (expr)));
2886 case VECTOR_CST:
2887 return (VECTOR_CST_NPATTERNS (expr) == 1
2888 && VECTOR_CST_DUPLICATE_P (expr)
2889 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2890 default:
2891 return false;
2895 /* Return true if EXPR is the integer constant one. For complex and vector,
2896 return true if every piece is the integer constant one.
2897 Also return true for location wrappers for such a constant. */
2899 bool
2900 integer_each_onep (const_tree expr)
2902 STRIP_ANY_LOCATION_WRAPPER (expr);
2904 if (TREE_CODE (expr) == COMPLEX_CST)
2905 return (integer_onep (TREE_REALPART (expr))
2906 && integer_onep (TREE_IMAGPART (expr)));
2907 else
2908 return integer_onep (expr);
2911 /* Return true if EXPR is an integer containing all 1's in as much precision
2912 as it contains, or a complex or vector whose subparts are such integers,
2913 or a location wrapper for such a constant. */
2915 bool
2916 integer_all_onesp (const_tree expr)
2918 STRIP_ANY_LOCATION_WRAPPER (expr);
2920 if (TREE_CODE (expr) == COMPLEX_CST
2921 && integer_all_onesp (TREE_REALPART (expr))
2922 && integer_all_onesp (TREE_IMAGPART (expr)))
2923 return true;
2925 else if (TREE_CODE (expr) == VECTOR_CST)
2926 return (VECTOR_CST_NPATTERNS (expr) == 1
2927 && VECTOR_CST_DUPLICATE_P (expr)
2928 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2930 else if (TREE_CODE (expr) != INTEGER_CST)
2931 return false;
2933 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2934 == wi::to_wide (expr));
2937 /* Return true if EXPR is the integer constant minus one, or a location
2938 wrapper for such a constant. */
2940 bool
2941 integer_minus_onep (const_tree expr)
2943 STRIP_ANY_LOCATION_WRAPPER (expr);
2945 if (TREE_CODE (expr) == COMPLEX_CST)
2946 return (integer_all_onesp (TREE_REALPART (expr))
2947 && integer_zerop (TREE_IMAGPART (expr)));
2948 else
2949 return integer_all_onesp (expr);
2952 /* Return true if EXPR is an integer constant that is a power of 2 (i.e., has
2953 only one bit on), or a location wrapper for such a constant. */
2955 bool
2956 integer_pow2p (const_tree expr)
2958 STRIP_ANY_LOCATION_WRAPPER (expr);
2960 if (TREE_CODE (expr) == COMPLEX_CST
2961 && integer_pow2p (TREE_REALPART (expr))
2962 && integer_zerop (TREE_IMAGPART (expr)))
2963 return true;
2965 if (TREE_CODE (expr) != INTEGER_CST)
2966 return false;
2968 return wi::popcount (wi::to_wide (expr)) == 1;
2971 /* Return true if EXPR is an integer constant other than zero or a
2972 complex constant other than zero, or a location wrapper for such a
2973 constant. */
2975 bool
2976 integer_nonzerop (const_tree expr)
2978 STRIP_ANY_LOCATION_WRAPPER (expr);
2980 return ((TREE_CODE (expr) == INTEGER_CST
2981 && wi::to_wide (expr) != 0)
2982 || (TREE_CODE (expr) == COMPLEX_CST
2983 && (integer_nonzerop (TREE_REALPART (expr))
2984 || integer_nonzerop (TREE_IMAGPART (expr)))));
2987 /* Return true if EXPR is the integer constant one. For vector,
2988 return true if every piece is the integer constant minus one
2989 (representing the value TRUE).
2990 Also return true for location wrappers for such a constant. */
2992 bool
2993 integer_truep (const_tree expr)
2995 STRIP_ANY_LOCATION_WRAPPER (expr);
2997 if (TREE_CODE (expr) == VECTOR_CST)
2998 return integer_all_onesp (expr);
2999 return integer_onep (expr);
3002 /* Return true if EXPR is the fixed-point constant zero, or a location wrapper
3003 for such a constant. */
3005 bool
3006 fixed_zerop (const_tree expr)
3008 STRIP_ANY_LOCATION_WRAPPER (expr);
3010 return (TREE_CODE (expr) == FIXED_CST
3011 && TREE_FIXED_CST (expr).data.is_zero ());
3014 /* Return the power of two represented by a tree node known to be a
3015 power of two. */
3017 int
3018 tree_log2 (const_tree expr)
3020 if (TREE_CODE (expr) == COMPLEX_CST)
3021 return tree_log2 (TREE_REALPART (expr));
3023 return wi::exact_log2 (wi::to_wide (expr));
3026 /* Similar, but return the largest integer Y such that 2 ** Y is less
3027 than or equal to EXPR. */
3029 int
3030 tree_floor_log2 (const_tree expr)
3032 if (TREE_CODE (expr) == COMPLEX_CST)
3033 return tree_log2 (TREE_REALPART (expr));
3035 return wi::floor_log2 (wi::to_wide (expr));
3038 /* Return the number of known trailing zero bits in EXPR, or, if the value
3039 of EXPR is known to be zero, the precision of its type. */
3041 unsigned int
3042 tree_ctz (const_tree expr)
3044 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3045 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3046 return 0;
3048 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3049 switch (TREE_CODE (expr))
3051 case INTEGER_CST:
3052 ret1 = wi::ctz (wi::to_wide (expr));
3053 return MIN (ret1, prec);
3054 case SSA_NAME:
3055 ret1 = wi::ctz (get_nonzero_bits (expr));
3056 return MIN (ret1, prec);
3057 case PLUS_EXPR:
3058 case MINUS_EXPR:
3059 case BIT_IOR_EXPR:
3060 case BIT_XOR_EXPR:
3061 case MIN_EXPR:
3062 case MAX_EXPR:
3063 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3064 if (ret1 == 0)
3065 return ret1;
3066 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3067 return MIN (ret1, ret2);
3068 case POINTER_PLUS_EXPR:
3069 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3070 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3071 /* Second operand is sizetype, which could in theory be
3072 wider than the pointer's precision. Make sure we never
3073 return more than prec. */
3074 ret2 = MIN (ret2, prec);
3075 return MIN (ret1, ret2);
3076 case BIT_AND_EXPR:
3077 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3078 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3079 return MAX (ret1, ret2);
3080 case MULT_EXPR:
3081 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3082 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3083 return MIN (ret1 + ret2, prec);
3084 case LSHIFT_EXPR:
3085 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3086 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3087 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3089 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3090 return MIN (ret1 + ret2, prec);
3092 return ret1;
3093 case RSHIFT_EXPR:
3094 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3095 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3097 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3098 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3099 if (ret1 > ret2)
3100 return ret1 - ret2;
3102 return 0;
3103 case TRUNC_DIV_EXPR:
3104 case CEIL_DIV_EXPR:
3105 case FLOOR_DIV_EXPR:
3106 case ROUND_DIV_EXPR:
3107 case EXACT_DIV_EXPR:
3108 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3109 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3111 int l = tree_log2 (TREE_OPERAND (expr, 1));
3112 if (l >= 0)
3114 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3115 ret2 = l;
3116 if (ret1 > ret2)
3117 return ret1 - ret2;
3120 return 0;
3121 CASE_CONVERT:
3122 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3123 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3124 ret1 = prec;
3125 return MIN (ret1, prec);
3126 case SAVE_EXPR:
3127 return tree_ctz (TREE_OPERAND (expr, 0));
3128 case COND_EXPR:
3129 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3130 if (ret1 == 0)
3131 return 0;
3132 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3133 return MIN (ret1, ret2);
3134 case COMPOUND_EXPR:
3135 return tree_ctz (TREE_OPERAND (expr, 1));
3136 case ADDR_EXPR:
3137 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3138 if (ret1 > BITS_PER_UNIT)
3140 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3141 return MIN (ret1, prec);
3143 return 0;
3144 default:
3145 return 0;
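/* Illustrative example (editorial sketch, not part of upstream tree.cc)
   of the recursion above: if an SSA_NAME N is known via get_nonzero_bits
   to be a multiple of 8, tree_ctz (N) is at least 3, and for N * 4 the
   MULT_EXPR case adds the operands' counts, giving at least 5 known
   trailing zero bits (capped at the type's precision).  */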
3149 /* Return true if EXPR is the real constant zero. Trailing zeroes matter for
3150 decimal float constants, so don't return true for them.
3151 Also return true for location wrappers around such a constant. */
3153 bool
3154 real_zerop (const_tree expr)
3156 STRIP_ANY_LOCATION_WRAPPER (expr);
3158 switch (TREE_CODE (expr))
3160 case REAL_CST:
3161 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3162 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3163 case COMPLEX_CST:
3164 return real_zerop (TREE_REALPART (expr))
3165 && real_zerop (TREE_IMAGPART (expr));
3166 case VECTOR_CST:
3168 /* Don't simply check for a duplicate because the predicate
3169 accepts both +0.0 and -0.0. */
3170 unsigned count = vector_cst_encoded_nelts (expr);
3171 for (unsigned int i = 0; i < count; ++i)
3172 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3173 return false;
3174 return true;
3176 default:
3177 return false;
3181 /* Return true if EXPR is the real constant one in real or complex form.
3182 Trailing zeroes matter for decimal float constants, so don't return
3183 true for them.
3184 Also return true for location wrappers around such a constant. */
3186 bool
3187 real_onep (const_tree expr)
3189 STRIP_ANY_LOCATION_WRAPPER (expr);
3191 switch (TREE_CODE (expr))
3193 case REAL_CST:
3194 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3195 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3196 case COMPLEX_CST:
3197 return real_onep (TREE_REALPART (expr))
3198 && real_zerop (TREE_IMAGPART (expr));
3199 case VECTOR_CST:
3200 return (VECTOR_CST_NPATTERNS (expr) == 1
3201 && VECTOR_CST_DUPLICATE_P (expr)
3202 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3203 default:
3204 return false;
3208 /* Return true if EXPR is the real constant minus one. Trailing zeroes
3209 matter for decimal float constants, so don't return true for them.
3210 Also return true for location wrappers around such a constant. */
3212 bool
3213 real_minus_onep (const_tree expr)
3215 STRIP_ANY_LOCATION_WRAPPER (expr);
3217 switch (TREE_CODE (expr))
3219 case REAL_CST:
3220 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3221 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3222 case COMPLEX_CST:
3223 return real_minus_onep (TREE_REALPART (expr))
3224 && real_zerop (TREE_IMAGPART (expr));
3225 case VECTOR_CST:
3226 return (VECTOR_CST_NPATTERNS (expr) == 1
3227 && VECTOR_CST_DUPLICATE_P (expr)
3228 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3229 default:
3230 return false;
3234 /* Return true if EXPR could be a floating-point zero. */
3236 bool
3237 real_maybe_zerop (const_tree expr)
3239 switch (TREE_CODE (expr))
3241 case REAL_CST:
3242 /* Can't use real_zerop here, as it always returns false for decimal
3243 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3244 either, as decimal zeros are rvc_normal. */
3245 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3246 case COMPLEX_CST:
3247 return (real_maybe_zerop (TREE_REALPART (expr))
3248 || real_maybe_zerop (TREE_IMAGPART (expr)));
3249 case VECTOR_CST:
3251 unsigned count = vector_cst_encoded_nelts (expr);
3252 for (unsigned int i = 0; i < count; ++i)
3253 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3254 return true;
3255 return false;
3257 default:
3258 /* Perhaps for SSA_NAMEs we could query frange. */
3259 return true;
3263 /* True if EXP is a constant or a cast of a constant. */
3265 bool
3266 really_constant_p (const_tree exp)
3268 /* This is not quite the same as STRIP_NOPS. It does more. */
3269 while (CONVERT_EXPR_P (exp)
3270 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3271 exp = TREE_OPERAND (exp, 0);
3272 return TREE_CONSTANT (exp);
3275 /* Return true if T holds a polynomial pointer difference, storing it in
3276 *VALUE if so. A true return means that T's precision is no greater
3277 than 64 bits, which is the largest address space we support, so *VALUE
3278 never loses precision. However, the signedness of the result does
3279 not necessarily match the signedness of T: sometimes an unsigned type
3280 like sizetype is used to encode a value that is actually negative. */
3282 bool
3283 ptrdiff_tree_p (const_tree t, poly_int64 *value)
3285 if (!t)
3286 return false;
3287 if (TREE_CODE (t) == INTEGER_CST)
3289 if (!cst_and_fits_in_hwi (t))
3290 return false;
3291 *value = int_cst_value (t);
3292 return true;
3294 if (POLY_INT_CST_P (t))
3296 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3297 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3298 return false;
3299 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3300 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3301 return true;
3303 return false;
3306 poly_int64
3307 tree_to_poly_int64 (const_tree t)
3309 gcc_assert (tree_fits_poly_int64_p (t));
3310 if (POLY_INT_CST_P (t))
3311 return poly_int_cst_value (t).force_shwi ();
3312 return TREE_INT_CST_LOW (t);
3315 poly_uint64
3316 tree_to_poly_uint64 (const_tree t)
3318 gcc_assert (tree_fits_poly_uint64_p (t));
3319 if (POLY_INT_CST_P (t))
3320 return poly_int_cst_value (t).force_uhwi ();
3321 return TREE_INT_CST_LOW (t);
3324 /* Return first list element whose TREE_VALUE is ELEM.
3325 Return 0 if ELEM is not in LIST. */
3327 tree
3328 value_member (tree elem, tree list)
3330 while (list)
3332 if (elem == TREE_VALUE (list))
3333 return list;
3334 list = TREE_CHAIN (list);
3336 return NULL_TREE;
3339 /* Return first list element whose TREE_PURPOSE is ELEM.
3340 Return 0 if ELEM is not in LIST. */
3342 tree
3343 purpose_member (const_tree elem, tree list)
3345 while (list)
3347 if (elem == TREE_PURPOSE (list))
3348 return list;
3349 list = TREE_CHAIN (list);
3351 return NULL_TREE;
3354 /* Return true if ELEM is in V. */
3356 bool
3357 vec_member (const_tree elem, vec<tree, va_gc> *v)
3359 unsigned ix;
3360 tree t;
3361 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3362 if (elem == t)
3363 return true;
3364 return false;
3367 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3368 NULL_TREE. */
3370 tree
3371 chain_index (int idx, tree chain)
3373 for (; chain && idx > 0; --idx)
3374 chain = TREE_CHAIN (chain);
3375 return chain;
3378 /* Return true if ELEM is part of the chain CHAIN. */
3380 bool
3381 chain_member (const_tree elem, const_tree chain)
3383 while (chain)
3385 if (elem == chain)
3386 return true;
3387 chain = DECL_CHAIN (chain);
3390 return false;
3393 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3394 We expect a null pointer to mark the end of the chain.
3395 This is the Lisp primitive `length'. */
3397 int
3398 list_length (const_tree t)
3400 const_tree p = t;
3401 #ifdef ENABLE_TREE_CHECKING
3402 const_tree q = t;
3403 #endif
3404 int len = 0;
3406 while (p)
3408 p = TREE_CHAIN (p);
3409 #ifdef ENABLE_TREE_CHECKING
3410 if (len % 2)
3411 q = TREE_CHAIN (q);
3412 gcc_assert (p != q);
3413 #endif
3414 len++;
3417 return len;
3420 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3421 UNION_TYPE TYPE, or NULL_TREE if none. */
3423 tree
3424 first_field (const_tree type)
3426 tree t = TYPE_FIELDS (type);
3427 while (t && TREE_CODE (t) != FIELD_DECL)
3428 t = TREE_CHAIN (t);
3429 return t;
3432 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3433 UNION_TYPE TYPE, or NULL_TREE if none. */
3435 tree
3436 last_field (const_tree type)
3438 tree last = NULL_TREE;
3440 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3442 if (TREE_CODE (fld) != FIELD_DECL)
3443 continue;
3445 last = fld;
3448 return last;
3451 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3452 by modifying the last node in chain 1 to point to chain 2.
3453 This is the Lisp primitive `nconc'. */
3455 tree
3456 chainon (tree op1, tree op2)
3458 tree t1;
3460 if (!op1)
3461 return op2;
3462 if (!op2)
3463 return op1;
3465 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3466 continue;
3467 TREE_CHAIN (t1) = op2;
3469 #ifdef ENABLE_TREE_CHECKING
3471 tree t2;
3472 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3473 gcc_assert (t2 != t1);
3475 #endif
3477 return op1;
3480 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3482 tree
3483 tree_last (tree chain)
3485 tree next;
3486 if (chain)
3487 while ((next = TREE_CHAIN (chain)))
3488 chain = next;
3489 return chain;
3492 /* Reverse the order of elements in the chain T,
3493 and return the new head of the chain (old last element). */
3495 tree
3496 nreverse (tree t)
3498 tree prev = 0, decl, next;
3499 for (decl = t; decl; decl = next)
3501 /* We shouldn't be using this function to reverse BLOCK chains; we
3502 have blocks_nreverse for that. */
3503 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3504 next = TREE_CHAIN (decl);
3505 TREE_CHAIN (decl) = prev;
3506 prev = decl;
3508 return prev;
3511 /* Return a newly created TREE_LIST node whose
3512 purpose and value fields are PARM and VALUE. */
3514 tree
3515 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3517 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3518 TREE_PURPOSE (t) = parm;
3519 TREE_VALUE (t) = value;
3520 return t;
3523 /* Build a chain of TREE_LIST nodes from a vector. */
3525 tree
3526 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3528 tree ret = NULL_TREE;
3529 tree *pp = &ret;
3530 unsigned int i;
3531 tree t;
3532 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3534 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3535 pp = &TREE_CHAIN (*pp);
3537 return ret;
3540 /* Return a newly created TREE_LIST node whose
3541 purpose and value fields are PURPOSE and VALUE
3542 and whose TREE_CHAIN is CHAIN. */
3544 tree
3545 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3547 tree node;
3549 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3550 memset (node, 0, sizeof (struct tree_common));
3552 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3554 TREE_SET_CODE (node, TREE_LIST);
3555 TREE_CHAIN (node) = chain;
3556 TREE_PURPOSE (node) = purpose;
3557 TREE_VALUE (node) = value;
3558 return node;
3561 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3562 trees. */
3564 vec<tree, va_gc> *
3565 ctor_to_vec (tree ctor)
3567 vec<tree, va_gc> *vec;
3568 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3569 unsigned int ix;
3570 tree val;
3572 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3573 vec->quick_push (val);
3575 return vec;
3578 /* Return the size nominally occupied by an object of type TYPE
3579 when it resides in memory. The value is measured in units of bytes,
3580 and its data type is that normally used for type sizes
3581 (which is the first type created by make_signed_type or
3582 make_unsigned_type). */
3584 tree
3585 size_in_bytes_loc (location_t loc, const_tree type)
3587 tree t;
3589 if (type == error_mark_node)
3590 return integer_zero_node;
3592 type = TYPE_MAIN_VARIANT (type);
3593 t = TYPE_SIZE_UNIT (type);
3595 if (t == 0)
3597 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3598 return size_zero_node;
3601 return t;
3604 /* Return the size of TYPE (in bytes) as a wide integer
3605 or return -1 if the size can vary or is larger than an integer. */
3607 HOST_WIDE_INT
3608 int_size_in_bytes (const_tree type)
3610 tree t;
3612 if (type == error_mark_node)
3613 return 0;
3615 type = TYPE_MAIN_VARIANT (type);
3616 t = TYPE_SIZE_UNIT (type);
3618 if (t && tree_fits_uhwi_p (t))
3619 return TREE_INT_CST_LOW (t);
3620 else
3621 return -1;
3624 /* Return the maximum size of TYPE (in bytes) as a wide integer
3625 or return -1 if the size can vary or is larger than an integer. */
3627 HOST_WIDE_INT
3628 max_int_size_in_bytes (const_tree type)
3630 HOST_WIDE_INT size = -1;
3631 tree size_tree;
3633 /* If this is an array type, check for a possible MAX_SIZE attached. */
3635 if (TREE_CODE (type) == ARRAY_TYPE)
3637 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3639 if (size_tree && tree_fits_uhwi_p (size_tree))
3640 size = tree_to_uhwi (size_tree);
3643 /* If we still haven't been able to get a size, see if the language
3644 can compute a maximum size. */
3646 if (size == -1)
3648 size_tree = lang_hooks.types.max_size (type);
3650 if (size_tree && tree_fits_uhwi_p (size_tree))
3651 size = tree_to_uhwi (size_tree);
3654 return size;
3657 /* Return the bit position of FIELD, in bits from the start of the record.
3658 This is a tree of type bitsizetype. */
3660 tree
3661 bit_position (const_tree field)
3663 return bit_from_pos (DECL_FIELD_OFFSET (field),
3664 DECL_FIELD_BIT_OFFSET (field));
3667 /* Return the byte position of FIELD, in bytes from the start of the record.
3668 This is a tree of type sizetype. */
3670 tree
3671 byte_position (const_tree field)
3673 return byte_from_pos (DECL_FIELD_OFFSET (field),
3674 DECL_FIELD_BIT_OFFSET (field));
3677 /* Likewise, but return as an integer. It must be representable in
3678 that way (since it could be a signed value, we don't have the
3679 option of returning -1 like int_size_in_bytes can). */
3681 HOST_WIDE_INT
3682 int_byte_position (const_tree field)
3684 return tree_to_shwi (byte_position (field));
3687 /* Return, as a tree node, the number of elements for TYPE (which is an
3688 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3690 tree
3691 array_type_nelts (const_tree type)
3693 tree index_type, min, max;
3695 /* If they did it with unspecified bounds, then we should have already
3696 given an error about it before we got here. */
3697 if (! TYPE_DOMAIN (type))
3698 return error_mark_node;
3700 index_type = TYPE_DOMAIN (type);
3701 min = TYPE_MIN_VALUE (index_type);
3702 max = TYPE_MAX_VALUE (index_type);
3704 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3705 if (!max)
3707 /* Zero-sized arrays are represented by the C FE as complete types with
3708 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3709 them as min 0, max -1. */
3710 if (COMPLETE_TYPE_P (type)
3711 && integer_zerop (TYPE_SIZE (type))
3712 && integer_zerop (min))
3713 return build_int_cst (TREE_TYPE (min), -1);
3715 return error_mark_node;
3718 return (integer_zerop (min)
3719 ? max
3720 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
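/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   for the C type int a[10], whose index domain is [0, 9], array_type_nelts
   returns the INTEGER_CST 9 (the maximum index, because the minimum is
   zero); a zero-sized array is handled above by returning -1.  */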
3723 /* If arg is static -- a reference to an object in static storage -- then
3724 return the object. This is not the same as the C meaning of `static'.
3725 If arg isn't static, return NULL. */
3727 tree
3728 staticp (tree arg)
3730 switch (TREE_CODE (arg))
3732 case FUNCTION_DECL:
3733 /* Nested functions are static, even though taking their address will
3734 involve a trampoline as we unnest the nested function and create
3735 the trampoline on the tree level. */
3736 return arg;
3738 case VAR_DECL:
3739 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3740 && ! DECL_THREAD_LOCAL_P (arg)
3741 && ! DECL_DLLIMPORT_P (arg)
3742 ? arg : NULL);
3744 case CONST_DECL:
3745 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3746 ? arg : NULL);
3748 case CONSTRUCTOR:
3749 return TREE_STATIC (arg) ? arg : NULL;
3751 case LABEL_DECL:
3752 case STRING_CST:
3753 return arg;
3755 case COMPONENT_REF:
3756 /* If the thing being referenced is not a field, then it is
3757 something language specific. */
3758 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3760 /* If we are referencing a bitfield, we can't evaluate an
3761 ADDR_EXPR at compile time and so it isn't a constant. */
3762 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3763 return NULL;
3765 return staticp (TREE_OPERAND (arg, 0));
3767 case BIT_FIELD_REF:
3768 return NULL;
3770 case INDIRECT_REF:
3771 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3773 case ARRAY_REF:
3774 case ARRAY_RANGE_REF:
3775 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3776 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3777 return staticp (TREE_OPERAND (arg, 0));
3778 else
3779 return NULL;
3781 case COMPOUND_LITERAL_EXPR:
3782 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3784 default:
3785 return NULL;
3792 /* Return whether OP is a DECL whose address is function-invariant. */
3794 bool
3795 decl_address_invariant_p (const_tree op)
3797 /* The conditions below are slightly less strict than the ones in
3798 staticp. */
3800 switch (TREE_CODE (op))
3802 case PARM_DECL:
3803 case RESULT_DECL:
3804 case LABEL_DECL:
3805 case FUNCTION_DECL:
3806 return true;
3808 case VAR_DECL:
3809 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3810 || DECL_THREAD_LOCAL_P (op)
3811 || DECL_CONTEXT (op) == current_function_decl
3812 || decl_function_context (op) == current_function_decl)
3813 return true;
3814 break;
3816 case CONST_DECL:
3817 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3818 || decl_function_context (op) == current_function_decl)
3819 return true;
3820 break;
3822 default:
3823 break;
3826 return false;
3829 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3831 bool
3832 decl_address_ip_invariant_p (const_tree op)
3834 /* The conditions below are slightly less strict than the ones in
3835 staticp. */
3837 switch (TREE_CODE (op))
3839 case LABEL_DECL:
3840 case FUNCTION_DECL:
3841 case STRING_CST:
3842 return true;
3844 case VAR_DECL:
3845 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3846 && !DECL_DLLIMPORT_P (op))
3847 || DECL_THREAD_LOCAL_P (op))
3848 return true;
3849 break;
3851 case CONST_DECL:
3852 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3853 return true;
3854 break;
3856 default:
3857 break;
3860 return false;
3864 /* Return true if T is function-invariant (internal function, does
3865 not handle arithmetic; that's handled in skip_simple_arithmetic and
3866 tree_invariant_p). */
3868 static bool
3869 tree_invariant_p_1 (tree t)
3871 tree op;
3873 if (TREE_CONSTANT (t)
3874 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3875 return true;
3877 switch (TREE_CODE (t))
3879 case SAVE_EXPR:
3880 return true;
3882 case ADDR_EXPR:
3883 op = TREE_OPERAND (t, 0);
3884 while (handled_component_p (op))
3886 switch (TREE_CODE (op))
3888 case ARRAY_REF:
3889 case ARRAY_RANGE_REF:
3890 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3891 || TREE_OPERAND (op, 2) != NULL_TREE
3892 || TREE_OPERAND (op, 3) != NULL_TREE)
3893 return false;
3894 break;
3896 case COMPONENT_REF:
3897 if (TREE_OPERAND (op, 2) != NULL_TREE)
3898 return false;
3899 break;
3901 default:;
3903 op = TREE_OPERAND (op, 0);
3906 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3908 default:
3909 break;
3912 return false;
3915 /* Return true if T is function-invariant. */
3917 bool
3918 tree_invariant_p (tree t)
3920 tree inner = skip_simple_arithmetic (t);
3921 return tree_invariant_p_1 (inner);
3924 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3925 Do this to any expression which may be used in more than one place,
3926 but must be evaluated only once.
3928 Normally, expand_expr would reevaluate the expression each time.
3929 Calling save_expr produces something that is evaluated and recorded
3930 the first time expand_expr is called on it. Subsequent calls to
3931 expand_expr just reuse the recorded value.
3933 The call to expand_expr that generates code that actually computes
3934 the value is the first call *at compile time*. Subsequent calls
3935 *at compile time* generate code to use the saved value.
3936 This produces correct result provided that *at run time* control
3937 always flows through the insns made by the first expand_expr
3938 before reaching the other places where the save_expr was evaluated.
3939 You, the caller of save_expr, must make sure this is so.
3941 Constants, and certain read-only nodes, are returned with no
3942 SAVE_EXPR because that is safe. Expressions containing placeholders
3943 are not touched; see tree.def for an explanation of what these
3944 are used for. */
3946 tree
3947 save_expr (tree expr)
3949 tree inner;
3951 /* If the tree evaluates to a constant, then we don't want to hide that
3952 fact (i.e. this allows further folding, and direct checks for constants).
3953 However, a read-only object that has side effects cannot be bypassed.
3954 Since it is no problem to reevaluate literals, we just return the
3955 literal node. */
3956 inner = skip_simple_arithmetic (expr);
3957 if (TREE_CODE (inner) == ERROR_MARK)
3958 return inner;
3960 if (tree_invariant_p_1 (inner))
3961 return expr;
3963 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3964 it means that the size or offset of some field of an object depends on
3965 the value within another field.
3967 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3968 and some variable since it would then need to be both evaluated once and
3969 evaluated more than once. Front-ends must assure this case cannot
3970 happen by surrounding any such subexpressions in their own SAVE_EXPR
3971 and forcing evaluation at the proper time. */
3972 if (contains_placeholder_p (inner))
3973 return expr;
3975 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3977 /* This expression might be placed ahead of a jump to ensure that the
3978 value was computed on both sides of the jump. So make sure it isn't
3979 eliminated as dead. */
3980 TREE_SIDE_EFFECTS (expr) = 1;
3981 return expr;
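/* Illustrative example (editorial sketch, not part of upstream tree.cc):
   when an operand is needed in more than one place, e.g. when expanding
   a <= b ? a : b, wrapping the operands first

     tree sa = save_expr (a);
     tree sb = save_expr (b);

   lets every later use of SA and SB share a single evaluation, while
   constants and other invariant trees come back unwrapped, as the checks
   above show.  */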
3984 /* Look inside EXPR into any simple arithmetic operations. Return the
3985 outermost non-arithmetic or non-invariant node. */
3987 tree
3988 skip_simple_arithmetic (tree expr)
3990 /* We don't care about whether this can be used as an lvalue in this
3991 context. */
3992 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3993 expr = TREE_OPERAND (expr, 0);
3995 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3996 a constant, it will be more efficient to not make another SAVE_EXPR since
3997 it will allow better simplification and GCSE will be able to merge the
3998 computations if they actually occur. */
3999 while (true)
4001 if (UNARY_CLASS_P (expr))
4002 expr = TREE_OPERAND (expr, 0);
4003 else if (BINARY_CLASS_P (expr))
4005 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
4006 expr = TREE_OPERAND (expr, 0);
4007 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
4008 expr = TREE_OPERAND (expr, 1);
4009 else
4010 break;
4012 else
4013 break;
4016 return expr;
4019 /* Look inside EXPR into simple arithmetic operations involving constants.
4020 Return the outermost non-arithmetic or non-constant node. */
4022 tree
4023 skip_simple_constant_arithmetic (tree expr)
4025 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4026 expr = TREE_OPERAND (expr, 0);
4028 while (true)
4030 if (UNARY_CLASS_P (expr))
4031 expr = TREE_OPERAND (expr, 0);
4032 else if (BINARY_CLASS_P (expr))
4034 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4035 expr = TREE_OPERAND (expr, 0);
4036 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4037 expr = TREE_OPERAND (expr, 1);
4038 else
4039 break;
4041 else
4042 break;
4045 return expr;
4048 /* Return which tree structure is used by T. */
4050 enum tree_node_structure_enum
4051 tree_node_structure (const_tree t)
4053 const enum tree_code code = TREE_CODE (t);
4054 return tree_node_structure_for_code (code);
4057 /* Set various status flags when building a CALL_EXPR object T. */
4059 static void
4060 process_call_operands (tree t)
4062 bool side_effects = TREE_SIDE_EFFECTS (t);
4063 bool read_only = false;
4064 int i = call_expr_flags (t);
4066 /* Calls have side-effects, except those to const or pure functions. */
4067 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4068 side_effects = true;
4069 /* Propagate TREE_READONLY of arguments for const functions. */
4070 if (i & ECF_CONST)
4071 read_only = true;
4073 if (!side_effects || read_only)
4074 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4076 tree op = TREE_OPERAND (t, i);
4077 if (op && TREE_SIDE_EFFECTS (op))
4078 side_effects = true;
4079 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4080 read_only = false;
4083 TREE_SIDE_EFFECTS (t) = side_effects;
4084 TREE_READONLY (t) = read_only;
4087 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4088 size or offset that depends on a field within a record. */
4090 bool
4091 contains_placeholder_p (const_tree exp)
4093 enum tree_code code;
4095 if (!exp)
4096 return false;
4098 code = TREE_CODE (exp);
4099 if (code == PLACEHOLDER_EXPR)
4100 return true;
4102 switch (TREE_CODE_CLASS (code))
4104 case tcc_reference:
4105 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4106 position computations since they will be converted into a
4107 WITH_RECORD_EXPR involving the reference, which we assume
4108 here will be valid. */
4109 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4111 case tcc_exceptional:
4112 if (code == TREE_LIST)
4113 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4114 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4115 break;
4117 case tcc_unary:
4118 case tcc_binary:
4119 case tcc_comparison:
4120 case tcc_expression:
4121 switch (code)
4123 case COMPOUND_EXPR:
4124 /* Ignoring the first operand isn't quite right, but works best. */
4125 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4127 case COND_EXPR:
4128 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4129 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4130 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4132 case SAVE_EXPR:
4133 /* The save_expr function never wraps anything containing
4134 a PLACEHOLDER_EXPR. */
4135 return false;
4137 default:
4138 break;
4141 switch (TREE_CODE_LENGTH (code))
4143 case 1:
4144 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4145 case 2:
4146 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4147 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4148 default:
4149 return false;
4152 case tcc_vl_exp:
4153 switch (code)
4155 case CALL_EXPR:
4157 const_tree arg;
4158 const_call_expr_arg_iterator iter;
4159 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4160 if (CONTAINS_PLACEHOLDER_P (arg))
4161 return true;
4162 return false;
4164 default:
4165 return false;
4168 default:
4169 return false;
4171 return false;
4174 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4175 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4176 field positions. */
4178 static bool
4179 type_contains_placeholder_1 (const_tree type)
4181 /* If the size contains a placeholder or the parent type (component type in
4182 the case of arrays) involves a placeholder, this type does. */
4183 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4184 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4185 || (!POINTER_TYPE_P (type)
4186 && TREE_TYPE (type)
4187 && type_contains_placeholder_p (TREE_TYPE (type))))
4188 return true;
4190 /* Now do type-specific checks. Note that the last part of the check above
4191 greatly limits what we have to do below. */
4192 switch (TREE_CODE (type))
4194 case VOID_TYPE:
4195 case OPAQUE_TYPE:
4196 case COMPLEX_TYPE:
4197 case ENUMERAL_TYPE:
4198 case BOOLEAN_TYPE:
4199 case POINTER_TYPE:
4200 case OFFSET_TYPE:
4201 case REFERENCE_TYPE:
4202 case METHOD_TYPE:
4203 case FUNCTION_TYPE:
4204 case VECTOR_TYPE:
4205 case NULLPTR_TYPE:
4206 return false;
4208 case INTEGER_TYPE:
4209 case REAL_TYPE:
4210 case FIXED_POINT_TYPE:
4211 /* Here we just check the bounds. */
4212 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4213 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4215 case ARRAY_TYPE:
4216 /* We have already checked the component type above, so just check
4217 the domain type. Flexible array members have a null domain. */
4218 return TYPE_DOMAIN (type) ?
4219 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4221 case RECORD_TYPE:
4222 case UNION_TYPE:
4223 case QUAL_UNION_TYPE:
4225 tree field;
4227 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4228 if (TREE_CODE (field) == FIELD_DECL
4229 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4230 || (TREE_CODE (type) == QUAL_UNION_TYPE
4231 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4232 || type_contains_placeholder_p (TREE_TYPE (field))))
4233 return true;
4235 return false;
4238 default:
4239 gcc_unreachable ();
4243 /* Wrapper around above function used to cache its result. */
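/* The cache lives in TYPE_CONTAINS_PLACEHOLDER_INTERNAL and uses a small
   encoding: 0 means not yet computed, 1 means computed-and-false, and
   2 means computed-and-true, which is why the code below adds and
   subtracts 1. */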
4245 bool
4246 type_contains_placeholder_p (tree type)
4248 bool result;
4250 /* If the contains_placeholder_bits field has been initialized,
4251 then we know the answer. */
4252 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4253 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4255 /* Indicate that we've seen this type node, and the answer is false.
4256 This is what we want to return if we run into recursion via fields. */
4257 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4259 /* Compute the real value. */
4260 result = type_contains_placeholder_1 (type);
4262 /* Store the real value. */
4263 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4265 return result;
4268 /* Push tree EXP onto vector QUEUE if it is not already present. */
4270 static void
4271 push_without_duplicates (tree exp, vec<tree> *queue)
4273 unsigned int i;
4274 tree iter;
4276 FOR_EACH_VEC_ELT (*queue, i, iter)
4277 if (simple_cst_equal (iter, exp) == 1)
4278 break;
4280 if (!iter)
4281 queue->safe_push (exp);
4284 /* Given a tree EXP, find all occurrences of references to fields
4285 in a PLACEHOLDER_EXPR and place them in vector REFS without
4286 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4287 we assume here that EXP contains only arithmetic expressions
4288 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4289 argument list. */
4291 void
4292 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4294 enum tree_code code = TREE_CODE (exp);
4295 tree inner;
4296 int i;
4298 /* We handle TREE_LIST and COMPONENT_REF separately. */
4299 if (code == TREE_LIST)
4301 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4302 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4304 else if (code == COMPONENT_REF)
4306 for (inner = TREE_OPERAND (exp, 0);
4307 REFERENCE_CLASS_P (inner);
4308 inner = TREE_OPERAND (inner, 0))
4311 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4312 push_without_duplicates (exp, refs);
4313 else
4314 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4316 else
4317 switch (TREE_CODE_CLASS (code))
4319 case tcc_constant:
4320 break;
4322 case tcc_declaration:
4323 /* Variables allocated to static storage can stay. */
4324 if (!TREE_STATIC (exp))
4325 push_without_duplicates (exp, refs);
4326 break;
4328 case tcc_expression:
4329 /* This is the pattern built in ada/make_aligning_type. */
4330 if (code == ADDR_EXPR
4331 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4333 push_without_duplicates (exp, refs);
4334 break;
4337 /* Fall through. */
4339 case tcc_exceptional:
4340 case tcc_unary:
4341 case tcc_binary:
4342 case tcc_comparison:
4343 case tcc_reference:
4344 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4345 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4346 break;
4348 case tcc_vl_exp:
4349 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4350 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4351 break;
4353 default:
4354 gcc_unreachable ();
4358 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4359 return a tree with all occurrences of references to F in a
4360 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4361 CONST_DECLs. Note that we assume here that EXP contains only
4362 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4363 occurring only in their argument list. */
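/* Minimal usage sketch (field and value names are hypothetical): if EXP is
   the self-referential size expression
     (PLACEHOLDER_EXPR).upper - (PLACEHOLDER_EXPR).lower
   then substitute_in_expr (exp, upper_field, bound_cst) returns the same
   expression with the COMPONENT_REF selecting "upper" replaced by
   BOUND_CST and the subtraction re-folded. */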
4365 tree
4366 substitute_in_expr (tree exp, tree f, tree r)
4368 enum tree_code code = TREE_CODE (exp);
4369 tree op0, op1, op2, op3;
4370 tree new_tree;
4372 /* We handle TREE_LIST and COMPONENT_REF separately. */
4373 if (code == TREE_LIST)
4375 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4376 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4377 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4378 return exp;
4380 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4382 else if (code == COMPONENT_REF)
4384 tree inner;
4386 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4387 and it is the right field, replace it with R. */
4388 for (inner = TREE_OPERAND (exp, 0);
4389 REFERENCE_CLASS_P (inner);
4390 inner = TREE_OPERAND (inner, 0))
4393 /* The field. */
4394 op1 = TREE_OPERAND (exp, 1);
4396 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4397 return r;
4399 /* If this expression hasn't been completed yet, leave it alone. */
4400 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4401 return exp;
4403 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4404 if (op0 == TREE_OPERAND (exp, 0))
4405 return exp;
4407 new_tree
4408 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4410 else
4411 switch (TREE_CODE_CLASS (code))
4413 case tcc_constant:
4414 return exp;
4416 case tcc_declaration:
4417 if (exp == f)
4418 return r;
4419 else
4420 return exp;
4422 case tcc_expression:
4423 if (exp == f)
4424 return r;
4426 /* Fall through. */
4428 case tcc_exceptional:
4429 case tcc_unary:
4430 case tcc_binary:
4431 case tcc_comparison:
4432 case tcc_reference:
4433 switch (TREE_CODE_LENGTH (code))
4435 case 0:
4436 return exp;
4438 case 1:
4439 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4440 if (op0 == TREE_OPERAND (exp, 0))
4441 return exp;
4443 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4444 break;
4446 case 2:
4447 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4448 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4450 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4451 return exp;
4453 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4454 break;
4456 case 3:
4457 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4458 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4459 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4461 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4462 && op2 == TREE_OPERAND (exp, 2))
4463 return exp;
4465 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4466 break;
4468 case 4:
4469 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4470 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4471 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4472 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4474 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4475 && op2 == TREE_OPERAND (exp, 2)
4476 && op3 == TREE_OPERAND (exp, 3))
4477 return exp;
4479 new_tree
4480 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4481 break;
4483 default:
4484 gcc_unreachable ();
4486 break;
4488 case tcc_vl_exp:
4490 int i;
4492 new_tree = NULL_TREE;
4494 /* If we are trying to replace F with a constant or with another
4495 instance of one of the arguments of the call, inline back
4496 functions that do nothing other than compute a value from
4497 the arguments they are passed. This makes it possible to
4498 fold partially or entirely the replacement expression. */
4499 if (code == CALL_EXPR)
4501 bool maybe_inline = false;
4502 if (CONSTANT_CLASS_P (r))
4503 maybe_inline = true;
4504 else
4505 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4506 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4508 maybe_inline = true;
4509 break;
4511 if (maybe_inline)
4513 tree t = maybe_inline_call_in_expr (exp);
4514 if (t)
4515 return SUBSTITUTE_IN_EXPR (t, f, r);
4519 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4521 tree op = TREE_OPERAND (exp, i);
4522 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4523 if (new_op != op)
4525 if (!new_tree)
4526 new_tree = copy_node (exp);
4527 TREE_OPERAND (new_tree, i) = new_op;
4531 if (new_tree)
4533 new_tree = fold (new_tree);
4534 if (TREE_CODE (new_tree) == CALL_EXPR)
4535 process_call_operands (new_tree);
4537 else
4538 return exp;
4540 break;
4542 default:
4543 gcc_unreachable ();
4546 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4548 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4549 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4551 return new_tree;
4554 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4555 for it within OBJ, a tree that is an object or a chain of references. */
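/* Usage sketch (hedged): when instantiating the size of an object VAR whose
   type is self-referential, substitute_placeholder_in_expr (size, var)
   walks VAR looking for a subobject whose type matches each
   PLACEHOLDER_EXPR and substitutes a reference to that subobject. */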
4557 tree
4558 substitute_placeholder_in_expr (tree exp, tree obj)
4560 enum tree_code code = TREE_CODE (exp);
4561 tree op0, op1, op2, op3;
4562 tree new_tree;
4564 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4565 in the chain of OBJ. */
4566 if (code == PLACEHOLDER_EXPR)
4568 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4569 tree elt;
4571 for (elt = obj; elt != 0;
4572 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4573 || TREE_CODE (elt) == COND_EXPR)
4574 ? TREE_OPERAND (elt, 1)
4575 : (REFERENCE_CLASS_P (elt)
4576 || UNARY_CLASS_P (elt)
4577 || BINARY_CLASS_P (elt)
4578 || VL_EXP_CLASS_P (elt)
4579 || EXPRESSION_CLASS_P (elt))
4580 ? TREE_OPERAND (elt, 0) : 0))
4581 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4582 return elt;
4584 for (elt = obj; elt != 0;
4585 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4586 || TREE_CODE (elt) == COND_EXPR)
4587 ? TREE_OPERAND (elt, 1)
4588 : (REFERENCE_CLASS_P (elt)
4589 || UNARY_CLASS_P (elt)
4590 || BINARY_CLASS_P (elt)
4591 || VL_EXP_CLASS_P (elt)
4592 || EXPRESSION_CLASS_P (elt))
4593 ? TREE_OPERAND (elt, 0) : 0))
4594 if (POINTER_TYPE_P (TREE_TYPE (elt))
4595 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4596 == need_type))
4597 return fold_build1 (INDIRECT_REF, need_type, elt);
4599 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4600 survives until RTL generation, there will be an error. */
4601 return exp;
4604 /* TREE_LIST is special because we need to look at TREE_VALUE
4605 and TREE_CHAIN, not TREE_OPERANDS. */
4606 else if (code == TREE_LIST)
4608 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4609 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4610 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4611 return exp;
4613 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4615 else
4616 switch (TREE_CODE_CLASS (code))
4618 case tcc_constant:
4619 case tcc_declaration:
4620 return exp;
4622 case tcc_exceptional:
4623 case tcc_unary:
4624 case tcc_binary:
4625 case tcc_comparison:
4626 case tcc_expression:
4627 case tcc_reference:
4628 case tcc_statement:
4629 switch (TREE_CODE_LENGTH (code))
4631 case 0:
4632 return exp;
4634 case 1:
4635 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4636 if (op0 == TREE_OPERAND (exp, 0))
4637 return exp;
4639 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4640 break;
4642 case 2:
4643 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4644 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4646 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4647 return exp;
4649 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4650 break;
4652 case 3:
4653 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4654 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4655 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4657 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4658 && op2 == TREE_OPERAND (exp, 2))
4659 return exp;
4661 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4662 break;
4664 case 4:
4665 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4666 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4667 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4668 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4670 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4671 && op2 == TREE_OPERAND (exp, 2)
4672 && op3 == TREE_OPERAND (exp, 3))
4673 return exp;
4675 new_tree
4676 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4677 break;
4679 default:
4680 gcc_unreachable ();
4682 break;
4684 case tcc_vl_exp:
4686 int i;
4688 new_tree = NULL_TREE;
4690 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4692 tree op = TREE_OPERAND (exp, i);
4693 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4694 if (new_op != op)
4696 if (!new_tree)
4697 new_tree = copy_node (exp);
4698 TREE_OPERAND (new_tree, i) = new_op;
4702 if (new_tree)
4704 new_tree = fold (new_tree);
4705 if (TREE_CODE (new_tree) == CALL_EXPR)
4706 process_call_operands (new_tree);
4708 else
4709 return exp;
4711 break;
4713 default:
4714 gcc_unreachable ();
4717 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4719 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4720 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4722 return new_tree;
4726 /* Subroutine of stabilize_reference; this is called for subtrees of
4727 references. Any expression with side-effects must be put in a SAVE_EXPR
4728 to ensure that it is only evaluated once.
4730 We don't put SAVE_EXPR nodes around everything, because assigning very
4731 simple expressions to temporaries causes us to miss good opportunities
4732 for optimizations. Among other things, the opportunity to fold in the
4733 addition of a constant into an addressing mode often gets lost, e.g.
4734 "y[i+1] += x;". In general, we take the approach that we should not make
4735 an assignment unless we are forced into it - i.e., that any non-side effect
4736 operator should be allowed, and that cse should take care of coalescing
4737 multiple utterances of the same expression should that prove fruitful. */
4739 static tree
4740 stabilize_reference_1 (tree e)
4742 tree result;
4743 enum tree_code code = TREE_CODE (e);
4745 /* We cannot ignore const expressions because it might be a reference
4746 to a const array whose index contains side-effects. But we can
4747 ignore things that are actually constant or that have already been
4748 handled by this function. */
4750 if (tree_invariant_p (e))
4751 return e;
4753 switch (TREE_CODE_CLASS (code))
4755 case tcc_exceptional:
4756 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4757 have side-effects. */
4758 if (code == STATEMENT_LIST)
4759 return save_expr (e);
4760 /* FALLTHRU */
4761 case tcc_type:
4762 case tcc_declaration:
4763 case tcc_comparison:
4764 case tcc_statement:
4765 case tcc_expression:
4766 case tcc_reference:
4767 case tcc_vl_exp:
4768 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4769 so that it will only be evaluated once. */
4770 /* The reference (r) and comparison (<) classes could be handled as
4771 below, but it is generally faster to only evaluate them once. */
4772 if (TREE_SIDE_EFFECTS (e))
4773 return save_expr (e);
4774 return e;
4776 case tcc_constant:
4777 /* Constants need no processing. In fact, we should never reach
4778 here. */
4779 return e;
4781 case tcc_binary:
4782 /* Division is slow and tends to be compiled with jumps,
4783 especially the division by powers of 2 that is often
4784 found inside of an array reference. So do it just once. */
4785 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4786 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4787 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4788 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4789 return save_expr (e);
4790 /* Recursively stabilize each operand. */
4791 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4792 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4793 break;
4795 case tcc_unary:
4796 /* Recursively stabilize each operand. */
4797 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4798 break;
4800 default:
4801 gcc_unreachable ();
4804 TREE_TYPE (result) = TREE_TYPE (e);
4805 TREE_READONLY (result) = TREE_READONLY (e);
4806 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4807 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4809 return result;
4812 /* Stabilize a reference so that we can use it any number of times
4813 without causing its operands to be evaluated more than once.
4814 Returns the stabilized reference. This works by means of save_expr,
4815 so see the caveats in the comments about save_expr.
4817 Also allows conversion expressions whose operands are references.
4818 Any other kind of expression is returned unchanged. */
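/* Illustrative example (a sketch): for a reference such as a.b[f ()], the
   decl A is returned unchanged, the COMPONENT_REF and ARRAY_REF structure
   is rebuilt, and the side-effecting index F () is wrapped in a SAVE_EXPR
   by stabilize_reference_1, so repeated uses of the result evaluate F
   only once. */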
4820 tree
4821 stabilize_reference (tree ref)
4823 tree result;
4824 enum tree_code code = TREE_CODE (ref);
4826 switch (code)
4828 case VAR_DECL:
4829 case PARM_DECL:
4830 case RESULT_DECL:
4831 /* No action is needed in this case. */
4832 return ref;
4834 CASE_CONVERT:
4835 case FLOAT_EXPR:
4836 case FIX_TRUNC_EXPR:
4837 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4838 break;
4840 case INDIRECT_REF:
4841 result = build_nt (INDIRECT_REF,
4842 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4843 break;
4845 case COMPONENT_REF:
4846 result = build_nt (COMPONENT_REF,
4847 stabilize_reference (TREE_OPERAND (ref, 0)),
4848 TREE_OPERAND (ref, 1), NULL_TREE);
4849 break;
4851 case BIT_FIELD_REF:
4852 result = build_nt (BIT_FIELD_REF,
4853 stabilize_reference (TREE_OPERAND (ref, 0)),
4854 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4855 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4856 break;
4858 case ARRAY_REF:
4859 result = build_nt (ARRAY_REF,
4860 stabilize_reference (TREE_OPERAND (ref, 0)),
4861 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4862 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4863 break;
4865 case ARRAY_RANGE_REF:
4866 result = build_nt (ARRAY_RANGE_REF,
4867 stabilize_reference (TREE_OPERAND (ref, 0)),
4868 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4869 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4870 break;
4872 case COMPOUND_EXPR:
4873 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4874 it wouldn't be ignored. This matters when dealing with
4875 volatiles. */
4876 return stabilize_reference_1 (ref);
4878 /* If arg isn't a kind of lvalue we recognize, make no change.
4879 Caller should recognize the error for an invalid lvalue. */
4880 default:
4881 return ref;
4883 case ERROR_MARK:
4884 return error_mark_node;
4887 TREE_TYPE (result) = TREE_TYPE (ref);
4888 TREE_READONLY (result) = TREE_READONLY (ref);
4889 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4890 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4891 protected_set_expr_location (result, EXPR_LOCATION (ref));
4893 return result;
4896 /* Low-level constructors for expressions. */
4898 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4899 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4901 void
4902 recompute_tree_invariant_for_addr_expr (tree t)
4904 tree node;
4905 bool tc = true, se = false;
4907 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4909 /* We started out assuming this address is both invariant and constant, and
4910 has no side effects. Now go down any handled components and see if
4911 any of them involve offsets that are either non-constant or non-invariant.
4912 Also check for side-effects.
4914 ??? Note that this code makes no attempt to deal with the case where
4915 taking the address of something causes a copy due to misalignment. */
4917 #define UPDATE_FLAGS(NODE) \
4918 do { tree _node = (NODE); \
4919 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4920 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4922 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4923 node = TREE_OPERAND (node, 0))
4925 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4926 array reference (probably made temporarily by the G++ front end),
4927 so ignore all the operands. */
4928 if ((TREE_CODE (node) == ARRAY_REF
4929 || TREE_CODE (node) == ARRAY_RANGE_REF)
4930 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4932 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4933 if (TREE_OPERAND (node, 2))
4934 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4935 if (TREE_OPERAND (node, 3))
4936 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4938 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4939 FIELD_DECL, apparently. The G++ front end can put something else
4940 there, at least temporarily. */
4941 else if (TREE_CODE (node) == COMPONENT_REF
4942 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4944 if (TREE_OPERAND (node, 2))
4945 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4949 node = lang_hooks.expr_to_decl (node, &tc, &se);
4951 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4952 the address, since &(*a)->b is a form of addition. If it's a constant, the
4953 address is constant too. If it's a decl, its address is constant if the
4954 decl is static. Everything else is not constant and, furthermore,
4955 taking the address of a volatile variable is not volatile. */
4956 if (INDIRECT_REF_P (node)
4957 || TREE_CODE (node) == MEM_REF)
4958 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4959 else if (CONSTANT_CLASS_P (node))
4961 else if (DECL_P (node))
4962 tc &= (staticp (node) != NULL_TREE);
4963 else
4965 tc = false;
4966 se |= TREE_SIDE_EFFECTS (node);
4970 TREE_CONSTANT (t) = tc;
4971 TREE_SIDE_EFFECTS (t) = se;
4972 #undef UPDATE_FLAGS
4975 /* Build an expression of code CODE, data type TYPE, and operands as
4976 specified. Expressions and reference nodes can be created this way.
4977 Constants, decls, types and misc nodes cannot be.
4979 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4980 enough for all extant tree codes. */
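/* Hedged usage example: a binary addition node can be built with
     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
   where A and B are existing trees of integer type; TREE_SIDE_EFFECTS,
   TREE_READONLY and TREE_CONSTANT of the result are then derived from the
   operands as described below. */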
4982 tree
4983 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4985 tree t;
4987 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4989 t = make_node (code PASS_MEM_STAT);
4990 TREE_TYPE (t) = tt;
4992 return t;
4995 tree
4996 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4998 int length = sizeof (struct tree_exp);
4999 tree t;
5001 record_node_allocation_statistics (code, length);
5003 gcc_assert (TREE_CODE_LENGTH (code) == 1);
5005 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
5007 memset (t, 0, sizeof (struct tree_common));
5009 TREE_SET_CODE (t, code);
5011 TREE_TYPE (t) = type;
5012 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
5013 TREE_OPERAND (t, 0) = node;
5014 if (node && !TYPE_P (node))
5016 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
5017 TREE_READONLY (t) = TREE_READONLY (node);
5020 if (TREE_CODE_CLASS (code) == tcc_statement)
5022 if (code != DEBUG_BEGIN_STMT)
5023 TREE_SIDE_EFFECTS (t) = 1;
5025 else switch (code)
5027 case VA_ARG_EXPR:
5028 /* All of these have side-effects, no matter what their
5029 operands are. */
5030 TREE_SIDE_EFFECTS (t) = 1;
5031 TREE_READONLY (t) = 0;
5032 break;
5034 case INDIRECT_REF:
5035 /* Whether a dereference is readonly has nothing to do with whether
5036 its operand is readonly. */
5037 TREE_READONLY (t) = 0;
5038 break;
5040 case ADDR_EXPR:
5041 if (node)
5042 recompute_tree_invariant_for_addr_expr (t);
5043 break;
5045 default:
5046 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5047 && node && !TYPE_P (node)
5048 && TREE_CONSTANT (node))
5049 TREE_CONSTANT (t) = 1;
5050 if (TREE_CODE_CLASS (code) == tcc_reference
5051 && node && TREE_THIS_VOLATILE (node))
5052 TREE_THIS_VOLATILE (t) = 1;
5053 break;
5056 return t;
5059 #define PROCESS_ARG(N) \
5060 do { \
5061 TREE_OPERAND (t, N) = arg##N; \
5062 if (arg##N && !TYPE_P (arg##N)) \
5064 if (TREE_SIDE_EFFECTS (arg##N)) \
5065 side_effects = 1; \
5066 if (!TREE_READONLY (arg##N) \
5067 && !CONSTANT_CLASS_P (arg##N)) \
5068 (void) (read_only = 0); \
5069 if (!TREE_CONSTANT (arg##N)) \
5070 (void) (constant = 0); \
5072 } while (0)
5074 tree
5075 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5077 bool constant, read_only, side_effects, div_by_zero;
5078 tree t;
5080 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5082 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5083 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5084 /* When sizetype precision doesn't match that of pointers
5085 we need to be able to build explicit extensions or truncations
5086 of the offset argument. */
5087 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5088 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5089 && TREE_CODE (arg1) == INTEGER_CST);
5091 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5092 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5093 && ptrofftype_p (TREE_TYPE (arg1)));
5095 t = make_node (code PASS_MEM_STAT);
5096 TREE_TYPE (t) = tt;
5098 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5099 result based on those same flags for the arguments. But if the
5100 arguments aren't really even `tree' expressions, we shouldn't be trying
5101 to do this. */
5103 /* Expressions without side effects may be constant if their
5104 arguments are as well. */
5105 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5106 || TREE_CODE_CLASS (code) == tcc_binary);
5107 read_only = 1;
5108 side_effects = TREE_SIDE_EFFECTS (t);
5110 switch (code)
5112 case TRUNC_DIV_EXPR:
5113 case CEIL_DIV_EXPR:
5114 case FLOOR_DIV_EXPR:
5115 case ROUND_DIV_EXPR:
5116 case EXACT_DIV_EXPR:
5117 case CEIL_MOD_EXPR:
5118 case FLOOR_MOD_EXPR:
5119 case ROUND_MOD_EXPR:
5120 case TRUNC_MOD_EXPR:
5121 div_by_zero = integer_zerop (arg1);
5122 break;
5123 default:
5124 div_by_zero = false;
5127 PROCESS_ARG (0);
5128 PROCESS_ARG (1);
5130 TREE_SIDE_EFFECTS (t) = side_effects;
5131 if (code == MEM_REF)
5133 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5135 tree o = TREE_OPERAND (arg0, 0);
5136 TREE_READONLY (t) = TREE_READONLY (o);
5137 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5140 else
5142 TREE_READONLY (t) = read_only;
5143 /* Don't mark X / 0 as constant. */
5144 TREE_CONSTANT (t) = constant && !div_by_zero;
5145 TREE_THIS_VOLATILE (t)
5146 = (TREE_CODE_CLASS (code) == tcc_reference
5147 && arg0 && TREE_THIS_VOLATILE (arg0));
5150 return t;
5154 tree
5155 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5156 tree arg2 MEM_STAT_DECL)
5158 bool constant, read_only, side_effects;
5159 tree t;
5161 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5162 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5164 t = make_node (code PASS_MEM_STAT);
5165 TREE_TYPE (t) = tt;
5167 read_only = 1;
5169 /* As a special exception, if COND_EXPR has NULL branches, we
5170 assume that it is a gimple statement and always consider
5171 it to have side effects. */
5172 if (code == COND_EXPR
5173 && tt == void_type_node
5174 && arg1 == NULL_TREE
5175 && arg2 == NULL_TREE)
5176 side_effects = true;
5177 else
5178 side_effects = TREE_SIDE_EFFECTS (t);
5180 PROCESS_ARG (0);
5181 PROCESS_ARG (1);
5182 PROCESS_ARG (2);
5184 if (code == COND_EXPR)
5185 TREE_READONLY (t) = read_only;
5187 TREE_SIDE_EFFECTS (t) = side_effects;
5188 TREE_THIS_VOLATILE (t)
5189 = (TREE_CODE_CLASS (code) == tcc_reference
5190 && arg0 && TREE_THIS_VOLATILE (arg0));
5192 return t;
5195 tree
5196 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5197 tree arg2, tree arg3 MEM_STAT_DECL)
5199 bool constant, read_only, side_effects;
5200 tree t;
5202 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5204 t = make_node (code PASS_MEM_STAT);
5205 TREE_TYPE (t) = tt;
5207 side_effects = TREE_SIDE_EFFECTS (t);
5209 PROCESS_ARG (0);
5210 PROCESS_ARG (1);
5211 PROCESS_ARG (2);
5212 PROCESS_ARG (3);
5214 TREE_SIDE_EFFECTS (t) = side_effects;
5215 TREE_THIS_VOLATILE (t)
5216 = (TREE_CODE_CLASS (code) == tcc_reference
5217 && arg0 && TREE_THIS_VOLATILE (arg0));
5219 return t;
5222 tree
5223 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5224 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5226 bool constant, read_only, side_effects;
5227 tree t;
5229 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5231 t = make_node (code PASS_MEM_STAT);
5232 TREE_TYPE (t) = tt;
5234 side_effects = TREE_SIDE_EFFECTS (t);
5236 PROCESS_ARG (0);
5237 PROCESS_ARG (1);
5238 PROCESS_ARG (2);
5239 PROCESS_ARG (3);
5240 PROCESS_ARG (4);
5242 TREE_SIDE_EFFECTS (t) = side_effects;
5243 if (code == TARGET_MEM_REF)
5245 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5247 tree o = TREE_OPERAND (arg0, 0);
5248 TREE_READONLY (t) = TREE_READONLY (o);
5249 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5252 else
5253 TREE_THIS_VOLATILE (t)
5254 = (TREE_CODE_CLASS (code) == tcc_reference
5255 && arg0 && TREE_THIS_VOLATILE (arg0));
5257 return t;
5260 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5261 on the pointer PTR. */
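/* Sketch of the result (illustrative): for a pointer P of type T *, this
   builds MEM_REF <T> (P, 0); when P has the form &a.b, the address is
   first collapsed to a base pointer plus a constant byte offset, and that
   offset becomes the constant second operand of the MEM_REF. */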
5263 tree
5264 build_simple_mem_ref_loc (location_t loc, tree ptr)
5266 poly_int64 offset = 0;
5267 tree ptype = TREE_TYPE (ptr);
5268 tree tem;
5269 /* For convenience allow addresses that collapse to a simple base
5270 and offset. */
5271 if (TREE_CODE (ptr) == ADDR_EXPR
5272 && (handled_component_p (TREE_OPERAND (ptr, 0))
5273 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5275 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5276 gcc_assert (ptr);
5277 if (TREE_CODE (ptr) == MEM_REF)
5279 offset += mem_ref_offset (ptr).force_shwi ();
5280 ptr = TREE_OPERAND (ptr, 0);
5282 else
5283 ptr = build_fold_addr_expr (ptr);
5284 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5286 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5287 ptr, build_int_cst (ptype, offset));
5288 SET_EXPR_LOCATION (tem, loc);
5289 return tem;
5292 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5294 poly_offset_int
5295 mem_ref_offset (const_tree t)
5297 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5298 SIGNED);
5301 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5302 offsetted by OFFSET units. */
5304 tree
5305 build_invariant_address (tree type, tree base, poly_int64 offset)
5307 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5308 build_fold_addr_expr (base),
5309 build_int_cst (ptr_type_node, offset));
5310 tree addr = build1 (ADDR_EXPR, type, ref);
5311 recompute_tree_invariant_for_addr_expr (addr);
5312 return addr;
5315 /* Similar except don't specify the TREE_TYPE
5316 and leave the TREE_SIDE_EFFECTS as 0.
5317 It is permissible for arguments to be null,
5318 or even garbage if their values do not matter. */
5320 tree
5321 build_nt (enum tree_code code, ...)
5323 tree t;
5324 int length;
5325 int i;
5326 va_list p;
5328 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5330 va_start (p, code);
5332 t = make_node (code);
5333 length = TREE_CODE_LENGTH (code);
5335 for (i = 0; i < length; i++)
5336 TREE_OPERAND (t, i) = va_arg (p, tree);
5338 va_end (p);
5339 return t;
5342 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5343 tree vec. */
5345 tree
5346 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5348 tree ret, t;
5349 unsigned int ix;
5351 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5352 CALL_EXPR_FN (ret) = fn;
5353 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5354 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5355 CALL_EXPR_ARG (ret, ix) = t;
5356 return ret;
5359 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5360 and data type TYPE.
5361 We do NOT enter this node in any sort of symbol table.
5363 LOC is the location of the decl.
5365 layout_decl is used to set up the decl's storage layout.
5366 Other slots are initialized to 0 or null pointers. */
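/* Hedged usage example (the identifier is made up): an artificial integer
   variable can be created with
     tree v = build_decl (input_location, VAR_DECL,
                          get_identifier ("tmp"), integer_type_node);
   and its storage is laid out immediately because VAR_DECL is one of the
   codes passed to layout_decl below. */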
5368 tree
5369 build_decl (location_t loc, enum tree_code code, tree name,
5370 tree type MEM_STAT_DECL)
5372 tree t;
5374 t = make_node (code PASS_MEM_STAT);
5375 DECL_SOURCE_LOCATION (t) = loc;
5377 /* if (type == error_mark_node)
5378 type = integer_type_node; */
5379 /* That is not done, deliberately, so that having error_mark_node
5380 as the type can suppress useless errors in the use of this variable. */
5382 DECL_NAME (t) = name;
5383 TREE_TYPE (t) = type;
5385 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5386 layout_decl (t, 0);
5388 return t;
5391 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5393 tree
5394 build_debug_expr_decl (tree type)
5396 tree vexpr = make_node (DEBUG_EXPR_DECL);
5397 DECL_ARTIFICIAL (vexpr) = 1;
5398 TREE_TYPE (vexpr) = type;
5399 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5400 return vexpr;
5403 /* Builds and returns function declaration with NAME and TYPE. */
5405 tree
5406 build_fn_decl (const char *name, tree type)
5408 tree id = get_identifier (name);
5409 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5411 DECL_EXTERNAL (decl) = 1;
5412 TREE_PUBLIC (decl) = 1;
5413 DECL_ARTIFICIAL (decl) = 1;
5414 TREE_NOTHROW (decl) = 1;
5416 return decl;
5419 vec<tree, va_gc> *all_translation_units;
5421 /* Builds a new translation-unit decl with name NAME, queues it in the
5422 global list of translation-unit decls and returns it. */
5424 tree
5425 build_translation_unit_decl (tree name)
5427 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5428 name, NULL_TREE);
5429 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5430 vec_safe_push (all_translation_units, tu);
5431 return tu;
5435 /* BLOCK nodes are used to represent the structure of binding contours
5436 and declarations, once those contours have been exited and their contents
5437 compiled. This information is used for outputting debugging info. */
5439 tree
5440 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5442 tree block = make_node (BLOCK);
5444 BLOCK_VARS (block) = vars;
5445 BLOCK_SUBBLOCKS (block) = subblocks;
5446 BLOCK_SUPERCONTEXT (block) = supercontext;
5447 BLOCK_CHAIN (block) = chain;
5448 return block;
5452 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5454 LOC is the location to use in tree T. */
5456 void
5457 protected_set_expr_location (tree t, location_t loc)
5459 if (CAN_HAVE_LOCATION_P (t))
5460 SET_EXPR_LOCATION (t, loc);
5461 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5463 t = expr_single (t);
5464 if (t && CAN_HAVE_LOCATION_P (t))
5465 SET_EXPR_LOCATION (t, loc);
5469 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5470 UNKNOWN_LOCATION. */
5472 void
5473 protected_set_expr_location_if_unset (tree t, location_t loc)
5475 t = expr_single (t);
5476 if (t && !EXPR_HAS_LOCATION (t))
5477 protected_set_expr_location (t, loc);
5480 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5481 of the various TYPE_QUAL values. */
5483 static void
5484 set_type_quals (tree type, int type_quals)
5486 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5487 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5488 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5489 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5490 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5493 /* Returns true iff CAND and BASE have equivalent language-specific
5494 qualifiers. */
5496 bool
5497 check_lang_type (const_tree cand, const_tree base)
5499 if (lang_hooks.types.type_hash_eq == NULL)
5500 return true;
5501 /* type_hash_eq currently only applies to these types. */
5502 if (TREE_CODE (cand) != FUNCTION_TYPE
5503 && TREE_CODE (cand) != METHOD_TYPE)
5504 return true;
5505 return lang_hooks.types.type_hash_eq (cand, base);
5508 /* This function checks to see if TYPE matches the size of one of the built-in
5509 atomic types, and returns that core atomic type. */
5511 static tree
5512 find_atomic_core_type (const_tree type)
5514 tree base_atomic_type;
5516 /* Only handle complete types. */
5517 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5518 return NULL_TREE;
5520 switch (tree_to_uhwi (TYPE_SIZE (type)))
5522 case 8:
5523 base_atomic_type = atomicQI_type_node;
5524 break;
5526 case 16:
5527 base_atomic_type = atomicHI_type_node;
5528 break;
5530 case 32:
5531 base_atomic_type = atomicSI_type_node;
5532 break;
5534 case 64:
5535 base_atomic_type = atomicDI_type_node;
5536 break;
5538 case 128:
5539 base_atomic_type = atomicTI_type_node;
5540 break;
5542 default:
5543 base_atomic_type = NULL_TREE;
5546 return base_atomic_type;
5549 /* Returns true iff unqualified CAND and BASE are equivalent. */
5551 bool
5552 check_base_type (const_tree cand, const_tree base)
5554 if (TYPE_NAME (cand) != TYPE_NAME (base)
5555 /* Apparently this is needed for Objective-C. */
5556 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5557 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5558 TYPE_ATTRIBUTES (base)))
5559 return false;
5560 /* Check alignment. */
5561 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5562 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5563 return true;
5564 /* Atomic types increase minimal alignment. We must do so as well
5565 or we get duplicated canonical types. See PR88686. */
5566 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5568 /* See if this object can map to a basic atomic type. */
5569 tree atomic_type = find_atomic_core_type (cand);
5570 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5571 return true;
5573 return false;
5576 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5578 bool
5579 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5581 return (TYPE_QUALS (cand) == type_quals
5582 && check_base_type (cand, base)
5583 && check_lang_type (cand, base));
5586 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5588 static bool
5589 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5591 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5592 && TYPE_NAME (cand) == TYPE_NAME (base)
5593 /* Apparently this is needed for Objective-C. */
5594 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5595 /* Check alignment. */
5596 && TYPE_ALIGN (cand) == align
5597 /* Check this is a user-aligned type as build_aligned_type
5598 would create. */
5599 && TYPE_USER_ALIGN (cand)
5600 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5601 TYPE_ATTRIBUTES (base))
5602 && check_lang_type (cand, base));
5605 /* Return a version of the TYPE, qualified as indicated by the
5606 TYPE_QUALS, if one exists. If no qualified version exists yet,
5607 return NULL_TREE. */
5609 tree
5610 get_qualified_type (tree type, int type_quals)
5612 if (TYPE_QUALS (type) == type_quals)
5613 return type;
5615 tree mv = TYPE_MAIN_VARIANT (type);
5616 if (check_qualified_type (mv, type, type_quals))
5617 return mv;
5619 /* Search the chain of variants to see if there is already one there just
5620 like the one we need to have. If so, use that existing one. We must
5621 preserve the TYPE_NAME, since there is code that depends on this. */
5622 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5623 if (check_qualified_type (*tp, type, type_quals))
5625 /* Put the found variant at the head of the variant list so
5626 frequently searched variants get found faster. The C++ FE
5627 benefits greatly from this. */
5628 tree t = *tp;
5629 *tp = TYPE_NEXT_VARIANT (t);
5630 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5631 TYPE_NEXT_VARIANT (mv) = t;
5632 return t;
5635 return NULL_TREE;
5638 /* Like get_qualified_type, but creates the type if it does not
5639 exist. This function never returns NULL_TREE. */
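/* Hedged usage example: a const-qualified variant of TYPE can be obtained
   with
     tree ctype = build_qualified_type (type, TYPE_QUAL_CONST);
   which reuses an existing variant from the variant chain when one is
   found and otherwise creates a new one and links it in. */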
5641 tree
5642 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5644 tree t;
5646 /* See if we already have the appropriate qualified variant. */
5647 t = get_qualified_type (type, type_quals);
5649 /* If not, build it. */
5650 if (!t)
5652 t = build_variant_type_copy (type PASS_MEM_STAT);
5653 set_type_quals (t, type_quals);
5655 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5657 /* See if this object can map to a basic atomic type. */
5658 tree atomic_type = find_atomic_core_type (type);
5659 if (atomic_type)
5661 /* Ensure the alignment of this type is compatible with
5662 the required alignment of the atomic type. */
5663 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5664 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5668 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5669 /* Propagate structural equality. */
5670 SET_TYPE_STRUCTURAL_EQUALITY (t);
5671 else if (TYPE_CANONICAL (type) != type)
5672 /* Build the underlying canonical type, since it is different
5673 from TYPE. */
5675 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5676 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5678 else
5679 /* T is its own canonical type. */
5680 TYPE_CANONICAL (t) = t;
5684 return t;
5687 /* Create a variant of type T with alignment ALIGN. */
5689 tree
5690 build_aligned_type (tree type, unsigned int align)
5692 tree t;
5694 if (TYPE_PACKED (type)
5695 || TYPE_ALIGN (type) == align)
5696 return type;
5698 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5699 if (check_aligned_type (t, type, align))
5700 return t;
5702 t = build_variant_type_copy (type);
5703 SET_TYPE_ALIGN (t, align);
5704 TYPE_USER_ALIGN (t) = 1;
5706 return t;
5709 /* Create a new distinct copy of TYPE. The new type is made its own
5710 MAIN_VARIANT. If TYPE requires structural equality checks, the
5711 resulting type requires structural equality checks; otherwise, its
5712 TYPE_CANONICAL points to itself. */
5714 tree
5715 build_distinct_type_copy (tree type MEM_STAT_DECL)
5717 tree t = copy_node (type PASS_MEM_STAT);
5719 TYPE_POINTER_TO (t) = 0;
5720 TYPE_REFERENCE_TO (t) = 0;
5722 /* Set the canonical type either to a new equivalence class, or
5723 propagate the need for structural equality checks. */
5724 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5725 SET_TYPE_STRUCTURAL_EQUALITY (t);
5726 else
5727 TYPE_CANONICAL (t) = t;
5729 /* Make it its own variant. */
5730 TYPE_MAIN_VARIANT (t) = t;
5731 TYPE_NEXT_VARIANT (t) = 0;
5733 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5734 whose TREE_TYPE is not t. This can also happen in the Ada
5735 frontend when using subtypes. */
5737 return t;
5740 /* Create a new variant of TYPE, equivalent but distinct. This is so
5741 the caller can modify it. TYPE_CANONICAL for the return type will
5742 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5743 are considered equal by the language itself (or that both types
5744 require structural equality checks). */
5746 tree
5747 build_variant_type_copy (tree type MEM_STAT_DECL)
5749 tree t, m = TYPE_MAIN_VARIANT (type);
5751 t = build_distinct_type_copy (type PASS_MEM_STAT);
5753 /* Since we're building a variant, assume that it is a non-semantic
5754 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5755 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5756 /* Type variants have no alias set defined. */
5757 TYPE_ALIAS_SET (t) = -1;
5759 /* Add the new type to the chain of variants of TYPE. */
5760 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5761 TYPE_NEXT_VARIANT (m) = t;
5762 TYPE_MAIN_VARIANT (t) = m;
5764 return t;
5767 /* Return true if the from trees in both tree maps are equal. */
5770 tree_map_base_eq (const void *va, const void *vb)
5772 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5773 *const b = (const struct tree_map_base *) vb;
5774 return (a->from == b->from);
5777 /* Hash a from tree in a tree_base_map. */
5779 unsigned int
5780 tree_map_base_hash (const void *item)
5782 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5785 /* Return true if this tree map structure is marked for garbage collection
5786 purposes. We simply return true if the from tree is marked, so that this
5787 structure goes away when the from tree goes away. */
5789 bool
5790 tree_map_base_marked_p (const void *p)
5792 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5795 /* Hash a from tree in a tree_map. */
5797 unsigned int
5798 tree_map_hash (const void *item)
5800 return (((const struct tree_map *) item)->hash);
5803 /* Hash a from tree in a tree_decl_map. */
5805 unsigned int
5806 tree_decl_map_hash (const void *item)
5808 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5811 /* Return the initialization priority for DECL. */
5813 priority_type
5814 decl_init_priority_lookup (tree decl)
5816 symtab_node *snode = symtab_node::get (decl);
5818 if (!snode)
5819 return DEFAULT_INIT_PRIORITY;
5820 return
5821 snode->get_init_priority ();
5824 /* Return the finalization priority for DECL. */
5826 priority_type
5827 decl_fini_priority_lookup (tree decl)
5829 cgraph_node *node = cgraph_node::get (decl);
5831 if (!node)
5832 return DEFAULT_INIT_PRIORITY;
5833 return
5834 node->get_fini_priority ();
5837 /* Set the initialization priority for DECL to PRIORITY. */
5839 void
5840 decl_init_priority_insert (tree decl, priority_type priority)
5842 struct symtab_node *snode;
5844 if (priority == DEFAULT_INIT_PRIORITY)
5846 snode = symtab_node::get (decl);
5847 if (!snode)
5848 return;
5850 else if (VAR_P (decl))
5851 snode = varpool_node::get_create (decl);
5852 else
5853 snode = cgraph_node::get_create (decl);
5854 snode->set_init_priority (priority);
5857 /* Set the finalization priority for DECL to PRIORITY. */
5859 void
5860 decl_fini_priority_insert (tree decl, priority_type priority)
5862 struct cgraph_node *node;
5864 if (priority == DEFAULT_INIT_PRIORITY)
5866 node = cgraph_node::get (decl);
5867 if (!node)
5868 return;
5870 else
5871 node = cgraph_node::get_create (decl);
5872 node->set_fini_priority (priority);
5875 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5877 static void
5878 print_debug_expr_statistics (void)
5880 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5881 (long) debug_expr_for_decl->size (),
5882 (long) debug_expr_for_decl->elements (),
5883 debug_expr_for_decl->collisions ());
5886 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5888 static void
5889 print_value_expr_statistics (void)
5891 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5892 (long) value_expr_for_decl->size (),
5893 (long) value_expr_for_decl->elements (),
5894 value_expr_for_decl->collisions ());
5897 /* Lookup a debug expression for FROM, and return it if we find one. */
5899 tree
5900 decl_debug_expr_lookup (tree from)
5902 struct tree_decl_map *h, in;
5903 in.base.from = from;
5905 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5906 if (h)
5907 return h->to;
5908 return NULL_TREE;
5911 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5913 void
5914 decl_debug_expr_insert (tree from, tree to)
5916 struct tree_decl_map *h;
5918 h = ggc_alloc<tree_decl_map> ();
5919 h->base.from = from;
5920 h->to = to;
5921 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5924 /* Lookup a value expression for FROM, and return it if we find one. */
5926 tree
5927 decl_value_expr_lookup (tree from)
5929 struct tree_decl_map *h, in;
5930 in.base.from = from;
5932 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5933 if (h)
5934 return h->to;
5935 return NULL_TREE;
5938 /* Insert a mapping FROM->TO in the value expression hashtable. */
5940 void
5941 decl_value_expr_insert (tree from, tree to)
5943 struct tree_decl_map *h;
5945 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5946 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5948 h = ggc_alloc<tree_decl_map> ();
5949 h->base.from = from;
5950 h->to = to;
5951 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5954 /* Lookup a vector of debug arguments for FROM, and return it if we
5955 find one. */
5957 vec<tree, va_gc> **
5958 decl_debug_args_lookup (tree from)
5960 struct tree_vec_map *h, in;
5962 if (!DECL_HAS_DEBUG_ARGS_P (from))
5963 return NULL;
5964 gcc_checking_assert (debug_args_for_decl != NULL);
5965 in.base.from = from;
5966 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5967 if (h)
5968 return &h->to;
5969 return NULL;
5972 /* Insert a mapping FROM->empty vector of debug arguments in the
5973 debug arguments hashtable. */
5975 vec<tree, va_gc> **
5976 decl_debug_args_insert (tree from)
5978 struct tree_vec_map *h;
5979 tree_vec_map **loc;
5981 if (DECL_HAS_DEBUG_ARGS_P (from))
5982 return decl_debug_args_lookup (from);
5983 if (debug_args_for_decl == NULL)
5984 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5985 h = ggc_alloc<tree_vec_map> ();
5986 h->base.from = from;
5987 h->to = NULL;
5988 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5989 *loc = h;
5990 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5991 return &h->to;
5994 /* Hashing of types so that we don't make duplicates.
5995 The entry point is `type_hash_canon'. */
5997 /* Generate the default hash code for TYPE. This is designed for
5998 speed, rather than maximum entropy. */
6000 hashval_t
6001 type_hash_canon_hash (tree type)
6003 inchash::hash hstate;
6005 hstate.add_int (TREE_CODE (type));
6007 if (TREE_TYPE (type))
6008 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6010 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6011 /* Just the identifier is adequate to distinguish. */
6012 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6014 switch (TREE_CODE (type))
6016 case METHOD_TYPE:
6017 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6018 /* FALLTHROUGH. */
6019 case FUNCTION_TYPE:
6020 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6021 if (TREE_VALUE (t) != error_mark_node)
6022 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6023 break;
6025 case OFFSET_TYPE:
6026 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6027 break;
6029 case ARRAY_TYPE:
6031 if (TYPE_DOMAIN (type))
6032 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6033 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6035 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6036 hstate.add_object (typeless);
6039 break;
6041 case INTEGER_TYPE:
6043 tree t = TYPE_MAX_VALUE (type);
6044 if (!t)
6045 t = TYPE_MIN_VALUE (type);
6046 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6047 hstate.add_object (TREE_INT_CST_ELT (t, i));
6048 break;
6051 case BITINT_TYPE:
6053 unsigned prec = TYPE_PRECISION (type);
6054 unsigned uns = TYPE_UNSIGNED (type);
6055 hstate.add_object (prec);
6056 hstate.add_int (uns);
6057 break;
6060 case REAL_TYPE:
6061 case FIXED_POINT_TYPE:
6063 unsigned prec = TYPE_PRECISION (type);
6064 hstate.add_object (prec);
6065 break;
6068 case VECTOR_TYPE:
6069 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6070 break;
6072 default:
6073 break;
6076 return hstate.end ();
6079 /* These are the Hashtable callback functions. */
6081 /* Returns true iff the types are equivalent. */
6083 bool
6084 type_cache_hasher::equal (type_hash *a, type_hash *b)
6086 /* First test the things that are the same for all types. */
6087 if (a->hash != b->hash
6088 || TREE_CODE (a->type) != TREE_CODE (b->type)
6089 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6090 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6091 TYPE_ATTRIBUTES (b->type))
6092 || (TREE_CODE (a->type) != COMPLEX_TYPE
6093 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6094 return false;
6096 /* Be careful about comparing arrays before and after the element type
6097 has been completed; don't compare TYPE_ALIGN unless both types are
6098 complete. */
6099 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6100 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6101 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6102 return false;
6104 switch (TREE_CODE (a->type))
6106 case VOID_TYPE:
6107 case OPAQUE_TYPE:
6108 case COMPLEX_TYPE:
6109 case POINTER_TYPE:
6110 case REFERENCE_TYPE:
6111 case NULLPTR_TYPE:
6112 return true;
6114 case VECTOR_TYPE:
6115 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6116 TYPE_VECTOR_SUBPARTS (b->type));
6118 case ENUMERAL_TYPE:
6119 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6120 && !(TYPE_VALUES (a->type)
6121 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6122 && TYPE_VALUES (b->type)
6123 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6124 && type_list_equal (TYPE_VALUES (a->type),
6125 TYPE_VALUES (b->type))))
6126 return false;
6128 /* fall through */
6130 case INTEGER_TYPE:
6131 case REAL_TYPE:
6132 case BOOLEAN_TYPE:
6133 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6134 return false;
6135 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6136 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6137 TYPE_MAX_VALUE (b->type)))
6138 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6139 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6140 TYPE_MIN_VALUE (b->type))));
6142 case BITINT_TYPE:
6143 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6144 return false;
6145 return TYPE_UNSIGNED (a->type) == TYPE_UNSIGNED (b->type);
6147 case FIXED_POINT_TYPE:
6148 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6150 case OFFSET_TYPE:
6151 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6153 case METHOD_TYPE:
6154 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6155 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6156 || (TYPE_ARG_TYPES (a->type)
6157 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6158 && TYPE_ARG_TYPES (b->type)
6159 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6160 && type_list_equal (TYPE_ARG_TYPES (a->type),
6161 TYPE_ARG_TYPES (b->type)))))
6162 break;
6163 return false;
6164 case ARRAY_TYPE:
6165 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6166 where the flag should be inherited from the element type
6167 and can change after ARRAY_TYPEs are created; on non-aggregates
6168 compare and hash it, since scalars never have that flag set
6169 and we need to differentiate between arrays created by different
6170 front ends and middle-end-created arrays. */
6171 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6172 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6173 || (TYPE_TYPELESS_STORAGE (a->type)
6174 == TYPE_TYPELESS_STORAGE (b->type))));
6176 case RECORD_TYPE:
6177 case UNION_TYPE:
6178 case QUAL_UNION_TYPE:
6179 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6180 || (TYPE_FIELDS (a->type)
6181 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6182 && TYPE_FIELDS (b->type)
6183 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6184 && type_list_equal (TYPE_FIELDS (a->type),
6185 TYPE_FIELDS (b->type))));
6187 case FUNCTION_TYPE:
6188 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6189 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6190 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6191 || (TYPE_ARG_TYPES (a->type)
6192 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6193 && TYPE_ARG_TYPES (b->type)
6194 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6195 && type_list_equal (TYPE_ARG_TYPES (a->type),
6196 TYPE_ARG_TYPES (b->type))))
6197 break;
6198 return false;
6200 default:
6201 return false;
6204 if (lang_hooks.types.type_hash_eq != NULL)
6205 return lang_hooks.types.type_hash_eq (a->type, b->type);
6207 return true;
6210 /* Given TYPE, and HASHCODE its hash code, return the canonical
6211 object for an identical type if one already exists.
6212 Otherwise, return TYPE, and record it as the canonical object.
6214 To use this function, first create a type of the sort you want.
6215 Then compute its hash code from the fields of the type that
6216 make it different from other similar types.
6217 Then call this function and use the value. */
6219 tree
6220 type_hash_canon (unsigned int hashcode, tree type)
6222 type_hash in;
6223 type_hash **loc;
6225 /* The hash table only contains main variants, so ensure that's what we're
6226 being passed. */
6227 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6229 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6230 must call that routine before comparing TYPE_ALIGNs. */
6231 layout_type (type);
6233 in.hash = hashcode;
6234 in.type = type;
6236 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6237 if (*loc)
6239 tree t1 = ((type_hash *) *loc)->type;
6240 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6241 && t1 != type);
6242 if (TYPE_UID (type) + 1 == next_type_uid)
6243 --next_type_uid;
6244 /* Also free the min/max values and the cache for integer
6245 types. This can't be done in free_node, as LTO frees
6246 those on its own. */
6247 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == BITINT_TYPE)
6249 if (TYPE_MIN_VALUE (type)
6250 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6252 /* Zero is always in TYPE_CACHED_VALUES. */
6253 if (! TYPE_UNSIGNED (type))
6254 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6255 ggc_free (TYPE_MIN_VALUE (type));
6257 if (TYPE_MAX_VALUE (type)
6258 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6260 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6261 ggc_free (TYPE_MAX_VALUE (type));
6263 if (TYPE_CACHED_VALUES_P (type))
6264 ggc_free (TYPE_CACHED_VALUES (type));
6266 free_node (type);
6267 return t1;
6269 else
6271 struct type_hash *h;
6273 h = ggc_alloc<type_hash> ();
6274 h->hash = hashcode;
6275 h->type = type;
6276 *loc = h;
6278 return type;
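/* For illustration (a sketch added for exposition, not from the original
   sources): the canonicalization pattern described above is the one used
   by the type constructors later in this file, e.g. build_offset_type:

     t = make_node (OFFSET_TYPE);
     ... fill in the fields that distinguish the type ...
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an equivalent type already exists, the freshly made node is freed
   and the recorded canonical node is returned instead.  */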
6282 static void
6283 print_type_hash_statistics (void)
6285 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6286 (long) type_hash_table->size (),
6287 (long) type_hash_table->elements (),
6288 type_hash_table->collisions ());
6291 /* Given two lists of types
6292 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6293 return 1 if the lists contain the same types in the same order.
6294 Also, the TREE_PURPOSEs must match. */
6296 bool
6297 type_list_equal (const_tree l1, const_tree l2)
6299 const_tree t1, t2;
6301 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6302 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6303 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6304 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6305 && (TREE_TYPE (TREE_PURPOSE (t1))
6306 == TREE_TYPE (TREE_PURPOSE (t2))))))
6307 return false;
6309 return t1 == t2;
6312 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6313 given by FNTYPE. If the argument list accepts variable arguments,
6314 then this function counts only the ordinary arguments. */
6316 int
6317 type_num_arguments (const_tree fntype)
6319 int i = 0;
6321 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6322 /* If the function does not take a variable number of arguments,
6323 the last element in the list will have type `void'. */
6324 if (VOID_TYPE_P (TREE_VALUE (t)))
6325 break;
6326 else
6327 ++i;
6329 return i;
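/* Illustrative example (added for exposition, not from the original
   sources): for a prototype such as int f (int, double, ...),
   type_num_arguments returns 2; the variadic part is never counted, and
   for a non-variadic prototype the trailing void entry that terminates
   TYPE_ARG_TYPES stops the loop.  */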
6332 /* Return the type of the function FNTYPE's argument ARGNO if known.
6333 For vararg functions, return NULL_TREE when ARGNO refers to one of
6334 the variadic arguments. Otherwise, return void_type_node for
6335 out-of-bounds ARGNO. */
6337 tree
6338 type_argument_type (const_tree fntype, unsigned argno)
6340 /* Treat zero the same as an out-of-bounds argument number. */
6341 if (!argno)
6342 return void_type_node;
6344 function_args_iterator iter;
6346 tree argtype;
6347 unsigned i = 1;
6348 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6350 /* A vararg function's argument list ends in a null. Otherwise,
6351 an ordinary function's argument list ends with void. Return
6352 null if ARGNO refers to a vararg argument, void_type_node if
6353 it's out of bounds, and the formal argument type otherwise. */
6354 if (!argtype)
6355 break;
6357 if (i == argno || VOID_TYPE_P (argtype))
6358 return argtype;
6360 ++i;
6363 return NULL_TREE;
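/* Illustrative example (added for exposition, not from the original
   sources): ARGNO is 1-based.  For int f (int, double),
   type_argument_type (fntype, 2) yields double_type_node, while ARGNO 0
   or 3 yields void_type_node; for int g (int, ...), an ARGNO beyond the
   named argument yields NULL_TREE because the variadic argument list is
   not void-terminated.  */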
6366 /* True if integer constants T1 and T2
6367 represent the same constant value. */
6369 bool
6370 tree_int_cst_equal (const_tree t1, const_tree t2)
6372 if (t1 == t2)
6373 return true;
6375 if (t1 == 0 || t2 == 0)
6376 return false;
6378 STRIP_ANY_LOCATION_WRAPPER (t1);
6379 STRIP_ANY_LOCATION_WRAPPER (t2);
6381 if (TREE_CODE (t1) == INTEGER_CST
6382 && TREE_CODE (t2) == INTEGER_CST
6383 && wi::to_widest (t1) == wi::to_widest (t2))
6384 return true;
6386 return false;
6389 /* Return true if T is an INTEGER_CST whose numerical value (extended
6390 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6392 bool
6393 tree_fits_shwi_p (const_tree t)
6395 return (t != NULL_TREE
6396 && TREE_CODE (t) == INTEGER_CST
6397 && wi::fits_shwi_p (wi::to_widest (t)));
6400 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6401 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6403 bool
6404 tree_fits_poly_int64_p (const_tree t)
6406 if (t == NULL_TREE)
6407 return false;
6408 if (POLY_INT_CST_P (t))
6410 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6411 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6412 return false;
6413 return true;
6415 return (TREE_CODE (t) == INTEGER_CST
6416 && wi::fits_shwi_p (wi::to_widest (t)));
6419 /* Return true if T is an INTEGER_CST whose numerical value (extended
6420 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6422 bool
6423 tree_fits_uhwi_p (const_tree t)
6425 return (t != NULL_TREE
6426 && TREE_CODE (t) == INTEGER_CST
6427 && wi::fits_uhwi_p (wi::to_widest (t)));
6430 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6431 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6433 bool
6434 tree_fits_poly_uint64_p (const_tree t)
6436 if (t == NULL_TREE)
6437 return false;
6438 if (POLY_INT_CST_P (t))
6440 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6441 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6442 return false;
6443 return true;
6445 return (TREE_CODE (t) == INTEGER_CST
6446 && wi::fits_uhwi_p (wi::to_widest (t)));
6449 /* T is an INTEGER_CST whose numerical value (extended according to
6450 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6451 HOST_WIDE_INT. */
6453 HOST_WIDE_INT
6454 tree_to_shwi (const_tree t)
6456 gcc_assert (tree_fits_shwi_p (t));
6457 return TREE_INT_CST_LOW (t);
6460 /* T is an INTEGER_CST whose numerical value (extended according to
6461 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6462 HOST_WIDE_INT. */
6464 unsigned HOST_WIDE_INT
6465 tree_to_uhwi (const_tree t)
6467 gcc_assert (tree_fits_uhwi_p (t));
6468 return TREE_INT_CST_LOW (t);
6471 /* Return the most significant (sign) bit of T. */
6473 int
6474 tree_int_cst_sign_bit (const_tree t)
6476 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6478 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6481 /* Return an indication of the sign of the integer constant T.
6482 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6483 Note that -1 will never be returned if T's type is unsigned. */
6485 int
6486 tree_int_cst_sgn (const_tree t)
6488 if (wi::to_wide (t) == 0)
6489 return 0;
6490 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6491 return 1;
6492 else if (wi::neg_p (wi::to_wide (t)))
6493 return -1;
6494 else
6495 return 1;
6498 /* Return the minimum number of bits needed to represent VALUE in a
6499 signed or unsigned type; SGN says which. */
6501 unsigned int
6502 tree_int_cst_min_precision (tree value, signop sgn)
6504 /* If the value is negative, compute its negative minus 1. The latter
6505 adjustment is because the absolute value of the largest negative value
6506 is one larger than the largest positive value. This is equivalent to
6507 a bit-wise negation, so use that operation instead. */
6509 if (tree_int_cst_sgn (value) < 0)
6510 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6512 /* Return the number of bits needed, taking into account the fact
6513 that we need one more bit for a signed than unsigned type.
6514 If value is 0 or -1, the minimum precision is 1 no matter
6515 whether SGN is SIGNED or UNSIGNED. */
6517 if (integer_zerop (value))
6518 return 1;
6519 else
6520 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
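/* Worked example (added for exposition, not from the original sources):
   for VALUE 7, tree_floor_log2 (7) is 2, so the minimum precision is
   3 bits for UNSIGNED and 4 bits for SIGNED; for VALUE -8 the bit-wise
   negation above yields 7, giving 4 bits for a signed representation.  */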
6523 /* Return truthvalue of whether T1 is the same tree structure as T2.
6524 Return 1 if they are the same.
6525 Return 0 if they are understandably different.
6526 Return -1 if either contains tree structure not understood by
6527 this function. */
6529 int
6530 simple_cst_equal (const_tree t1, const_tree t2)
6532 enum tree_code code1, code2;
6533 int cmp;
6534 int i;
6536 if (t1 == t2)
6537 return 1;
6538 if (t1 == 0 || t2 == 0)
6539 return 0;
6541 /* For location wrappers to be the same, they must be at the same
6542 source location (and wrap the same thing). */
6543 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6545 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6546 return 0;
6547 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6550 code1 = TREE_CODE (t1);
6551 code2 = TREE_CODE (t2);
6553 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6555 if (CONVERT_EXPR_CODE_P (code2)
6556 || code2 == NON_LVALUE_EXPR)
6557 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6558 else
6559 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6562 else if (CONVERT_EXPR_CODE_P (code2)
6563 || code2 == NON_LVALUE_EXPR)
6564 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6566 if (code1 != code2)
6567 return 0;
6569 switch (code1)
6571 case INTEGER_CST:
6572 return wi::to_widest (t1) == wi::to_widest (t2);
6574 case REAL_CST:
6575 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6577 case FIXED_CST:
6578 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6580 case STRING_CST:
6581 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6582 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6583 TREE_STRING_LENGTH (t1)));
6585 case CONSTRUCTOR:
6587 unsigned HOST_WIDE_INT idx;
6588 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6589 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6591 if (vec_safe_length (v1) != vec_safe_length (v2))
6592 return false;
6594 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6595 /* ??? Should we handle also fields here? */
6596 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6597 return false;
6598 return true;
6601 case SAVE_EXPR:
6602 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6604 case CALL_EXPR:
6605 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6606 if (cmp <= 0)
6607 return cmp;
6608 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6609 return 0;
6611 const_tree arg1, arg2;
6612 const_call_expr_arg_iterator iter1, iter2;
6613 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6614 arg2 = first_const_call_expr_arg (t2, &iter2);
6615 arg1 && arg2;
6616 arg1 = next_const_call_expr_arg (&iter1),
6617 arg2 = next_const_call_expr_arg (&iter2))
6619 cmp = simple_cst_equal (arg1, arg2);
6620 if (cmp <= 0)
6621 return cmp;
6623 return arg1 == arg2;
6626 case TARGET_EXPR:
6627 /* Special case: if either target is an unallocated VAR_DECL,
6628 it means that it's going to be unified with whatever the
6629 TARGET_EXPR is really supposed to initialize, so treat it
6630 as being equivalent to anything. */
6631 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6632 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6633 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6634 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6635 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6636 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6637 cmp = 1;
6638 else
6639 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6641 if (cmp <= 0)
6642 return cmp;
6644 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6646 case WITH_CLEANUP_EXPR:
6647 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6648 if (cmp <= 0)
6649 return cmp;
6651 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6653 case COMPONENT_REF:
6654 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6655 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6657 return 0;
6659 case VAR_DECL:
6660 case PARM_DECL:
6661 case CONST_DECL:
6662 case FUNCTION_DECL:
6663 return 0;
6665 default:
6666 if (POLY_INT_CST_P (t1))
6667 /* A false return means maybe_ne rather than known_ne. */
6668 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6669 TYPE_SIGN (TREE_TYPE (t1))),
6670 poly_widest_int::from (poly_int_cst_value (t2),
6671 TYPE_SIGN (TREE_TYPE (t2))));
6672 break;
6675 /* This general rule works for most tree codes. All exceptions should be
6676 handled above. If this is a language-specific tree code, we can't
6677 trust what might be in the operand, so say we don't know
6678 the situation. */
6679 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6680 return -1;
6682 switch (TREE_CODE_CLASS (code1))
6684 case tcc_unary:
6685 case tcc_binary:
6686 case tcc_comparison:
6687 case tcc_expression:
6688 case tcc_reference:
6689 case tcc_statement:
6690 cmp = 1;
6691 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6693 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6694 if (cmp <= 0)
6695 return cmp;
6698 return cmp;
6700 default:
6701 return -1;
6705 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6706 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6707 than U, respectively. */
6709 int
6710 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6712 if (tree_int_cst_sgn (t) < 0)
6713 return -1;
6714 else if (!tree_fits_uhwi_p (t))
6715 return 1;
6716 else if (TREE_INT_CST_LOW (t) == u)
6717 return 0;
6718 else if (TREE_INT_CST_LOW (t) < u)
6719 return -1;
6720 else
6721 return 1;
6724 /* Return true if SIZE represents a constant size that is in bounds of
6725 what the middle-end and the backend accept (covering not more than
6726 half of the address-space).
6727 When PERR is non-null, set *PERR on failure to the description of
6728 why SIZE is not valid. */
6730 bool
6731 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6733 if (POLY_INT_CST_P (size))
6735 if (TREE_OVERFLOW (size))
6736 return false;
6737 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6738 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6739 return false;
6740 return true;
6743 cst_size_error error;
6744 if (!perr)
6745 perr = &error;
6747 if (TREE_CODE (size) != INTEGER_CST)
6749 *perr = cst_size_not_constant;
6750 return false;
6753 if (TREE_OVERFLOW_P (size))
6755 *perr = cst_size_overflow;
6756 return false;
6759 if (tree_int_cst_sgn (size) < 0)
6761 *perr = cst_size_negative;
6762 return false;
6764 if (!tree_fits_uhwi_p (size)
6765 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6766 < wi::to_widest (size) * 2))
6768 *perr = cst_size_too_big;
6769 return false;
6772 return true;
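/* Worked example of the bound above (added for exposition, not from the
   original sources): the final check requires SIZE * 2 to be no larger
   than the maximum value of sizetype, so on a target where sizetype is
   64 bits wide the largest accepted constant size is 2**63 - 1 bytes,
   i.e. at most half the address space.  */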
6775 /* Return the precision of the type, or for a complex or vector type the
6776 precision of the type of its elements. */
6778 unsigned int
6779 element_precision (const_tree type)
6781 if (!TYPE_P (type))
6782 type = TREE_TYPE (type);
6783 enum tree_code code = TREE_CODE (type);
6784 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6785 type = TREE_TYPE (type);
6787 return TYPE_PRECISION (type);
6790 /* Return true if CODE represents an associative tree code. Otherwise
6791 return false. */
6792 bool
6793 associative_tree_code (enum tree_code code)
6795 switch (code)
6797 case BIT_IOR_EXPR:
6798 case BIT_AND_EXPR:
6799 case BIT_XOR_EXPR:
6800 case PLUS_EXPR:
6801 case MULT_EXPR:
6802 case MIN_EXPR:
6803 case MAX_EXPR:
6804 return true;
6806 default:
6807 break;
6809 return false;
6812 /* Return true if CODE represents a commutative tree code. Otherwise
6813 return false. */
6814 bool
6815 commutative_tree_code (enum tree_code code)
6817 switch (code)
6819 case PLUS_EXPR:
6820 case MULT_EXPR:
6821 case MULT_HIGHPART_EXPR:
6822 case MIN_EXPR:
6823 case MAX_EXPR:
6824 case BIT_IOR_EXPR:
6825 case BIT_XOR_EXPR:
6826 case BIT_AND_EXPR:
6827 case NE_EXPR:
6828 case EQ_EXPR:
6829 case UNORDERED_EXPR:
6830 case ORDERED_EXPR:
6831 case UNEQ_EXPR:
6832 case LTGT_EXPR:
6833 case TRUTH_AND_EXPR:
6834 case TRUTH_XOR_EXPR:
6835 case TRUTH_OR_EXPR:
6836 case WIDEN_MULT_EXPR:
6837 case VEC_WIDEN_MULT_HI_EXPR:
6838 case VEC_WIDEN_MULT_LO_EXPR:
6839 case VEC_WIDEN_MULT_EVEN_EXPR:
6840 case VEC_WIDEN_MULT_ODD_EXPR:
6841 return true;
6843 default:
6844 break;
6846 return false;
6849 /* Return true if CODE represents a ternary tree code for which the
6850 first two operands are commutative. Otherwise return false. */
6851 bool
6852 commutative_ternary_tree_code (enum tree_code code)
6854 switch (code)
6856 case WIDEN_MULT_PLUS_EXPR:
6857 case WIDEN_MULT_MINUS_EXPR:
6858 case DOT_PROD_EXPR:
6859 return true;
6861 default:
6862 break;
6864 return false;
6867 /* Returns true if CODE can overflow. */
6869 bool
6870 operation_can_overflow (enum tree_code code)
6872 switch (code)
6874 case PLUS_EXPR:
6875 case MINUS_EXPR:
6876 case MULT_EXPR:
6877 case LSHIFT_EXPR:
6878 /* Can overflow in various ways. */
6879 return true;
6880 case TRUNC_DIV_EXPR:
6881 case EXACT_DIV_EXPR:
6882 case FLOOR_DIV_EXPR:
6883 case CEIL_DIV_EXPR:
6884 /* For INT_MIN / -1. */
6885 return true;
6886 case NEGATE_EXPR:
6887 case ABS_EXPR:
6888 /* For -INT_MIN. */
6889 return true;
6890 default:
6891 /* These operators cannot overflow. */
6892 return false;
6896 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6897 -ftrapv doesn't generate trapping insns for CODE. */
6899 bool
6900 operation_no_trapping_overflow (tree type, enum tree_code code)
6902 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6904 /* We don't generate instructions that trap on overflow for complex or vector
6905 types. */
6906 if (!INTEGRAL_TYPE_P (type))
6907 return true;
6909 if (!TYPE_OVERFLOW_TRAPS (type))
6910 return true;
6912 switch (code)
6914 case PLUS_EXPR:
6915 case MINUS_EXPR:
6916 case MULT_EXPR:
6917 case NEGATE_EXPR:
6918 case ABS_EXPR:
6919 /* These operators can overflow, and -ftrapv generates trapping code for
6920 these. */
6921 return false;
6922 case TRUNC_DIV_EXPR:
6923 case EXACT_DIV_EXPR:
6924 case FLOOR_DIV_EXPR:
6925 case CEIL_DIV_EXPR:
6926 case LSHIFT_EXPR:
6927 /* These operators can overflow, but -ftrapv does not generate trapping
6928 code for these. */
6929 return true;
6930 default:
6931 /* These operators cannot overflow. */
6932 return true;
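/* Illustrative example (added for exposition, not from the original
   sources): with -ftrapv and a signed int TYPE, PLUS_EXPR returns false
   here because -ftrapv emits overflow-trapping code for addition, while
   TRUNC_DIV_EXPR returns true, since -ftrapv emits no trapping code for
   division even though INT_MIN / -1 can overflow.  */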
6936 /* Constructors for pointer, array and function types.
6937 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6938 constructed by language-dependent code, not here.) */
6940 /* Construct, lay out and return the type of pointers to TO_TYPE with
6941 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6942 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6943 indicate this type can reference all of memory. If such a type has
6944 already been constructed, reuse it. */
6946 tree
6947 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6948 bool can_alias_all)
6950 tree t;
6951 bool could_alias = can_alias_all;
6953 if (to_type == error_mark_node)
6954 return error_mark_node;
6956 if (mode == VOIDmode)
6958 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6959 mode = targetm.addr_space.pointer_mode (as);
6962 /* If the pointed-to type has the may_alias attribute set, force
6963 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6964 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6965 can_alias_all = true;
6967 /* In some cases, languages will have things that aren't a POINTER_TYPE
6968 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6969 In that case, return that type without regard to the rest of our
6970 operands.
6972 ??? This is a kludge, but consistent with the way this function has
6973 always operated and there doesn't seem to be a good way to avoid this
6974 at the moment. */
6975 if (TYPE_POINTER_TO (to_type) != 0
6976 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6977 return TYPE_POINTER_TO (to_type);
6979 /* First, if we already have a type for pointers to TO_TYPE and it's
6980 the proper mode, use it. */
6981 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6982 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6983 return t;
6985 t = make_node (POINTER_TYPE);
6987 TREE_TYPE (t) = to_type;
6988 SET_TYPE_MODE (t, mode);
6989 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6990 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6991 TYPE_POINTER_TO (to_type) = t;
6993 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6994 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6995 SET_TYPE_STRUCTURAL_EQUALITY (t);
6996 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6997 TYPE_CANONICAL (t)
6998 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6999 mode, false);
7001 /* Lay out the type. This function has many callers that are concerned
7002 with expression-construction, and this simplifies them all. */
7003 layout_type (t);
7005 return t;
7008 /* By default build pointers in ptr_mode. */
7010 tree
7011 build_pointer_type (tree to_type)
7013 return build_pointer_type_for_mode (to_type, VOIDmode, false);
7016 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7018 tree
7019 build_reference_type_for_mode (tree to_type, machine_mode mode,
7020 bool can_alias_all)
7022 tree t;
7023 bool could_alias = can_alias_all;
7025 if (to_type == error_mark_node)
7026 return error_mark_node;
7028 if (mode == VOIDmode)
7030 addr_space_t as = TYPE_ADDR_SPACE (to_type);
7031 mode = targetm.addr_space.pointer_mode (as);
7034 /* If the pointed-to type has the may_alias attribute set, force
7035 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7036 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7037 can_alias_all = true;
7039 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7040 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7041 In that case, return that type without regard to the rest of our
7042 operands.
7044 ??? This is a kludge, but consistent with the way this function has
7045 always operated and there doesn't seem to be a good way to avoid this
7046 at the moment. */
7047 if (TYPE_REFERENCE_TO (to_type) != 0
7048 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7049 return TYPE_REFERENCE_TO (to_type);
7051 /* First, if we already have a type for pointers to TO_TYPE and it's
7052 the proper mode, use it. */
7053 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7054 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7055 return t;
7057 t = make_node (REFERENCE_TYPE);
7059 TREE_TYPE (t) = to_type;
7060 SET_TYPE_MODE (t, mode);
7061 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7062 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7063 TYPE_REFERENCE_TO (to_type) = t;
7065 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7066 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7067 SET_TYPE_STRUCTURAL_EQUALITY (t);
7068 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7069 TYPE_CANONICAL (t)
7070 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7071 mode, false);
7073 layout_type (t);
7075 return t;
7079 /* Build the node for the type of references-to-TO_TYPE by default
7080 in ptr_mode. */
7082 tree
7083 build_reference_type (tree to_type)
7085 return build_reference_type_for_mode (to_type, VOIDmode, false);
7088 #define MAX_INT_CACHED_PREC \
7089 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7090 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7092 static void
7093 clear_nonstandard_integer_type_cache (void)
7095 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7097 nonstandard_integer_type_cache[i] = NULL;
7101 /* Builds a signed or unsigned integer type of precision PRECISION.
7102 Used for C bitfields whose precision does not match that of
7103 built-in target types. */
7104 tree
7105 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7106 int unsignedp)
7108 tree itype, ret;
7110 if (unsignedp)
7111 unsignedp = MAX_INT_CACHED_PREC + 1;
7113 if (precision <= MAX_INT_CACHED_PREC)
7115 itype = nonstandard_integer_type_cache[precision + unsignedp];
7116 if (itype)
7117 return itype;
7120 itype = make_node (INTEGER_TYPE);
7121 TYPE_PRECISION (itype) = precision;
7123 if (unsignedp)
7124 fixup_unsigned_type (itype);
7125 else
7126 fixup_signed_type (itype);
7128 inchash::hash hstate;
7129 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7130 ret = type_hash_canon (hstate.end (), itype);
7131 if (precision <= MAX_INT_CACHED_PREC)
7132 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7134 return ret;
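/* Illustrative example (added for exposition, not from the original
   sources): a C bit-field such as "unsigned int x : 3;" may end up here
   as build_nonstandard_integer_type (3, 1); later requests with the same
   precision and signedness are served from
   nonstandard_integer_type_cache without re-hashing.  */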
7137 #define MAX_BOOL_CACHED_PREC \
7138 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7139 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7141 /* Builds a boolean type of precision PRECISION.
7142 Used for boolean vectors to choose proper vector element size. */
7143 tree
7144 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7146 tree type;
7148 if (precision <= MAX_BOOL_CACHED_PREC)
7150 type = nonstandard_boolean_type_cache[precision];
7151 if (type)
7152 return type;
7155 type = make_node (BOOLEAN_TYPE);
7156 TYPE_PRECISION (type) = precision;
7157 fixup_signed_type (type);
7159 if (precision <= MAX_BOOL_CACHED_PREC)
7160 nonstandard_boolean_type_cache[precision] = type;
7162 return type;
7165 static GTY(()) vec<tree, va_gc> *bitint_type_cache;
7167 /* Builds a signed or unsigned _BitInt(PRECISION) type. */
7168 tree
7169 build_bitint_type (unsigned HOST_WIDE_INT precision, int unsignedp)
7171 tree itype, ret;
7173 gcc_checking_assert (precision >= 1 + !unsignedp);
7175 if (unsignedp)
7176 unsignedp = MAX_INT_CACHED_PREC + 1;
7178 if (bitint_type_cache == NULL)
7179 vec_safe_grow_cleared (bitint_type_cache, 2 * MAX_INT_CACHED_PREC + 2);
7181 if (precision <= MAX_INT_CACHED_PREC)
7183 itype = (*bitint_type_cache)[precision + unsignedp];
7184 if (itype)
7185 return itype;
7188 itype = make_node (BITINT_TYPE);
7189 TYPE_PRECISION (itype) = precision;
7191 if (unsignedp)
7192 fixup_unsigned_type (itype);
7193 else
7194 fixup_signed_type (itype);
7196 inchash::hash hstate;
7197 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7198 ret = type_hash_canon (hstate.end (), itype);
7199 if (precision <= MAX_INT_CACHED_PREC)
7200 (*bitint_type_cache)[precision + unsignedp] = ret;
7202 return ret;
7205 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7206 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7207 is true, reuse such a type that has already been constructed. */
7209 static tree
7210 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7212 tree itype = make_node (INTEGER_TYPE);
7214 TREE_TYPE (itype) = type;
7216 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7217 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7219 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7220 SET_TYPE_MODE (itype, TYPE_MODE (type));
7221 TYPE_SIZE (itype) = TYPE_SIZE (type);
7222 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7223 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7224 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7225 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7227 if (!shared)
7228 return itype;
7230 if ((TYPE_MIN_VALUE (itype)
7231 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7232 || (TYPE_MAX_VALUE (itype)
7233 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7235 /* Since we cannot reliably merge this type, we need to compare it using
7236 structural equality checks. */
7237 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7238 return itype;
7241 hashval_t hash = type_hash_canon_hash (itype);
7242 itype = type_hash_canon (hash, itype);
7244 return itype;
7247 /* Wrapper around build_range_type_1 with SHARED set to true. */
7249 tree
7250 build_range_type (tree type, tree lowval, tree highval)
7252 return build_range_type_1 (type, lowval, highval, true);
7255 /* Wrapper around build_range_type_1 with SHARED set to false. */
7257 tree
7258 build_nonshared_range_type (tree type, tree lowval, tree highval)
7260 return build_range_type_1 (type, lowval, highval, false);
7263 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7264 MAXVAL should be the maximum value in the domain
7265 (one less than the length of the array).
7267 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7268 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7269 The limit exists because the result is a signed type and we don't handle
7270 sizes that use more than one HOST_WIDE_INT. */
7272 tree
7273 build_index_type (tree maxval)
7275 return build_range_type (sizetype, size_zero_node, maxval);
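/* Illustrative example (added for exposition, not from the original
   sources): for a 10-element array the caller passes the maximum index,
   e.g. build_index_type (size_int (9)), yielding the sizetype range
   [0, 9] that becomes the array's TYPE_DOMAIN.  */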
7278 /* Return true if the debug information for TYPE, a subtype, should be emitted
7279 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7280 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7281 debug info and doesn't reflect the source code. */
7283 bool
7284 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7286 tree base_type = TREE_TYPE (type), low, high;
7288 /* Subrange types have a base type which is an integral type. */
7289 if (!INTEGRAL_TYPE_P (base_type))
7290 return false;
7292 /* Get the real bounds of the subtype. */
7293 if (lang_hooks.types.get_subrange_bounds)
7294 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7295 else
7297 low = TYPE_MIN_VALUE (type);
7298 high = TYPE_MAX_VALUE (type);
7301 /* If the type and its base type have the same representation and the same
7302 name, then the type is not a subrange but a copy of the base type. */
7303 if ((TREE_CODE (base_type) == INTEGER_TYPE
7304 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7305 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7306 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7307 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7308 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7309 return false;
7311 if (lowval)
7312 *lowval = low;
7313 if (highval)
7314 *highval = high;
7315 return true;
7318 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7319 and number of elements specified by the range of values of INDEX_TYPE.
7320 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7321 If SHARED is true, reuse such a type that has already been constructed.
7322 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7324 tree
7325 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7326 bool shared, bool set_canonical)
7328 tree t;
7330 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7332 error ("arrays of functions are not meaningful");
7333 elt_type = integer_type_node;
7336 t = make_node (ARRAY_TYPE);
7337 TREE_TYPE (t) = elt_type;
7338 TYPE_DOMAIN (t) = index_type;
7339 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7340 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7341 layout_type (t);
7343 if (shared)
7345 hashval_t hash = type_hash_canon_hash (t);
7346 t = type_hash_canon (hash, t);
7349 if (TYPE_CANONICAL (t) == t && set_canonical)
7351 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7352 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7353 || in_lto_p)
7354 SET_TYPE_STRUCTURAL_EQUALITY (t);
7355 else if (TYPE_CANONICAL (elt_type) != elt_type
7356 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7357 TYPE_CANONICAL (t)
7358 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7359 index_type
7360 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7361 typeless_storage, shared, set_canonical);
7364 return t;
7367 /* Wrapper around build_array_type_1 with SHARED set to true. */
7369 tree
7370 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7372 return
7373 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7376 /* Wrapper around build_array_type_1 with SHARED set to false. */
7378 tree
7379 build_nonshared_array_type (tree elt_type, tree index_type)
7381 return build_array_type_1 (elt_type, index_type, false, false, true);
7384 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7385 sizetype. */
7387 tree
7388 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7390 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7393 /* Computes the canonical argument types from the argument type list
7394 ARGTYPES.
7396 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7397 on entry to this function, or if any of the ARGTYPES are
7398 structural.
7400 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7401 true on entry to this function, or if any of the ARGTYPES are
7402 non-canonical.
7404 Returns a canonical argument list, which may be ARGTYPES when the
7405 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7406 true) or would not differ from ARGTYPES. */
7408 static tree
7409 maybe_canonicalize_argtypes (tree argtypes,
7410 bool *any_structural_p,
7411 bool *any_noncanonical_p)
7413 tree arg;
7414 bool any_noncanonical_argtypes_p = false;
7416 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7418 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7419 /* Fail gracefully by stating that the type is structural. */
7420 *any_structural_p = true;
7421 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7422 *any_structural_p = true;
7423 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7424 || TREE_PURPOSE (arg))
7425 /* If the argument has a default argument, we consider it
7426 non-canonical even though the type itself is canonical.
7427 That way, different variants of function and method types
7428 with default arguments will all point to the variant with
7429 no defaults as their canonical type. */
7430 any_noncanonical_argtypes_p = true;
7433 if (*any_structural_p)
7434 return argtypes;
7436 if (any_noncanonical_argtypes_p)
7438 /* Build the canonical list of argument types. */
7439 tree canon_argtypes = NULL_TREE;
7440 bool is_void = false;
7442 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7444 if (arg == void_list_node)
7445 is_void = true;
7446 else
7447 canon_argtypes = tree_cons (NULL_TREE,
7448 TYPE_CANONICAL (TREE_VALUE (arg)),
7449 canon_argtypes);
7452 canon_argtypes = nreverse (canon_argtypes);
7453 if (is_void)
7454 canon_argtypes = chainon (canon_argtypes, void_list_node);
7456 /* There is a non-canonical type. */
7457 *any_noncanonical_p = true;
7458 return canon_argtypes;
7461 /* The canonical argument types are the same as ARGTYPES. */
7462 return argtypes;
7465 /* Construct, lay out and return
7466 the type of functions returning type VALUE_TYPE
7467 given arguments of types ARG_TYPES.
7468 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7469 are data type nodes for the arguments of the function.
7470 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7471 variable-arguments function with (...) prototype (no named arguments).
7472 If such a type has already been constructed, reuse it. */
7474 tree
7475 build_function_type (tree value_type, tree arg_types,
7476 bool no_named_args_stdarg_p)
7478 tree t;
7479 inchash::hash hstate;
7480 bool any_structural_p, any_noncanonical_p;
7481 tree canon_argtypes;
7483 gcc_assert (arg_types != error_mark_node);
7485 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7487 error ("function return type cannot be function");
7488 value_type = integer_type_node;
7491 /* Make a node of the sort we want. */
7492 t = make_node (FUNCTION_TYPE);
7493 TREE_TYPE (t) = value_type;
7494 TYPE_ARG_TYPES (t) = arg_types;
7495 if (no_named_args_stdarg_p)
7497 gcc_assert (arg_types == NULL_TREE);
7498 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7501 /* If we already have such a type, use the old one. */
7502 hashval_t hash = type_hash_canon_hash (t);
7503 t = type_hash_canon (hash, t);
7505 /* Set up the canonical type. */
7506 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7507 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7508 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7509 &any_structural_p,
7510 &any_noncanonical_p);
7511 if (any_structural_p)
7512 SET_TYPE_STRUCTURAL_EQUALITY (t);
7513 else if (any_noncanonical_p)
7514 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7515 canon_argtypes);
7517 if (!COMPLETE_TYPE_P (t))
7518 layout_type (t);
7519 return t;
7522 /* Build a function type. The RETURN_TYPE is the type returned by the
7523 function. If VAARGS is set, no void_type_node is appended to the
7524 list. ARGP must always be terminated by a NULL_TREE. */
7526 static tree
7527 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7529 tree t, args, last;
7531 t = va_arg (argp, tree);
7532 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7533 args = tree_cons (NULL_TREE, t, args);
7535 if (vaargs)
7537 last = args;
7538 if (args != NULL_TREE)
7539 args = nreverse (args);
7540 gcc_assert (last != void_list_node);
7542 else if (args == NULL_TREE)
7543 args = void_list_node;
7544 else
7546 last = args;
7547 args = nreverse (args);
7548 TREE_CHAIN (last) = void_list_node;
7550 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7552 return args;
7555 /* Build a function type. The RETURN_TYPE is the type returned by the
7556 function. If additional arguments are provided, they are
7557 additional argument types. The list of argument types must always
7558 be terminated by NULL_TREE. */
7560 tree
7561 build_function_type_list (tree return_type, ...)
7563 tree args;
7564 va_list p;
7566 va_start (p, return_type);
7567 args = build_function_type_list_1 (false, return_type, p);
7568 va_end (p);
7569 return args;
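/* Illustrative example (added for exposition, not from the original
   sources): the type of "double hypot (double, double)" can be built as

     build_function_type_list (double_type_node, double_type_node,
                               double_type_node, NULL_TREE);

   the NULL_TREE sentinel ends the argument list, and the non-variadic
   list is terminated internally with void_list_node.  */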
7572 /* Build a variable argument function type. The RETURN_TYPE is the
7573 type returned by the function. If additional arguments are provided,
7574 they are additional argument types. The list of argument types must
7575 always be terminated by NULL_TREE. */
7577 tree
7578 build_varargs_function_type_list (tree return_type, ...)
7580 tree args;
7581 va_list p;
7583 va_start (p, return_type);
7584 args = build_function_type_list_1 (true, return_type, p);
7585 va_end (p);
7587 return args;
7590 /* Build a function type. RETURN_TYPE is the type returned by the
7591 function; VAARGS indicates whether the function takes varargs. The
7592 function takes N named arguments, the types of which are provided in
7593 ARG_TYPES. */
7595 static tree
7596 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7597 tree *arg_types)
7599 int i;
7600 tree t = vaargs ? NULL_TREE : void_list_node;
7602 for (i = n - 1; i >= 0; i--)
7603 t = tree_cons (NULL_TREE, arg_types[i], t);
7605 return build_function_type (return_type, t, vaargs && n == 0);
7608 /* Build a function type. RETURN_TYPE is the type returned by the
7609 function. The function takes N named arguments, the types of which
7610 are provided in ARG_TYPES. */
7612 tree
7613 build_function_type_array (tree return_type, int n, tree *arg_types)
7615 return build_function_type_array_1 (false, return_type, n, arg_types);
7618 /* Build a variable argument function type. RETURN_TYPE is the type
7619 returned by the function. The function takes N named arguments, the
7620 types of which are provided in ARG_TYPES. */
7622 tree
7623 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7625 return build_function_type_array_1 (true, return_type, n, arg_types);
7628 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7629 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7630 for the method. An implicit additional parameter (of type
7631 pointer-to-BASETYPE) is added to the ARGTYPES. */
7633 tree
7634 build_method_type_directly (tree basetype,
7635 tree rettype,
7636 tree argtypes)
7638 tree t;
7639 tree ptype;
7640 bool any_structural_p, any_noncanonical_p;
7641 tree canon_argtypes;
7643 /* Make a node of the sort we want. */
7644 t = make_node (METHOD_TYPE);
7646 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7647 TREE_TYPE (t) = rettype;
7648 ptype = build_pointer_type (basetype);
7650 /* The actual arglist for this function includes a "hidden" argument
7651 which is "this". Put it into the list of argument types. */
7652 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7653 TYPE_ARG_TYPES (t) = argtypes;
7655 /* If we already have such a type, use the old one. */
7656 hashval_t hash = type_hash_canon_hash (t);
7657 t = type_hash_canon (hash, t);
7659 /* Set up the canonical type. */
7660 any_structural_p
7661 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7662 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7663 any_noncanonical_p
7664 = (TYPE_CANONICAL (basetype) != basetype
7665 || TYPE_CANONICAL (rettype) != rettype);
7666 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7667 &any_structural_p,
7668 &any_noncanonical_p);
7669 if (any_structural_p)
7670 SET_TYPE_STRUCTURAL_EQUALITY (t);
7671 else if (any_noncanonical_p)
7672 TYPE_CANONICAL (t)
7673 = build_method_type_directly (TYPE_CANONICAL (basetype),
7674 TYPE_CANONICAL (rettype),
7675 canon_argtypes);
7676 if (!COMPLETE_TYPE_P (t))
7677 layout_type (t);
7679 return t;
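/* Illustrative example (added for exposition, not from the original
   sources): for a member function "int S::f (int)",
   build_method_type_directly receives the RECORD_TYPE for S,
   integer_type_node and the (int) argument list; it records S as
   TYPE_METHOD_BASETYPE and prepends the implicit "S *" this pointer, so
   TYPE_ARG_TYPES becomes (S *, int).  */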
7682 /* Construct, lay out and return the type of methods belonging to class
7683 BASETYPE and whose arguments and values are described by TYPE.
7684 If that type exists already, reuse it.
7685 TYPE must be a FUNCTION_TYPE node. */
7687 tree
7688 build_method_type (tree basetype, tree type)
7690 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7692 return build_method_type_directly (basetype,
7693 TREE_TYPE (type),
7694 TYPE_ARG_TYPES (type));
7697 /* Construct, lay out and return the type of offsets to a value
7698 of type TYPE, within an object of type BASETYPE.
7699 If a suitable offset type exists already, reuse it. */
7701 tree
7702 build_offset_type (tree basetype, tree type)
7704 tree t;
7706 /* Make a node of the sort we want. */
7707 t = make_node (OFFSET_TYPE);
7709 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7710 TREE_TYPE (t) = type;
7712 /* If we already have such a type, use the old one. */
7713 hashval_t hash = type_hash_canon_hash (t);
7714 t = type_hash_canon (hash, t);
7716 if (!COMPLETE_TYPE_P (t))
7717 layout_type (t);
7719 if (TYPE_CANONICAL (t) == t)
7721 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7722 || TYPE_STRUCTURAL_EQUALITY_P (type))
7723 SET_TYPE_STRUCTURAL_EQUALITY (t);
7724 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7725 || TYPE_CANONICAL (type) != type)
7726 TYPE_CANONICAL (t)
7727 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7728 TYPE_CANONICAL (type));
7731 return t;
7734 /* Create a complex type whose components are COMPONENT_TYPE.
7736 If NAMED is true, the type is given a TYPE_NAME. We do not always
7737 do so because this creates a DECL node and thus makes the DECL_UIDs
7738 dependent on the type canonicalization hashtable, which is GC-ed,
7739 so the DECL_UIDs would not be stable wrt garbage collection. */
7741 tree
7742 build_complex_type (tree component_type, bool named)
7744 gcc_assert (INTEGRAL_TYPE_P (component_type)
7745 || SCALAR_FLOAT_TYPE_P (component_type)
7746 || FIXED_POINT_TYPE_P (component_type));
7748 /* Make a node of the sort we want. */
7749 tree probe = make_node (COMPLEX_TYPE);
7751 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7753 /* If we already have such a type, use the old one. */
7754 hashval_t hash = type_hash_canon_hash (probe);
7755 tree t = type_hash_canon (hash, probe);
7757 if (t == probe)
7759 /* We created a new type. The hash insertion will have laid
7760 out the type. We need to check the canonicalization and
7761 maybe set the name. */
7762 gcc_checking_assert (COMPLETE_TYPE_P (t)
7763 && !TYPE_NAME (t)
7764 && TYPE_CANONICAL (t) == t);
7766 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7767 SET_TYPE_STRUCTURAL_EQUALITY (t);
7768 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7769 TYPE_CANONICAL (t)
7770 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7772 /* We need to create a name, since complex is a fundamental type. */
7773 if (named)
7775 const char *name = NULL;
7777 if (TREE_TYPE (t) == char_type_node)
7778 name = "complex char";
7779 else if (TREE_TYPE (t) == signed_char_type_node)
7780 name = "complex signed char";
7781 else if (TREE_TYPE (t) == unsigned_char_type_node)
7782 name = "complex unsigned char";
7783 else if (TREE_TYPE (t) == short_integer_type_node)
7784 name = "complex short int";
7785 else if (TREE_TYPE (t) == short_unsigned_type_node)
7786 name = "complex short unsigned int";
7787 else if (TREE_TYPE (t) == integer_type_node)
7788 name = "complex int";
7789 else if (TREE_TYPE (t) == unsigned_type_node)
7790 name = "complex unsigned int";
7791 else if (TREE_TYPE (t) == long_integer_type_node)
7792 name = "complex long int";
7793 else if (TREE_TYPE (t) == long_unsigned_type_node)
7794 name = "complex long unsigned int";
7795 else if (TREE_TYPE (t) == long_long_integer_type_node)
7796 name = "complex long long int";
7797 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7798 name = "complex long long unsigned int";
7800 if (name != NULL)
7801 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7802 get_identifier (name), t);
7806 return build_qualified_type (t, TYPE_QUALS (component_type));
7809 /* If TYPE is a real or complex floating-point type and the target
7810 does not directly support arithmetic on TYPE then return the wider
7811 type to be used for arithmetic on TYPE. Otherwise, return
7812 NULL_TREE. */
7814 tree
7815 excess_precision_type (tree type)
7817 /* The target can give two different responses to the question of
7818 which excess precision mode it would like depending on whether we
7819 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7821 enum excess_precision_type requested_type
7822 = (flag_excess_precision == EXCESS_PRECISION_FAST
7823 ? EXCESS_PRECISION_TYPE_FAST
7824 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7825 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7827 enum flt_eval_method target_flt_eval_method
7828 = targetm.c.excess_precision (requested_type);
7830 /* The target should not ask for unpredictable float evaluation (though
7831 it might advertise that implicitly the evaluation is unpredictable,
7832 but we don't care about that here, it will have been reported
7833 elsewhere). If it does ask for unpredictable evaluation, we have
7834 nothing to do here. */
7835 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7837 /* Nothing to do. The target has asked for all types we know about
7838 to be computed with their native precision and range. */
7839 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7840 return NULL_TREE;
7842 /* The target will promote this type in a target-dependent way, so excess
7843 precision ought to leave it alone. */
7844 if (targetm.promoted_type (type) != NULL_TREE)
7845 return NULL_TREE;
7847 machine_mode float16_type_mode = (float16_type_node
7848 ? TYPE_MODE (float16_type_node)
7849 : VOIDmode);
7850 machine_mode bfloat16_type_mode = (bfloat16_type_node
7851 ? TYPE_MODE (bfloat16_type_node)
7852 : VOIDmode);
7853 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7854 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7856 switch (TREE_CODE (type))
7858 case REAL_TYPE:
7860 machine_mode type_mode = TYPE_MODE (type);
7861 switch (target_flt_eval_method)
7863 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7864 if (type_mode == float16_type_mode
7865 || type_mode == bfloat16_type_mode)
7866 return float_type_node;
7867 break;
7868 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7869 if (type_mode == float16_type_mode
7870 || type_mode == bfloat16_type_mode
7871 || type_mode == float_type_mode)
7872 return double_type_node;
7873 break;
7874 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7875 if (type_mode == float16_type_mode
7876 || type_mode == bfloat16_type_mode
7877 || type_mode == float_type_mode
7878 || type_mode == double_type_mode)
7879 return long_double_type_node;
7880 break;
7881 default:
7882 gcc_unreachable ();
7884 break;
7886 case COMPLEX_TYPE:
7888 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7889 return NULL_TREE;
7890 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7891 switch (target_flt_eval_method)
7893 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7894 if (type_mode == float16_type_mode
7895 || type_mode == bfloat16_type_mode)
7896 return complex_float_type_node;
7897 break;
7898 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7899 if (type_mode == float16_type_mode
7900 || type_mode == bfloat16_type_mode
7901 || type_mode == float_type_mode)
7902 return complex_double_type_node;
7903 break;
7904 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7905 if (type_mode == float16_type_mode
7906 || type_mode == bfloat16_type_mode
7907 || type_mode == float_type_mode
7908 || type_mode == double_type_mode)
7909 return complex_long_double_type_node;
7910 break;
7911 default:
7912 gcc_unreachable ();
7914 break;
7916 default:
7917 break;
7920 return NULL_TREE;
7923 /* Return OP, stripped of any conversions to wider types as much as is safe.
7924 Converting the value back to OP's type makes a value equivalent to OP.
7926 If FOR_TYPE is nonzero, we return a value which, if converted to
7927 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7929 OP must have integer, real or enumeral type. Pointers are not allowed!
7931 There are some cases where the obvious value we could return
7932 would regenerate to OP if converted to OP's type,
7933 but would not extend like OP to wider types.
7934 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7935 For example, if OP is (unsigned short)(signed char)-1,
7936 we avoid returning (signed char)-1 if FOR_TYPE is int,
7937 even though extending that to an unsigned short would regenerate OP,
7938 since the result of extending (signed char)-1 to (int)
7939 is different from (int) OP. */
7941 tree
7942 get_unwidened (tree op, tree for_type)
7944 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7945 tree type = TREE_TYPE (op);
7946 unsigned final_prec
7947 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7948 int uns
7949 = (for_type != 0 && for_type != type
7950 && final_prec > TYPE_PRECISION (type)
7951 && TYPE_UNSIGNED (type));
7952 tree win = op;
7954 while (CONVERT_EXPR_P (op))
7956 int bitschange;
7958 /* TYPE_PRECISION on vector types has different meaning
7959 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7960 so avoid them here. */
7961 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7962 break;
7964 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7965 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7967 /* Truncations are many-one so cannot be removed.
7968 Unless we are later going to truncate down even farther. */
7969 if (bitschange < 0
7970 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7971 break;
7973 /* See what's inside this conversion. If we decide to strip it,
7974 we will set WIN. */
7975 op = TREE_OPERAND (op, 0);
7977 /* If we have not stripped any zero-extensions (uns is 0),
7978 we can strip any kind of extension.
7979 If we have previously stripped a zero-extension,
7980 only zero-extensions can safely be stripped.
7981 Any extension can be stripped if the bits it would produce
7982 are all going to be discarded later by truncating to FOR_TYPE. */
7984 if (bitschange > 0)
7986 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7987 win = op;
7988 /* TYPE_UNSIGNED says whether this is a zero-extension.
7989 Let's avoid computing it if it does not affect WIN
7990 and if UNS will not be needed again. */
7991 if ((uns
7992 || CONVERT_EXPR_P (op))
7993 && TYPE_UNSIGNED (TREE_TYPE (op)))
7995 uns = 1;
7996 win = op;
8001 /* If we finally reach a constant see if it fits in sth smaller and
8002 in that case convert it. */
8003 if (TREE_CODE (win) == INTEGER_CST)
8005 tree wtype = TREE_TYPE (win);
8006 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8007 if (for_type)
8008 prec = MAX (prec, final_prec);
8009 if (prec < TYPE_PRECISION (wtype))
8011 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8012 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8013 win = fold_convert (t, win);
8017 return win;
8020 /* Return OP or a simpler expression for a narrower value
8021 which can be sign-extended or zero-extended to give back OP.
8022 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8023 or 0 if the value should be sign-extended. */
8025 tree
8026 get_narrower (tree op, int *unsignedp_ptr)
8028 int uns = 0;
8029 bool first = true;
8030 tree win = op;
8031 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8033 if (TREE_CODE (op) == COMPOUND_EXPR)
8035 do
8036 op = TREE_OPERAND (op, 1);
8037 while (TREE_CODE (op) == COMPOUND_EXPR);
8038 tree ret = get_narrower (op, unsignedp_ptr);
8039 if (ret == op)
8040 return win;
8041 auto_vec <tree, 16> v;
8042 unsigned int i;
8043 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8044 op = TREE_OPERAND (op, 1))
8045 v.safe_push (op);
8046 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8047 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8048 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8049 ret);
8050 return ret;
8052 while (TREE_CODE (op) == NOP_EXPR)
8054 int bitschange
8055 = (TYPE_PRECISION (TREE_TYPE (op))
8056 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8058 /* Truncations are many-one so cannot be removed. */
8059 if (bitschange < 0)
8060 break;
8062 /* See what's inside this conversion. If we decide to strip it,
8063 we will set WIN. */
8065 if (bitschange > 0)
8067 op = TREE_OPERAND (op, 0);
8068 /* An extension: the outermost one can be stripped,
8069 but remember whether it is zero or sign extension. */
8070 if (first)
8071 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8072 /* Otherwise, if a sign extension has been stripped,
8073 only sign extensions can now be stripped;
8074 if a zero extension has been stripped, only zero-extensions. */
8075 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8076 break;
8077 first = false;
8079 else /* bitschange == 0 */
8081 /* A change in nominal type can always be stripped, but we must
8082 preserve the unsignedness. */
8083 if (first)
8084 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8085 first = false;
8086 op = TREE_OPERAND (op, 0);
8087 /* Keep trying to narrow, but don't assign op to win if it
8088 would turn an integral type into something else. */
8089 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8090 continue;
8093 win = op;
8096 if (TREE_CODE (op) == COMPONENT_REF
8097 /* Since type_for_size always gives an integer type. */
8098 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8099 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8100 /* Ensure field is laid out already. */
8101 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8102 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8104 unsigned HOST_WIDE_INT innerprec
8105 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8106 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8107 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8108 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8110 /* We can get this structure field in a narrower type that fits it,
8111 but the resulting extension to its nominal type (a fullword type)
8112 must satisfy the same conditions as for other extensions.
8114 Do this only for fields that are aligned (not bit-fields),
8115 because when bit-field insns will be used there is no
8116 advantage in doing this. */
8118 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8119 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8120 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8121 && type != 0)
8123 if (first)
8124 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8125 win = fold_convert (type, op);
8129 *unsignedp_ptr = uns;
8130 return win;
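/* A minimal usage sketch (variable names are illustrative):

     int unsignedp;
     tree inner = get_narrower (op, &unsignedp);
     if (inner != op)
       // OP is INNER extended back to TREE_TYPE (OP); UNSIGNEDP is 1
       // for a zero-extension and 0 for a sign-extension.
       ...;  */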
8133 /* Return true if integer constant C has a value that is permissible
8134 for TYPE, an integral type. */
8136 bool
8137 int_fits_type_p (const_tree c, const_tree type)
8139 tree type_low_bound, type_high_bound;
8140 bool ok_for_low_bound, ok_for_high_bound;
8141 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8143 /* Non-standard boolean types can have arbitrary precision but various
8144 transformations assume that they can only take values 0 and +/-1. */
8145 if (TREE_CODE (type) == BOOLEAN_TYPE)
8146 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8148 retry:
8149 type_low_bound = TYPE_MIN_VALUE (type);
8150 type_high_bound = TYPE_MAX_VALUE (type);
8152 /* If at least one bound of the type is a constant integer, we can check
8153 ourselves and maybe make a decision. If no such decision is possible, but
8154 this type is a subtype, try checking against that. Otherwise, use
8155 fits_to_tree_p, which checks against the precision.
8157 Compute the status for each possibly constant bound, and return if we see
 8158    one does not match.  Use ok_for_xxx_bound for this purpose: it is true
 8159    when the bound is a known constant that C satisfies, and false when the
 8160    bound is not constant (a violated constant bound returns false at once).  */
8162 /* Check if c >= type_low_bound. */
8163 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8165 if (tree_int_cst_lt (c, type_low_bound))
8166 return false;
8167 ok_for_low_bound = true;
8169 else
8170 ok_for_low_bound = false;
8172 /* Check if c <= type_high_bound. */
8173 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8175 if (tree_int_cst_lt (type_high_bound, c))
8176 return false;
8177 ok_for_high_bound = true;
8179 else
8180 ok_for_high_bound = false;
8182 /* If the constant fits both bounds, the result is known. */
8183 if (ok_for_low_bound && ok_for_high_bound)
8184 return true;
8186 /* Perform some generic filtering which may allow making a decision
8187 even if the bounds are not constant. First, negative integers
 8188      never fit in unsigned types.  */
8189 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8190 return false;
8192 /* Second, narrower types always fit in wider ones. */
8193 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8194 return true;
8196 /* Third, unsigned integers with top bit set never fit signed types. */
8197 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8199 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8200 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8202 /* When a tree_cst is converted to a wide-int, the precision
8203 is taken from the type. However, if the precision of the
8204 mode underneath the type is smaller than that, it is
8205 possible that the value will not fit. The test below
8206 fails if any bit is set between the sign bit of the
8207 underlying mode and the top bit of the type. */
8208 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8209 return false;
8211 else if (wi::neg_p (wi::to_wide (c)))
8212 return false;
 8215   /* If we haven't been able to decide at this point, there is nothing more we
8216 can check ourselves here. Look at the base type if we have one and it
8217 has the same precision. */
8218 if (TREE_CODE (type) == INTEGER_TYPE
8219 && TREE_TYPE (type) != 0
8220 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8222 type = TREE_TYPE (type);
8223 goto retry;
8226 /* Or to fits_to_tree_p, if nothing else. */
8227 return wi::fits_to_tree_p (wi::to_wide (c), type);
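/* A short usage sketch (VAL is an illustrative INTEGER_CST):

     if (int_fits_type_p (val, unsigned_char_type_node))
       val = fold_convert (unsigned_char_type_node, val);  */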
8230 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8231 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8232 represented (assuming two's-complement arithmetic) within the bit
8233 precision of the type are returned instead. */
8235 void
8236 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8238 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8239 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8240 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8241 else
8243 if (TYPE_UNSIGNED (type))
8244 mpz_set_ui (min, 0);
8245 else
8247 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8248 wi::to_mpz (mn, min, SIGNED);
8252 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8253 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8254 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8255 else
8257 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8258 wi::to_mpz (mn, max, TYPE_SIGN (type));
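/* A short usage sketch; the GMP values are owned by the caller:

     mpz_t minv, maxv;
     mpz_init (minv);
     mpz_init (maxv);
     get_type_static_bounds (type, minv, maxv);
     // ... compare with mpz_cmp, etc. ...
     mpz_clear (minv);
     mpz_clear (maxv);  */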
8262 /* Return true if VAR is an automatic variable. */
8264 bool
8265 auto_var_p (const_tree var)
8267 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8268 || TREE_CODE (var) == PARM_DECL)
8269 && ! TREE_STATIC (var))
8270 || TREE_CODE (var) == RESULT_DECL);
8273 /* Return true if VAR is an automatic variable defined in function FN. */
8275 bool
8276 auto_var_in_fn_p (const_tree var, const_tree fn)
8278 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8279 && (auto_var_p (var)
8280 || TREE_CODE (var) == LABEL_DECL));
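/* A short usage sketch:

     if (auto_var_in_fn_p (var, current_function_decl))
       // VAR is a local variable, parameter, result or label of the
       // current function.
       ...;  */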
8283 /* Subprogram of following function. Called by walk_tree.
8285 Return *TP if it is an automatic variable or parameter of the
8286 function passed in as DATA. */
8288 static tree
8289 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8291 tree fn = (tree) data;
8293 if (TYPE_P (*tp))
8294 *walk_subtrees = 0;
8296 else if (DECL_P (*tp)
8297 && auto_var_in_fn_p (*tp, fn))
8298 return *tp;
8300 return NULL_TREE;
8303 /* Returns true if T is, contains, or refers to a type with variable
8304 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8305 arguments, but not the return type. If FN is nonzero, only return
8306 true if a modifier of the type or position of FN is a variable or
8307 parameter inside FN.
8309 This concept is more general than that of C99 'variably modified types':
8310 in C99, a struct type is never variably modified because a VLA may not
 8311    appear as a structure member.  However, in GNU C, code like:
8313 struct S { int i[f()]; };
8315 is valid, and other languages may define similar constructs. */
8317 bool
8318 variably_modified_type_p (tree type, tree fn)
8320 tree t;
8322 /* Test if T is either variable (if FN is zero) or an expression containing
8323 a variable in FN. If TYPE isn't gimplified, return true also if
8324 gimplify_one_sizepos would gimplify the expression into a local
8325 variable. */
8326 #define RETURN_TRUE_IF_VAR(T) \
8327 do { tree _t = (T); \
8328 if (_t != NULL_TREE \
8329 && _t != error_mark_node \
8330 && !CONSTANT_CLASS_P (_t) \
8331 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8332 && (!fn \
8333 || (!TYPE_SIZES_GIMPLIFIED (type) \
8334 && (TREE_CODE (_t) != VAR_DECL \
8335 && !CONTAINS_PLACEHOLDER_P (_t))) \
8336 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8337 return true; } while (0)
8339 if (type == error_mark_node)
8340 return false;
8342 /* If TYPE itself has variable size, it is variably modified. */
8343 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8344 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8346 switch (TREE_CODE (type))
8348 case POINTER_TYPE:
8349 case REFERENCE_TYPE:
8350 case VECTOR_TYPE:
 8352       /* Ada can have pointer types referring to themselves indirectly.  */
8352 if (TREE_VISITED (type))
8353 return false;
8354 TREE_VISITED (type) = true;
8355 if (variably_modified_type_p (TREE_TYPE (type), fn))
8357 TREE_VISITED (type) = false;
8358 return true;
8360 TREE_VISITED (type) = false;
8361 break;
8363 case FUNCTION_TYPE:
8364 case METHOD_TYPE:
8365 /* If TYPE is a function type, it is variably modified if the
8366 return type is variably modified. */
8367 if (variably_modified_type_p (TREE_TYPE (type), fn))
8368 return true;
8369 break;
8371 case INTEGER_TYPE:
8372 case REAL_TYPE:
8373 case FIXED_POINT_TYPE:
8374 case ENUMERAL_TYPE:
8375 case BOOLEAN_TYPE:
8376 /* Scalar types are variably modified if their end points
8377 aren't constant. */
8378 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8379 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8380 break;
8382 case RECORD_TYPE:
8383 case UNION_TYPE:
8384 case QUAL_UNION_TYPE:
8385 /* We can't see if any of the fields are variably-modified by the
8386 definition we normally use, since that would produce infinite
8387 recursion via pointers. */
8388 /* This is variably modified if some field's type is. */
8389 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8390 if (TREE_CODE (t) == FIELD_DECL)
8392 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8393 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8394 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8396 /* If the type is a qualified union, then the DECL_QUALIFIER
8397 of fields can also be an expression containing a variable. */
8398 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8399 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8401 /* If the field is a qualified union, then it's only a container
8402 for what's inside so we look into it. That's necessary in LTO
8403 mode because the sizes of the field tested above have been set
8404 to PLACEHOLDER_EXPRs by free_lang_data. */
8405 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8406 && variably_modified_type_p (TREE_TYPE (t), fn))
8407 return true;
8409 break;
8411 case ARRAY_TYPE:
8412 /* Do not call ourselves to avoid infinite recursion. This is
8413 variably modified if the element type is. */
8414 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8415 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8416 break;
8418 default:
8419 break;
8422 /* The current language may have other cases to check, but in general,
8423 all other types are not variably modified. */
8424 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8426 #undef RETURN_TRUE_IF_VAR
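/* A short usage sketch:

     if (variably_modified_type_p (TREE_TYPE (decl), current_function_decl))
       // The size or layout of DECL's type depends on a variable or
       // parameter of the current function (e.g. a VLA).
       ...;  */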
8429 /* Given a DECL or TYPE, return the scope in which it was declared, or
8430 NULL_TREE if there is no containing scope. */
8432 tree
8433 get_containing_scope (const_tree t)
8435 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8438 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8440 const_tree
8441 get_ultimate_context (const_tree decl)
8443 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8445 if (TREE_CODE (decl) == BLOCK)
8446 decl = BLOCK_SUPERCONTEXT (decl);
8447 else
8448 decl = get_containing_scope (decl);
8450 return decl;
8453 /* Return the innermost context enclosing DECL that is
8454 a FUNCTION_DECL, or zero if none. */
8456 tree
8457 decl_function_context (const_tree decl)
8459 tree context;
8461 if (TREE_CODE (decl) == ERROR_MARK)
8462 return 0;
8464 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8465 where we look up the function at runtime. Such functions always take
8466 a first argument of type 'pointer to real context'.
8468 C++ should really be fixed to use DECL_CONTEXT for the real context,
8469 and use something else for the "virtual context". */
8470 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8471 context
8472 = TYPE_MAIN_VARIANT
8473 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8474 else
8475 context = DECL_CONTEXT (decl);
8477 while (context && TREE_CODE (context) != FUNCTION_DECL)
8479 if (TREE_CODE (context) == BLOCK)
8480 context = BLOCK_SUPERCONTEXT (context);
8481 else
8482 context = get_containing_scope (context);
8485 return context;
8488 /* Return the innermost context enclosing DECL that is
8489 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8490 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8492 tree
8493 decl_type_context (const_tree decl)
8495 tree context = DECL_CONTEXT (decl);
8497 while (context)
8498 switch (TREE_CODE (context))
8500 case NAMESPACE_DECL:
8501 case TRANSLATION_UNIT_DECL:
8502 return NULL_TREE;
8504 case RECORD_TYPE:
8505 case UNION_TYPE:
8506 case QUAL_UNION_TYPE:
8507 return context;
8509 case TYPE_DECL:
8510 case FUNCTION_DECL:
8511 context = DECL_CONTEXT (context);
8512 break;
8514 case BLOCK:
8515 context = BLOCK_SUPERCONTEXT (context);
8516 break;
8518 default:
8519 gcc_unreachable ();
8522 return NULL_TREE;
8525 /* CALL is a CALL_EXPR. Return the declaration for the function
8526 called, or NULL_TREE if the called function cannot be
8527 determined. */
8529 tree
8530 get_callee_fndecl (const_tree call)
8532 tree addr;
8534 if (call == error_mark_node)
8535 return error_mark_node;
8537 /* It's invalid to call this function with anything but a
8538 CALL_EXPR. */
8539 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8541 /* The first operand to the CALL is the address of the function
8542 called. */
8543 addr = CALL_EXPR_FN (call);
8545 /* If there is no function, return early. */
8546 if (addr == NULL_TREE)
8547 return NULL_TREE;
8549 STRIP_NOPS (addr);
8551 /* If this is a readonly function pointer, extract its initial value. */
8552 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8553 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8554 && DECL_INITIAL (addr))
8555 addr = DECL_INITIAL (addr);
8557 /* If the address is just `&f' for some function `f', then we know
8558 that `f' is being called. */
8559 if (TREE_CODE (addr) == ADDR_EXPR
8560 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8561 return TREE_OPERAND (addr, 0);
8563 /* We couldn't figure out what was being called. */
8564 return NULL_TREE;
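/* A short usage sketch:

     tree fndecl = get_callee_fndecl (exp);
     if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY))
       // EXP is a direct call to the memcpy built-in.
       ...;  */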
 8567 /* Return true when CALL's arguments and return value match those of FNDECL,
8568 a decl of a builtin function. */
8570 static bool
8571 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8573 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8575 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8576 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8577 fndecl = decl;
8579 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8580 if (gimple_form
8581 ? !useless_type_conversion_p (TREE_TYPE (call),
8582 TREE_TYPE (TREE_TYPE (fndecl)))
8583 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8584 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8585 return false;
8587 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8588 unsigned nargs = call_expr_nargs (call);
8589 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8591 /* Variadic args follow. */
8592 if (!targs)
8593 return true;
8594 tree arg = CALL_EXPR_ARG (call, i);
8595 tree type = TREE_VALUE (targs);
8596 if (gimple_form
8597 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8598 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8600 /* For pointer arguments be more forgiving, e.g. due to
8601 FILE * vs. fileptr_type_node, or say char * vs. const char *
8602 differences etc. */
8603 if (!gimple_form
8604 && POINTER_TYPE_P (type)
8605 && POINTER_TYPE_P (TREE_TYPE (arg))
8606 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8607 continue;
8608 /* char/short integral arguments are promoted to int
8609 by several frontends if targetm.calls.promote_prototypes
8610 is true. Allow such promotion too. */
8611 if (INTEGRAL_TYPE_P (type)
8612 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8613 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8614 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8615 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8616 && (gimple_form
8617 ? useless_type_conversion_p (integer_type_node,
8618 TREE_TYPE (arg))
8619 : tree_nop_conversion_p (integer_type_node,
8620 TREE_TYPE (arg))))
8621 continue;
8622 return false;
8625 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8626 return false;
8627 return true;
8630 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8631 return the associated function code, otherwise return CFN_LAST. */
8633 combined_fn
8634 get_call_combined_fn (const_tree call)
8636 /* It's invalid to call this function with anything but a CALL_EXPR. */
8637 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8639 if (!CALL_EXPR_FN (call))
8640 return as_combined_fn (CALL_EXPR_IFN (call));
8642 tree fndecl = get_callee_fndecl (call);
8643 if (fndecl
8644 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8645 && tree_builtin_call_types_compatible_p (call, fndecl))
8646 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8648 return CFN_LAST;
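/* A short usage sketch: a combined_fn lets callers handle a built-in
   call and the corresponding internal function in one switch:

     switch (get_call_combined_fn (exp))
       {
       case CFN_BUILT_IN_SQRT:
       case CFN_SQRT:
         // sqrt, whether written as __builtin_sqrt or as IFN_SQRT.
         break;
       default:
         break;
       }  */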
8651 /* Comparator of indices based on tree_node_counts. */
8653 static int
8654 tree_nodes_cmp (const void *p1, const void *p2)
8656 const unsigned *n1 = (const unsigned *)p1;
8657 const unsigned *n2 = (const unsigned *)p2;
8659 return tree_node_counts[*n1] - tree_node_counts[*n2];
8662 /* Comparator of indices based on tree_code_counts. */
8664 static int
8665 tree_codes_cmp (const void *p1, const void *p2)
8667 const unsigned *n1 = (const unsigned *)p1;
8668 const unsigned *n2 = (const unsigned *)p2;
8670 return tree_code_counts[*n1] - tree_code_counts[*n2];
8673 #define TREE_MEM_USAGE_SPACES 40
8675 /* Print debugging information about tree nodes generated during the compile,
8676 and any language-specific information. */
8678 void
8679 dump_tree_statistics (void)
8681 if (GATHER_STATISTICS)
8683 uint64_t total_nodes, total_bytes;
8684 fprintf (stderr, "\nKind Nodes Bytes\n");
8685 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8686 total_nodes = total_bytes = 0;
8689 auto_vec<unsigned> indices (all_kinds);
8690 for (unsigned i = 0; i < all_kinds; i++)
8691 indices.quick_push (i);
8692 indices.qsort (tree_nodes_cmp);
8694 for (unsigned i = 0; i < (int) all_kinds; i++)
8696 unsigned j = indices[i];
8697 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8698 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8699 SIZE_AMOUNT (tree_node_sizes[j]));
8700 total_nodes += tree_node_counts[j];
8701 total_bytes += tree_node_sizes[j];
8703 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8704 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8705 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8706 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8710 fprintf (stderr, "Code Nodes\n");
8711 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8713 auto_vec<unsigned> indices (MAX_TREE_CODES);
8714 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8715 indices.quick_push (i);
8716 indices.qsort (tree_codes_cmp);
8718 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8720 unsigned j = indices[i];
8721 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8722 get_tree_code_name ((enum tree_code) j),
8723 SIZE_AMOUNT (tree_code_counts[j]));
8725 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8726 fprintf (stderr, "\n");
8727 ssanames_print_statistics ();
8728 fprintf (stderr, "\n");
8729 phinodes_print_statistics ();
8730 fprintf (stderr, "\n");
8733 else
8734 fprintf (stderr, "(No per-node statistics)\n");
8736 print_type_hash_statistics ();
8737 print_debug_expr_statistics ();
8738 print_value_expr_statistics ();
8739 lang_hooks.print_statistics ();
8742 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8744 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8746 unsigned
8747 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8749 /* This relies on the raw feedback's top 4 bits being zero. */
8750 #define FEEDBACK(X) ((X) * 0x04c11db7)
8751 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8752 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8753 static const unsigned syndromes[16] =
8755 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8756 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8757 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8758 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8760 #undef FEEDBACK
8761 #undef SYNDROME
8763 value <<= (32 - bytes * 8);
8764 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8766 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8768 chksum = (chksum << 4) ^ feedback;
8771 return chksum;
8774 /* Generate a crc32 of a string. */
8776 unsigned
8777 crc32_string (unsigned chksum, const char *string)
8780 chksum = crc32_byte (chksum, *string);
8781 while (*string++);
8782 return chksum;
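/* A short usage sketch (SOME_VALUE is an illustrative unsigned):

     unsigned chksum = crc32_string (0, main_input_filename);
     chksum = crc32_unsigned_n (chksum, some_value, 4);  */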
8785 /* P is a string that will be used in a symbol. Mask out any characters
8786 that are not valid in that context. */
8788 void
8789 clean_symbol_name (char *p)
8791 for (; *p; p++)
8792 if (! (ISALNUM (*p)
8793 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8794 || *p == '$'
8795 #endif
8796 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8797 || *p == '.'
8798 #endif
8800 *p = '_';
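/* A short example; the exact result depends on NO_DOT_IN_LABEL and
   NO_DOLLAR_IN_LABEL for the target:

     char *name = xstrdup ("foo bar-baz.c");
     clean_symbol_name (name);
     // NAME is now "foo_bar_baz.c" if '.' is allowed in labels,
     // and "foo_bar_baz_c" otherwise.  */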
8803 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8805 /* Create a unique anonymous identifier. The identifier is still a
8806 valid assembly label. */
8808 tree
8809 make_anon_name ()
 8811   const char *fmt =
 8812 #if !defined (NO_DOT_IN_LABEL)
 8813     "."
 8814 #elif !defined (NO_DOLLAR_IN_LABEL)
 8815     "$"
 8816 #else
 8817     "_"
 8818 #endif
 8819     "_anon_%d";
8821 char buf[24];
8822 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8823 gcc_checking_assert (len < int (sizeof (buf)));
8825 tree id = get_identifier_with_length (buf, len);
8826 IDENTIFIER_ANON_P (id) = true;
8828 return id;
8831 /* Generate a name for a special-purpose function.
8832 The generated name may need to be unique across the whole link.
8833 Changes to this function may also require corresponding changes to
8834 xstrdup_mask_random.
8835 TYPE is some string to identify the purpose of this function to the
8836 linker or collect2; it must start with an uppercase letter,
8837 one of:
8838 I - for constructors
8839 D - for destructors
8840 N - for C++ anonymous namespaces
8841 F - for DWARF unwind frame information. */
8843 tree
8844 get_file_function_name (const char *type)
8846 char *buf;
8847 const char *p;
8848 char *q;
8850 /* If we already have a name we know to be unique, just use that. */
8851 if (first_global_object_name)
8852 p = q = ASTRDUP (first_global_object_name);
8853 /* If the target is handling the constructors/destructors, they
8854 will be local to this file and the name is only necessary for
8855 debugging purposes.
 8856    We also assign sub_I and sub_D suffixes to constructors called from
8857 the global static constructors. These are always local. */
8858 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8859 || (startswith (type, "sub_")
8860 && (type[4] == 'I' || type[4] == 'D')))
8862 const char *file = main_input_filename;
8863 if (! file)
8864 file = LOCATION_FILE (input_location);
8865 /* Just use the file's basename, because the full pathname
8866 might be quite long. */
8867 p = q = ASTRDUP (lbasename (file));
8869 else
8871 /* Otherwise, the name must be unique across the entire link.
8872 We don't have anything that we know to be unique to this translation
8873 unit, so use what we do have and throw in some randomness. */
8874 unsigned len;
8875 const char *name = weak_global_object_name;
8876 const char *file = main_input_filename;
8878 if (! name)
8879 name = "";
8880 if (! file)
8881 file = LOCATION_FILE (input_location);
8883 len = strlen (file);
8884 q = (char *) alloca (9 + 19 + len + 1);
8885 memcpy (q, file, len + 1);
8887 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8888 crc32_string (0, name), get_random_seed (false));
8890 p = q;
8893 clean_symbol_name (q);
8894 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8895 + strlen (type));
8897 /* Set up the name of the file-level functions we may need.
8898 Use a global object (which is already required to be unique over
8899 the program) rather than the file name (which imposes extra
8900 constraints). */
8901 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8903 return get_identifier (buf);
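/* A short sketch of the shape of the result:

     tree id = get_file_function_name ("I");
     // IDENTIFIER_POINTER (id) looks like "_GLOBAL__I_<name>", where
     // <name> comes from the first global object, the main input file's
     // basename, or a randomized fallback, after clean_symbol_name.  */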
8906 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8908 /* Complain that the tree code of NODE does not match the expected 0
8909 terminated list of trailing codes. The trailing code list can be
8910 empty, for a more vague error message. FILE, LINE, and FUNCTION
8911 are of the caller. */
8913 void
8914 tree_check_failed (const_tree node, const char *file,
8915 int line, const char *function, ...)
8917 va_list args;
8918 const char *buffer;
8919 unsigned length = 0;
8920 enum tree_code code;
8922 va_start (args, function);
8923 while ((code = (enum tree_code) va_arg (args, int)))
8924 length += 4 + strlen (get_tree_code_name (code));
8925 va_end (args);
8926 if (length)
8928 char *tmp;
8929 va_start (args, function);
8930 length += strlen ("expected ");
8931 buffer = tmp = (char *) alloca (length);
8932 length = 0;
8933 while ((code = (enum tree_code) va_arg (args, int)))
8935 const char *prefix = length ? " or " : "expected ";
8937 strcpy (tmp + length, prefix);
8938 length += strlen (prefix);
8939 strcpy (tmp + length, get_tree_code_name (code));
8940 length += strlen (get_tree_code_name (code));
8942 va_end (args);
8944 else
8945 buffer = "unexpected node";
8947 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8948 buffer, get_tree_code_name (TREE_CODE (node)),
8949 function, trim_filename (file), line);
8952 /* Complain that the tree code of NODE does match the expected 0
8953 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8954 the caller. */
8956 void
8957 tree_not_check_failed (const_tree node, const char *file,
8958 int line, const char *function, ...)
8960 va_list args;
8961 char *buffer;
8962 unsigned length = 0;
8963 enum tree_code code;
8965 va_start (args, function);
8966 while ((code = (enum tree_code) va_arg (args, int)))
8967 length += 4 + strlen (get_tree_code_name (code));
8968 va_end (args);
8969 va_start (args, function);
8970 buffer = (char *) alloca (length);
8971 length = 0;
8972 while ((code = (enum tree_code) va_arg (args, int)))
8974 if (length)
8976 strcpy (buffer + length, " or ");
8977 length += 4;
8979 strcpy (buffer + length, get_tree_code_name (code));
8980 length += strlen (get_tree_code_name (code));
8982 va_end (args);
8984 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8985 buffer, get_tree_code_name (TREE_CODE (node)),
8986 function, trim_filename (file), line);
8989 /* Similar to tree_check_failed, except that we check for a class of tree
8990 code, given in CL. */
8992 void
8993 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8994 const char *file, int line, const char *function)
8996 internal_error
8997 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8998 TREE_CODE_CLASS_STRING (cl),
8999 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9000 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9003 /* Similar to tree_check_failed, except that instead of specifying a
9004 dozen codes, use the knowledge that they're all sequential. */
9006 void
9007 tree_range_check_failed (const_tree node, const char *file, int line,
9008 const char *function, enum tree_code c1,
9009 enum tree_code c2)
9011 char *buffer;
9012 unsigned length = 0;
9013 unsigned int c;
9015 for (c = c1; c <= c2; ++c)
9016 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9018 length += strlen ("expected ");
9019 buffer = (char *) alloca (length);
9020 length = 0;
9022 for (c = c1; c <= c2; ++c)
9024 const char *prefix = length ? " or " : "expected ";
9026 strcpy (buffer + length, prefix);
9027 length += strlen (prefix);
9028 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9029 length += strlen (get_tree_code_name ((enum tree_code) c));
9032 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9033 buffer, get_tree_code_name (TREE_CODE (node)),
9034 function, trim_filename (file), line);
9038 /* Similar to tree_check_failed, except that we check that a tree does
 9039    not belong to the specified class of tree codes, given in CL.  */
9041 void
9042 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9043 const char *file, int line, const char *function)
9045 internal_error
9046 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9047 TREE_CODE_CLASS_STRING (cl),
9048 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9049 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9053 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9055 void
9056 omp_clause_check_failed (const_tree node, const char *file, int line,
9057 const char *function, enum omp_clause_code code)
9059 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9060 "in %s, at %s:%d",
9061 omp_clause_code_name[code],
9062 get_tree_code_name (TREE_CODE (node)),
9063 function, trim_filename (file), line);
9067 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9069 void
9070 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9071 const char *function, enum omp_clause_code c1,
9072 enum omp_clause_code c2)
9074 char *buffer;
9075 unsigned length = 0;
9076 unsigned int c;
9078 for (c = c1; c <= c2; ++c)
9079 length += 4 + strlen (omp_clause_code_name[c]);
9081 length += strlen ("expected ");
9082 buffer = (char *) alloca (length);
9083 length = 0;
9085 for (c = c1; c <= c2; ++c)
9087 const char *prefix = length ? " or " : "expected ";
9089 strcpy (buffer + length, prefix);
9090 length += strlen (prefix);
9091 strcpy (buffer + length, omp_clause_code_name[c]);
9092 length += strlen (omp_clause_code_name[c]);
9095 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9096 buffer, omp_clause_code_name[TREE_CODE (node)],
9097 function, trim_filename (file), line);
9101 #undef DEFTREESTRUCT
9102 #define DEFTREESTRUCT(VAL, NAME) NAME,
9104 static const char *ts_enum_names[] = {
9105 #include "treestruct.def"
9107 #undef DEFTREESTRUCT
9109 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9111 /* Similar to tree_class_check_failed, except that we check for
9112 whether CODE contains the tree structure identified by EN. */
9114 void
9115 tree_contains_struct_check_failed (const_tree node,
9116 const enum tree_node_structure_enum en,
9117 const char *file, int line,
9118 const char *function)
9120 internal_error
9121 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9122 TS_ENUM_NAME (en),
9123 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
 9127 /* Similar to above, except that the check is for the bounds of a
 9128    TREE_INT_CST's (dynamically sized) vector of elements.  */
9130 void
9131 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9132 const char *function)
9134 internal_error
9135 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9136 "at %s:%d",
9137 idx + 1, len, function, trim_filename (file), line);
9140 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9141 (dynamically sized) vector. */
9143 void
9144 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9145 const char *function)
9147 internal_error
9148 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9149 idx + 1, len, function, trim_filename (file), line);
9152 /* Similar to above, except that the check is for the bounds of the operand
9153 vector of an expression node EXP. */
9155 void
9156 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9157 int line, const char *function)
9159 enum tree_code code = TREE_CODE (exp);
9160 internal_error
9161 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9162 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9163 function, trim_filename (file), line);
9166 /* Similar to above, except that the check is for the number of
9167 operands of an OMP_CLAUSE node. */
9169 void
9170 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9171 int line, const char *function)
9173 internal_error
9174 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9175 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9176 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9177 trim_filename (file), line);
9179 #endif /* ENABLE_TREE_CHECKING */
9181 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9182 and mapped to the machine mode MODE. Initialize its fields and build
9183 the information necessary for debugging output. */
9185 static tree
9186 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9188 tree t;
9189 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9191 t = make_node (VECTOR_TYPE);
9192 TREE_TYPE (t) = mv_innertype;
9193 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9194 SET_TYPE_MODE (t, mode);
9196 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9197 SET_TYPE_STRUCTURAL_EQUALITY (t);
9198 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9199 || mode != VOIDmode)
9200 && !VECTOR_BOOLEAN_TYPE_P (t))
9201 TYPE_CANONICAL (t)
9202 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9204 layout_type (t);
9206 hashval_t hash = type_hash_canon_hash (t);
9207 t = type_hash_canon (hash, t);
9209 /* We have built a main variant, based on the main variant of the
9210 inner type. Use it to build the variant we return. */
9211 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9212 && TREE_TYPE (t) != innertype)
9213 return build_type_attribute_qual_variant (t,
9214 TYPE_ATTRIBUTES (innertype),
9215 TYPE_QUALS (innertype));
9217 return t;
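/* make_vector_type is static; a short sketch using the public wrapper
   (build_vector_type) defined later in this file:

     tree v4si = build_vector_type (intSI_type_node, 4);
     // A vector of four SImode integers, laid out and given a machine
     // mode by make_vector_type.  */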
9220 static tree
9221 make_or_reuse_type (unsigned size, int unsignedp)
9223 int i;
9225 if (size == INT_TYPE_SIZE)
9226 return unsignedp ? unsigned_type_node : integer_type_node;
9227 if (size == CHAR_TYPE_SIZE)
9228 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9229 if (size == SHORT_TYPE_SIZE)
9230 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9231 if (size == LONG_TYPE_SIZE)
9232 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9233 if (size == LONG_LONG_TYPE_SIZE)
9234 return (unsignedp ? long_long_unsigned_type_node
9235 : long_long_integer_type_node);
9237 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9238 if (size == int_n_data[i].bitsize
9239 && int_n_enabled_p[i])
9240 return (unsignedp ? int_n_trees[i].unsigned_type
9241 : int_n_trees[i].signed_type);
9243 if (unsignedp)
9244 return make_unsigned_type (size);
9245 else
9246 return make_signed_type (size);
9249 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9251 static tree
9252 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9254 if (satp)
9256 if (size == SHORT_FRACT_TYPE_SIZE)
9257 return unsignedp ? sat_unsigned_short_fract_type_node
9258 : sat_short_fract_type_node;
9259 if (size == FRACT_TYPE_SIZE)
9260 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9261 if (size == LONG_FRACT_TYPE_SIZE)
9262 return unsignedp ? sat_unsigned_long_fract_type_node
9263 : sat_long_fract_type_node;
9264 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9265 return unsignedp ? sat_unsigned_long_long_fract_type_node
9266 : sat_long_long_fract_type_node;
9268 else
9270 if (size == SHORT_FRACT_TYPE_SIZE)
9271 return unsignedp ? unsigned_short_fract_type_node
9272 : short_fract_type_node;
9273 if (size == FRACT_TYPE_SIZE)
9274 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9275 if (size == LONG_FRACT_TYPE_SIZE)
9276 return unsignedp ? unsigned_long_fract_type_node
9277 : long_fract_type_node;
9278 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9279 return unsignedp ? unsigned_long_long_fract_type_node
9280 : long_long_fract_type_node;
9283 return make_fract_type (size, unsignedp, satp);
9286 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9288 static tree
9289 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9291 if (satp)
9293 if (size == SHORT_ACCUM_TYPE_SIZE)
9294 return unsignedp ? sat_unsigned_short_accum_type_node
9295 : sat_short_accum_type_node;
9296 if (size == ACCUM_TYPE_SIZE)
9297 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9298 if (size == LONG_ACCUM_TYPE_SIZE)
9299 return unsignedp ? sat_unsigned_long_accum_type_node
9300 : sat_long_accum_type_node;
9301 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9302 return unsignedp ? sat_unsigned_long_long_accum_type_node
9303 : sat_long_long_accum_type_node;
9305 else
9307 if (size == SHORT_ACCUM_TYPE_SIZE)
9308 return unsignedp ? unsigned_short_accum_type_node
9309 : short_accum_type_node;
9310 if (size == ACCUM_TYPE_SIZE)
9311 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9312 if (size == LONG_ACCUM_TYPE_SIZE)
9313 return unsignedp ? unsigned_long_accum_type_node
9314 : long_accum_type_node;
9315 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9316 return unsignedp ? unsigned_long_long_accum_type_node
9317 : long_long_accum_type_node;
9320 return make_accum_type (size, unsignedp, satp);
9324 /* Create an atomic variant node for TYPE. This routine is called
9325 during initialization of data types to create the 5 basic atomic
9326 types. The generic build_variant_type function requires these to
9327 already be set up in order to function properly, so cannot be
9328 called from there. If ALIGN is non-zero, then ensure alignment is
9329 overridden to this value. */
9331 static tree
9332 build_atomic_base (tree type, unsigned int align)
9334 tree t;
 9336   /* Make sure it's not already registered.  */
9337 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9338 return t;
9340 t = build_variant_type_copy (type);
9341 set_type_quals (t, TYPE_QUAL_ATOMIC);
9343 if (align)
9344 SET_TYPE_ALIGN (t, align);
9346 return t;
9349 /* Information about the _FloatN and _FloatNx types. This must be in
9350 the same order as the corresponding TI_* enum values. */
9351 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9353 { 16, false },
9354 { 32, false },
9355 { 64, false },
9356 { 128, false },
9357 { 32, true },
9358 { 64, true },
9359 { 128, true },
9363 /* Create nodes for all integer types (and error_mark_node) using the sizes
9364 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9366 void
9367 build_common_tree_nodes (bool signed_char)
9369 int i;
9371 error_mark_node = make_node (ERROR_MARK);
9372 TREE_TYPE (error_mark_node) = error_mark_node;
9374 initialize_sizetypes ();
9376 /* Define both `signed char' and `unsigned char'. */
9377 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9378 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9379 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9380 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9382 /* Define `char', which is like either `signed char' or `unsigned char'
9383 but not the same as either. */
9384 char_type_node
9385 = (signed_char
9386 ? make_signed_type (CHAR_TYPE_SIZE)
9387 : make_unsigned_type (CHAR_TYPE_SIZE));
9388 TYPE_STRING_FLAG (char_type_node) = 1;
9390 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9391 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9392 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9393 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9394 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9395 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9396 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9397 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9399 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9401 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9402 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9404 if (int_n_enabled_p[i])
9406 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9407 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9411 /* Define a boolean type. This type only represents boolean values but
9412 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9413 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9414 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9415 TYPE_PRECISION (boolean_type_node) = 1;
9416 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9418 /* Define what type to use for size_t. */
9419 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9420 size_type_node = unsigned_type_node;
9421 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9422 size_type_node = long_unsigned_type_node;
9423 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9424 size_type_node = long_long_unsigned_type_node;
9425 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9426 size_type_node = short_unsigned_type_node;
9427 else
9429 int i;
9431 size_type_node = NULL_TREE;
9432 for (i = 0; i < NUM_INT_N_ENTS; i++)
9433 if (int_n_enabled_p[i])
9435 char name[50], altname[50];
9436 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9437 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9439 if (strcmp (name, SIZE_TYPE) == 0
9440 || strcmp (altname, SIZE_TYPE) == 0)
9442 size_type_node = int_n_trees[i].unsigned_type;
9445 if (size_type_node == NULL_TREE)
9446 gcc_unreachable ();
9449 /* Define what type to use for ptrdiff_t. */
9450 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9451 ptrdiff_type_node = integer_type_node;
9452 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9453 ptrdiff_type_node = long_integer_type_node;
9454 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9455 ptrdiff_type_node = long_long_integer_type_node;
9456 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9457 ptrdiff_type_node = short_integer_type_node;
9458 else
9460 ptrdiff_type_node = NULL_TREE;
9461 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9462 if (int_n_enabled_p[i])
9464 char name[50], altname[50];
9465 sprintf (name, "__int%d", int_n_data[i].bitsize);
9466 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9468 if (strcmp (name, PTRDIFF_TYPE) == 0
9469 || strcmp (altname, PTRDIFF_TYPE) == 0)
9470 ptrdiff_type_node = int_n_trees[i].signed_type;
9472 if (ptrdiff_type_node == NULL_TREE)
9473 gcc_unreachable ();
9476 /* Fill in the rest of the sized types. Reuse existing type nodes
9477 when possible. */
9478 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9479 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9480 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9481 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9482 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9484 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9485 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9486 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9487 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9488 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
 9490   /* Don't call build_qualified_type for atomics.  That routine does
9491 special processing for atomics, and until they are initialized
9492 it's better not to make that call.
9494 Check to see if there is a target override for atomic types. */
9496 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9497 targetm.atomic_align_for_mode (QImode));
9498 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9499 targetm.atomic_align_for_mode (HImode));
9500 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9501 targetm.atomic_align_for_mode (SImode));
9502 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9503 targetm.atomic_align_for_mode (DImode));
9504 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9505 targetm.atomic_align_for_mode (TImode));
9507 access_public_node = get_identifier ("public");
9508 access_protected_node = get_identifier ("protected");
9509 access_private_node = get_identifier ("private");
 9511   /* Define these next since types below may use them.  */
9512 integer_zero_node = build_int_cst (integer_type_node, 0);
9513 integer_one_node = build_int_cst (integer_type_node, 1);
9514 integer_three_node = build_int_cst (integer_type_node, 3);
9515 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9517 size_zero_node = size_int (0);
9518 size_one_node = size_int (1);
9519 bitsize_zero_node = bitsize_int (0);
9520 bitsize_one_node = bitsize_int (1);
9521 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9523 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9524 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9526 void_type_node = make_node (VOID_TYPE);
9527 layout_type (void_type_node);
9529 /* We are not going to have real types in C with less than byte alignment,
9530 so we might as well not have any types that claim to have it. */
9531 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9532 TYPE_USER_ALIGN (void_type_node) = 0;
9534 void_node = make_node (VOID_CST);
9535 TREE_TYPE (void_node) = void_type_node;
9537 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9539 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9540 layout_type (TREE_TYPE (null_pointer_node));
9542 ptr_type_node = build_pointer_type (void_type_node);
9543 const_ptr_type_node
9544 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9545 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9546 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9548 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9550 float_type_node = make_node (REAL_TYPE);
9551 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9552 layout_type (float_type_node);
9554 double_type_node = make_node (REAL_TYPE);
9555 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9556 layout_type (double_type_node);
9558 long_double_type_node = make_node (REAL_TYPE);
9559 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9560 layout_type (long_double_type_node);
9562 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9564 int n = floatn_nx_types[i].n;
9565 bool extended = floatn_nx_types[i].extended;
9566 scalar_float_mode mode;
9567 if (!targetm.floatn_mode (n, extended).exists (&mode))
9568 continue;
9569 int precision = GET_MODE_PRECISION (mode);
9570 /* Work around the rs6000 KFmode having precision 113 not
9571 128. */
9572 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9573 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9574 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9575 if (!extended)
9576 gcc_assert (min_precision == n);
9577 if (precision < min_precision)
9578 precision = min_precision;
9579 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9580 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9581 layout_type (FLOATN_NX_TYPE_NODE (i));
9582 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9584 float128t_type_node = float128_type_node;
9585 #ifdef HAVE_BFmode
9586 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9587 && targetm.scalar_mode_supported_p (BFmode)
9588 && targetm.libgcc_floating_mode_supported_p (BFmode))
9590 bfloat16_type_node = make_node (REAL_TYPE);
9591 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9592 layout_type (bfloat16_type_node);
9593 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9595 #endif
9597 float_ptr_type_node = build_pointer_type (float_type_node);
9598 double_ptr_type_node = build_pointer_type (double_type_node);
9599 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9600 integer_ptr_type_node = build_pointer_type (integer_type_node);
9602 /* Fixed size integer types. */
9603 uint16_type_node = make_or_reuse_type (16, 1);
9604 uint32_type_node = make_or_reuse_type (32, 1);
9605 uint64_type_node = make_or_reuse_type (64, 1);
9606 if (targetm.scalar_mode_supported_p (TImode))
9607 uint128_type_node = make_or_reuse_type (128, 1);
9609 /* Decimal float types. */
9610 if (targetm.decimal_float_supported_p ())
9612 dfloat32_type_node = make_node (REAL_TYPE);
9613 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9614 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9615 layout_type (dfloat32_type_node);
9617 dfloat64_type_node = make_node (REAL_TYPE);
9618 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9619 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9620 layout_type (dfloat64_type_node);
9622 dfloat128_type_node = make_node (REAL_TYPE);
9623 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9624 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9625 layout_type (dfloat128_type_node);
9628 complex_integer_type_node = build_complex_type (integer_type_node, true);
9629 complex_float_type_node = build_complex_type (float_type_node, true);
9630 complex_double_type_node = build_complex_type (double_type_node, true);
9631 complex_long_double_type_node = build_complex_type (long_double_type_node,
9632 true);
9634 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9636 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9637 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9638 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9641 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9642 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9643 sat_ ## KIND ## _type_node = \
9644 make_sat_signed_ ## KIND ## _type (SIZE); \
9645 sat_unsigned_ ## KIND ## _type_node = \
9646 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9647 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9648 unsigned_ ## KIND ## _type_node = \
9649 make_unsigned_ ## KIND ## _type (SIZE);
9651 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9652 sat_ ## WIDTH ## KIND ## _type_node = \
9653 make_sat_signed_ ## KIND ## _type (SIZE); \
9654 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9655 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9656 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9657 unsigned_ ## WIDTH ## KIND ## _type_node = \
9658 make_unsigned_ ## KIND ## _type (SIZE);
9660 /* Make fixed-point type nodes based on four different widths. */
9661 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9662 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9663 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9664 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9665 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9667 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9668 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9669 NAME ## _type_node = \
9670 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9671 u ## NAME ## _type_node = \
9672 make_or_reuse_unsigned_ ## KIND ## _type \
9673 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9674 sat_ ## NAME ## _type_node = \
9675 make_or_reuse_sat_signed_ ## KIND ## _type \
9676 (GET_MODE_BITSIZE (MODE ## mode)); \
9677 sat_u ## NAME ## _type_node = \
9678 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9679 (GET_MODE_BITSIZE (U ## MODE ## mode));
9681 /* Fixed-point type and mode nodes. */
9682 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9683 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9684 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9685 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9686 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9687 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9688 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9689 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9690 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9691 MAKE_FIXED_MODE_NODE (accum, da, DA)
9692 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9695 tree t = targetm.build_builtin_va_list ();
9697 /* Many back-ends define record types without setting TYPE_NAME.
9698 If we copied the record type here, we'd keep the original
9699 record type without a name. This breaks name mangling. So,
9700 don't copy record types and let c_common_nodes_and_builtins()
9701 declare the type to be __builtin_va_list. */
9702 if (TREE_CODE (t) != RECORD_TYPE)
9703 t = build_variant_type_copy (t);
9705 va_list_type_node = t;
9708 /* SCEV analyzer global shared trees. */
9709 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9710 TREE_TYPE (chrec_dont_know) = void_type_node;
9711 chrec_known = make_node (SCEV_KNOWN);
9712 TREE_TYPE (chrec_known) = void_type_node;
9715 /* Modify DECL for given flags.
9716 TM_PURE attribute is set only on types, so the function will modify
9717 DECL's type when ECF_TM_PURE is used. */
9719 void
9720 set_call_expr_flags (tree decl, int flags)
9722 if (flags & ECF_NOTHROW)
9723 TREE_NOTHROW (decl) = 1;
9724 if (flags & ECF_CONST)
9725 TREE_READONLY (decl) = 1;
9726 if (flags & ECF_PURE)
9727 DECL_PURE_P (decl) = 1;
9728 if (flags & ECF_LOOPING_CONST_OR_PURE)
9729 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9730 if (flags & ECF_NOVOPS)
9731 DECL_IS_NOVOPS (decl) = 1;
9732 if (flags & ECF_NORETURN)
9733 TREE_THIS_VOLATILE (decl) = 1;
9734 if (flags & ECF_MALLOC)
9735 DECL_IS_MALLOC (decl) = 1;
9736 if (flags & ECF_RETURNS_TWICE)
9737 DECL_IS_RETURNS_TWICE (decl) = 1;
9738 if (flags & ECF_LEAF)
9739 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9740 NULL, DECL_ATTRIBUTES (decl));
9741 if (flags & ECF_COLD)
9742 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9743 NULL, DECL_ATTRIBUTES (decl));
9744 if (flags & ECF_RET1)
9745 DECL_ATTRIBUTES (decl)
9746 = tree_cons (get_identifier ("fn spec"),
9747 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9748 DECL_ATTRIBUTES (decl));
9749 if ((flags & ECF_TM_PURE) && flag_tm)
9750 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9751 /* Looping const or pure is implied by noreturn.
9752 There is currently no way to declare looping const or looping pure alone. */
9753 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9754 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
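/* A short usage sketch:

     set_call_expr_flags (decl, ECF_MALLOC | ECF_LEAF | ECF_NOTHROW);
     // DECL is now marked malloc-like, leaf and nothrow, following the
     // mappings above.  */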
9758 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9760 static void
9761 local_define_builtin (const char *name, tree type, enum built_in_function code,
9762 const char *library_name, int ecf_flags)
9764 tree decl;
9766 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9767 library_name, NULL_TREE);
9768 set_call_expr_flags (decl, ecf_flags);
9770 set_builtin_decl (code, decl, true);
9773 /* Call this function after instantiating all builtins that the language
9774 front end cares about. This will build the rest of the builtins
9775 and internal functions that are relied upon by the tree optimizers and
9776 the middle-end. */
9778 void
9779 build_common_builtin_nodes (void)
9781 tree tmp, ftype;
9782 int ecf_flags;
9784 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9786 ftype = build_function_type_list (void_type_node,
9787 ptr_type_node,
9788 ptr_type_node,
9789 integer_type_node,
9790 NULL_TREE);
9791 local_define_builtin ("__builtin_clear_padding", ftype,
9792 BUILT_IN_CLEAR_PADDING,
9793 "__builtin_clear_padding",
9794 ECF_LEAF | ECF_NOTHROW);
9797 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9798 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9799 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9800 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9802 ftype = build_function_type (void_type_node, void_list_node);
9803 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9804 local_define_builtin ("__builtin_unreachable", ftype,
9805 BUILT_IN_UNREACHABLE,
9806 "__builtin_unreachable",
9807 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9808 | ECF_CONST | ECF_COLD);
9809 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9810 local_define_builtin ("__builtin_unreachable trap", ftype,
9811 BUILT_IN_UNREACHABLE_TRAP,
9812 "__builtin_unreachable trap",
9813 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9814 | ECF_CONST | ECF_COLD);
9815 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9816 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9817 "abort",
9818 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9819 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9820 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9821 "__builtin_trap",
9822 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9825 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9826 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9828 ftype = build_function_type_list (ptr_type_node,
9829 ptr_type_node, const_ptr_type_node,
9830 size_type_node, NULL_TREE);
9832 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9833 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9834 "memcpy", ECF_NOTHROW | ECF_LEAF);
9835 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9836 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9837 "memmove", ECF_NOTHROW | ECF_LEAF);
9840 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9842 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9843 const_ptr_type_node, size_type_node,
9844 NULL_TREE);
9845 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9846 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9849 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9851 ftype = build_function_type_list (ptr_type_node,
9852 ptr_type_node, integer_type_node,
9853 size_type_node, NULL_TREE);
9854 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9855 "memset", ECF_NOTHROW | ECF_LEAF);
9858 /* If we're checking the stack, `alloca' can throw. */
9859 const int alloca_flags
9860 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9862 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9864 ftype = build_function_type_list (ptr_type_node,
9865 size_type_node, NULL_TREE);
9866 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9867 "alloca", alloca_flags);
9870 ftype = build_function_type_list (ptr_type_node, size_type_node,
9871 size_type_node, NULL_TREE);
9872 local_define_builtin ("__builtin_alloca_with_align", ftype,
9873 BUILT_IN_ALLOCA_WITH_ALIGN,
9874 "__builtin_alloca_with_align",
9875 alloca_flags);
9877 ftype = build_function_type_list (ptr_type_node, size_type_node,
9878 size_type_node, size_type_node, NULL_TREE);
9879 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9880 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9881 "__builtin_alloca_with_align_and_max",
9882 alloca_flags);
9884 ftype = build_function_type_list (void_type_node,
9885 ptr_type_node, ptr_type_node,
9886 ptr_type_node, NULL_TREE);
9887 local_define_builtin ("__builtin_init_trampoline", ftype,
9888 BUILT_IN_INIT_TRAMPOLINE,
9889 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9890 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9891 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9892 "__builtin_init_heap_trampoline",
9893 ECF_NOTHROW | ECF_LEAF);
9894 local_define_builtin ("__builtin_init_descriptor", ftype,
9895 BUILT_IN_INIT_DESCRIPTOR,
9896 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9898 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9899 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9900 BUILT_IN_ADJUST_TRAMPOLINE,
9901 "__builtin_adjust_trampoline",
9902 ECF_CONST | ECF_NOTHROW);
9903 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9904 BUILT_IN_ADJUST_DESCRIPTOR,
9905 "__builtin_adjust_descriptor",
9906 ECF_CONST | ECF_NOTHROW);
9908 ftype = build_function_type_list (void_type_node,
9909 ptr_type_node, ptr_type_node, NULL_TREE);
9910 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9911 local_define_builtin ("__builtin___clear_cache", ftype,
9912 BUILT_IN_CLEAR_CACHE,
9913 "__clear_cache",
9914 ECF_NOTHROW);
9916 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9917 BUILT_IN_NONLOCAL_GOTO,
9918 "__builtin_nonlocal_goto",
9919 ECF_NORETURN | ECF_NOTHROW);
9921 ftype = build_function_type_list (void_type_node,
9922 ptr_type_node, ptr_type_node, NULL_TREE);
9923 local_define_builtin ("__builtin_setjmp_setup", ftype,
9924 BUILT_IN_SETJMP_SETUP,
9925 "__builtin_setjmp_setup", ECF_NOTHROW);
9927 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9928 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9929 BUILT_IN_SETJMP_RECEIVER,
9930 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9932 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9933 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9934 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9936 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9937 local_define_builtin ("__builtin_stack_restore", ftype,
9938 BUILT_IN_STACK_RESTORE,
9939 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9941 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9942 const_ptr_type_node, size_type_node,
9943 NULL_TREE);
9944 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9945 "__builtin_memcmp_eq",
9946 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9948 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9949 "__builtin_strncmp_eq",
9950 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9952 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9953 "__builtin_strcmp_eq",
9954 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9956 /* If there's a possibility that we might use the ARM EABI, build the
9957 alternate __cxa_end_cleanup node used to resume from C++. */
9958 if (targetm.arm_eabi_unwinder)
9960 ftype = build_function_type_list (void_type_node, NULL_TREE);
9961 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9962 BUILT_IN_CXA_END_CLEANUP,
9963 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9966 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9967 local_define_builtin ("__builtin_unwind_resume", ftype,
9968 BUILT_IN_UNWIND_RESUME,
9969 ((targetm_common.except_unwind_info (&global_options)
9970 == UI_SJLJ)
9971 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9972 ECF_NORETURN);
9974 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9976 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9977 NULL_TREE);
9978 local_define_builtin ("__builtin_return_address", ftype,
9979 BUILT_IN_RETURN_ADDRESS,
9980 "__builtin_return_address",
9981 ECF_NOTHROW);
9984 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9985 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9987 ftype = build_function_type_list (void_type_node, ptr_type_node,
9988 ptr_type_node, NULL_TREE);
9989 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9990 local_define_builtin ("__cyg_profile_func_enter", ftype,
9991 BUILT_IN_PROFILE_FUNC_ENTER,
9992 "__cyg_profile_func_enter", 0);
9993 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9994 local_define_builtin ("__cyg_profile_func_exit", ftype,
9995 BUILT_IN_PROFILE_FUNC_EXIT,
9996 "__cyg_profile_func_exit", 0);
9999 /* The exception object and filter values from the runtime. The argument
10000 must be zero before exception lowering, i.e. from the front end. After
10001 exception lowering, it will be the region number for the exception
10002 landing pad. These functions are PURE instead of CONST to prevent
10003 them from being hoisted past the exception edge that will initialize
10004 its value in the landing pad. */
10005 ftype = build_function_type_list (ptr_type_node,
10006 integer_type_node, NULL_TREE);
10007 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10008 /* Only use TM_PURE if we have TM language support. */
10009 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10010 ecf_flags |= ECF_TM_PURE;
10011 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10012 "__builtin_eh_pointer", ecf_flags);
10014 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10015 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10016 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10017 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10019 ftype = build_function_type_list (void_type_node,
10020 integer_type_node, integer_type_node,
10021 NULL_TREE);
10022 local_define_builtin ("__builtin_eh_copy_values", ftype,
10023 BUILT_IN_EH_COPY_VALUES,
10024 "__builtin_eh_copy_values", ECF_NOTHROW);
10026 /* Complex multiplication and division. These are handled as builtins
10027 rather than optabs because emit_library_call_value doesn't support
10028 complex. Further, we can do slightly better with folding these
10029 beasties if the real and imaginary parts of the arguments are separate. */
10031 int mode;
10033 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10035 char mode_name_buf[4], *q;
10036 const char *p;
10037 enum built_in_function mcode, dcode;
10038 tree type, inner_type;
10039 const char *prefix = "__";
10041 if (targetm.libfunc_gnu_prefix)
10042 prefix = "__gnu_";
10044 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10045 if (type == NULL)
10046 continue;
10047 inner_type = TREE_TYPE (type);
10049 ftype = build_function_type_list (type, inner_type, inner_type,
10050 inner_type, inner_type, NULL_TREE);
10052 mcode = ((enum built_in_function)
10053 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10054 dcode = ((enum built_in_function)
10055 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10057 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10058 *q = TOLOWER (*p);
10059 *q = '\0';
10061 /* For -ftrapping-math these should throw from a former
10062 -fnon-call-exception stmt. */
10063 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10064 NULL);
10065 local_define_builtin (built_in_names[mcode], ftype, mcode,
10066 built_in_names[mcode],
10067 ECF_CONST | ECF_LEAF);
10069 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10070 NULL);
10071 local_define_builtin (built_in_names[dcode], ftype, dcode,
10072 built_in_names[dcode],
10073 ECF_CONST | ECF_LEAF);
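/* For instance, for SCmode (single-precision complex float) the names
   built here are "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" and
   "__gnu_divsc3" when targetm.libfunc_gnu_prefix is set), matching the
   corresponding libgcc support routines.  */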
10077 init_internal_fns ();
10080 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10081 better way.
10083 If we requested a pointer to a vector, build up the pointers that
10084 we stripped off while looking for the inner type. Similarly for
10085 return values from functions.
10087 The argument TYPE is the top of the chain, and BOTTOM is the
10088 new type which we will point to. */
10090 tree
10091 reconstruct_complex_type (tree type, tree bottom)
10093 tree inner, outer;
10095 if (TREE_CODE (type) == POINTER_TYPE)
10097 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10098 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10099 TYPE_REF_CAN_ALIAS_ALL (type));
10101 else if (TREE_CODE (type) == REFERENCE_TYPE)
10103 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10104 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10105 TYPE_REF_CAN_ALIAS_ALL (type));
10107 else if (TREE_CODE (type) == ARRAY_TYPE)
10109 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10110 outer = build_array_type (inner, TYPE_DOMAIN (type));
10112 else if (TREE_CODE (type) == FUNCTION_TYPE)
10114 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10115 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10116 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10118 else if (TREE_CODE (type) == METHOD_TYPE)
10120 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10121 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10122 so we must compensate by getting rid of it. */
10123 outer
10124 = build_method_type_directly
10125 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10126 inner,
10127 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10129 else if (TREE_CODE (type) == OFFSET_TYPE)
10131 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10132 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10134 else
10135 return bottom;
10137 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10138 TYPE_QUALS (type));
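/* For example, if TYPE is "float *" and BOTTOM is a V4SF vector type,
   the result is "V4SF *"; the qualifiers and attributes of the original
   pointer type are carried over by the call above.  */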
10141 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10142 the inner type. */
10143 tree
10144 build_vector_type_for_mode (tree innertype, machine_mode mode)
10146 poly_int64 nunits;
10147 unsigned int bitsize;
10149 switch (GET_MODE_CLASS (mode))
10151 case MODE_VECTOR_BOOL:
10152 case MODE_VECTOR_INT:
10153 case MODE_VECTOR_FLOAT:
10154 case MODE_VECTOR_FRACT:
10155 case MODE_VECTOR_UFRACT:
10156 case MODE_VECTOR_ACCUM:
10157 case MODE_VECTOR_UACCUM:
10158 nunits = GET_MODE_NUNITS (mode);
10159 break;
10161 case MODE_INT:
10162 /* Check that there are no leftover bits. */
10163 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10164 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10165 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10166 break;
10168 default:
10169 gcc_unreachable ();
10172 return make_vector_type (innertype, nunits, mode);
10175 /* Similarly, but takes the inner type and number of units, which must be
10176 a power of two. */
10178 tree
10179 build_vector_type (tree innertype, poly_int64 nunits)
10181 return make_vector_type (innertype, nunits, VOIDmode);
10184 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10186 tree
10187 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10189 gcc_assert (mask_mode != BLKmode);
10191 unsigned HOST_WIDE_INT esize;
10192 if (VECTOR_MODE_P (mask_mode))
10194 poly_uint64 vsize = GET_MODE_PRECISION (mask_mode);
10195 esize = vector_element_size (vsize, nunits);
10197 else
10198 esize = 1;
10200 tree bool_type = build_nonstandard_boolean_type (esize);
10202 return make_vector_type (bool_type, nunits, mask_mode);
10205 /* Build a vector type that holds one boolean result for each element of
10206 vector type VECTYPE. The public interface for this operation is
10207 truth_type_for. */
10209 static tree
10210 build_truth_vector_type_for (tree vectype)
10212 machine_mode vector_mode = TYPE_MODE (vectype);
10213 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10215 machine_mode mask_mode;
10216 if (VECTOR_MODE_P (vector_mode)
10217 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10218 return build_truth_vector_type_for_mode (nunits, mask_mode);
10220 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10221 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10222 tree bool_type = build_nonstandard_boolean_type (esize);
10224 return make_vector_type (bool_type, nunits, VOIDmode);
10227 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10228 set. */
10230 tree
10231 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10233 tree t = make_vector_type (innertype, nunits, VOIDmode);
10234 tree cand;
10235 /* We always build the non-opaque variant before the opaque one,
10236 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10237 cand = TYPE_NEXT_VARIANT (t);
10238 if (cand
10239 && TYPE_VECTOR_OPAQUE (cand)
10240 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10241 return cand;
10242 /* Otherwise build a variant type and make sure to queue it after
10243 the non-opaque type. */
10244 cand = build_distinct_type_copy (t);
10245 TYPE_VECTOR_OPAQUE (cand) = true;
10246 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10247 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10248 TYPE_NEXT_VARIANT (t) = cand;
10249 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10250 return cand;
10253 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10255 static poly_wide_int
10256 vector_cst_int_elt (const_tree t, unsigned int i)
10258 /* First handle elements that are directly encoded. */
10259 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10260 if (i < encoded_nelts)
10261 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10263 /* Identify the pattern that contains element I and work out the index of
10264 the last encoded element for that pattern. */
10265 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10266 unsigned int pattern = i % npatterns;
10267 unsigned int count = i / npatterns;
10268 unsigned int final_i = encoded_nelts - npatterns + pattern;
10270 /* If there are no steps, the final encoded value is the right one. */
10271 if (!VECTOR_CST_STEPPED_P (t))
10272 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10274 /* Otherwise work out the value from the last two encoded elements. */
10275 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10276 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10277 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10278 return wi::to_poly_wide (v2) + (count - 2) * diff;
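/* A worked example of the stepped case: a VECTOR_CST with one pattern
   and three encoded elements {1, 2, 3} represents {1, 2, 3, 4, ...}.
   For element I = 5, COUNT is 5, DIFF is 3 - 2 = 1, and the value is
   3 + (5 - 2) * 1 = 6.  */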
10281 /* Return the value of element I of VECTOR_CST T. */
10283 tree
10284 vector_cst_elt (const_tree t, unsigned int i)
10286 /* First handle elements that are directly encoded. */
10287 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10288 if (i < encoded_nelts)
10289 return VECTOR_CST_ENCODED_ELT (t, i);
10291 /* If there are no steps, the final encoded value is the right one. */
10292 if (!VECTOR_CST_STEPPED_P (t))
10294 /* Identify the pattern that contains element I and work out the index of
10295 the last encoded element for that pattern. */
10296 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10297 unsigned int pattern = i % npatterns;
10298 unsigned int final_i = encoded_nelts - npatterns + pattern;
10299 return VECTOR_CST_ENCODED_ELT (t, final_i);
10302 /* Otherwise work out the value from the last two encoded elements. */
10303 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10304 vector_cst_int_elt (t, i));
10307 /* Given an initializer INIT, return TRUE if INIT is zero or some
10308 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10309 null, set *NONZERO if and only if INIT is known not to be all
10310 zeros. A return value of false combined with *NONZERO being false
10311 implies that INIT may, but need not, be all zeros. Other
10312 combinations indicate definitive answers. */
10314 bool
10315 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10317 bool dummy;
10318 if (!nonzero)
10319 nonzero = &dummy;
10321 /* Conservatively clear NONZERO and set it only if INIT is definitely
10322 not all zero. */
10323 *nonzero = false;
10325 STRIP_NOPS (init);
10327 unsigned HOST_WIDE_INT off = 0;
10329 switch (TREE_CODE (init))
10331 case INTEGER_CST:
10332 if (integer_zerop (init))
10333 return true;
10335 *nonzero = true;
10336 return false;
10338 case REAL_CST:
10339 /* ??? Note that this is not correct for C4X float formats. There,
10340 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10341 negative exponent. */
10342 if (real_zerop (init)
10343 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10344 return true;
10346 *nonzero = true;
10347 return false;
10349 case FIXED_CST:
10350 if (fixed_zerop (init))
10351 return true;
10353 *nonzero = true;
10354 return false;
10356 case COMPLEX_CST:
10357 if (integer_zerop (init)
10358 || (real_zerop (init)
10359 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10360 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10361 return true;
10363 *nonzero = true;
10364 return false;
10366 case VECTOR_CST:
10367 if (VECTOR_CST_NPATTERNS (init) == 1
10368 && VECTOR_CST_DUPLICATE_P (init)
10369 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10370 return true;
10372 *nonzero = true;
10373 return false;
10375 case CONSTRUCTOR:
10377 if (TREE_CLOBBER_P (init))
10378 return false;
10380 unsigned HOST_WIDE_INT idx;
10381 tree elt;
10383 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10384 if (!initializer_zerop (elt, nonzero))
10385 return false;
10387 return true;
10390 case MEM_REF:
10392 tree arg = TREE_OPERAND (init, 0);
10393 if (TREE_CODE (arg) != ADDR_EXPR)
10394 return false;
10395 tree offset = TREE_OPERAND (init, 1);
10396 if (TREE_CODE (offset) != INTEGER_CST
10397 || !tree_fits_uhwi_p (offset))
10398 return false;
10399 off = tree_to_uhwi (offset);
10400 if (INT_MAX < off)
10401 return false;
10402 arg = TREE_OPERAND (arg, 0);
10403 if (TREE_CODE (arg) != STRING_CST)
10404 return false;
10405 init = arg;
10407 /* Fall through. */
10409 case STRING_CST:
10411 gcc_assert (off <= INT_MAX);
10413 int i = off;
10414 int n = TREE_STRING_LENGTH (init);
10415 if (n <= i)
10416 return false;
10418 /* We need to loop through all elements to handle cases like
10419 "\0" and "\0foobar". */
10420 for (i = 0; i < n; ++i)
10421 if (TREE_STRING_POINTER (init)[i] != '\0')
10423 *nonzero = true;
10424 return false;
10427 return true;
10430 default:
10431 return false;
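/* To illustrate the interface above: for a CONSTRUCTOR of all zeros the
   function returns true; for a nonzero INTEGER_CST it returns false and
   sets *NONZERO; for a clobber it returns false and leaves *NONZERO
   false, meaning the contents are simply unknown.  */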
10435 /* Return true if EXPR is an initializer expression in which every element
10436 is a constant that is numerically equal to 0 or 1. The elements do not
10437 need to be equal to each other. */
10439 bool
10440 initializer_each_zero_or_onep (const_tree expr)
10442 STRIP_ANY_LOCATION_WRAPPER (expr);
10444 switch (TREE_CODE (expr))
10446 case INTEGER_CST:
10447 return integer_zerop (expr) || integer_onep (expr);
10449 case REAL_CST:
10450 return real_zerop (expr) || real_onep (expr);
10452 case VECTOR_CST:
10454 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10455 if (VECTOR_CST_STEPPED_P (expr)
10456 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10457 return false;
10459 for (unsigned int i = 0; i < nelts; ++i)
10461 tree elt = vector_cst_elt (expr, i);
10462 if (!initializer_each_zero_or_onep (elt))
10463 return false;
10466 return true;
10469 default:
10470 return false;
10474 /* Check whether vector VEC consists of all equal elements and whether
10475 the number of elements corresponds to the type of VEC.
10476 Return the first element of the vector,
10477 or NULL_TREE if the vector is not uniform. */
10478 tree
10479 uniform_vector_p (const_tree vec)
10481 tree first, t;
10482 unsigned HOST_WIDE_INT i, nelts;
10484 if (vec == NULL_TREE)
10485 return NULL_TREE;
10487 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10489 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10490 return TREE_OPERAND (vec, 0);
10492 else if (TREE_CODE (vec) == VECTOR_CST)
10494 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10495 return VECTOR_CST_ENCODED_ELT (vec, 0);
10496 return NULL_TREE;
10499 else if (TREE_CODE (vec) == CONSTRUCTOR
10500 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10502 first = error_mark_node;
10504 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10506 if (i == 0)
10508 first = t;
10509 continue;
10511 if (!operand_equal_p (first, t, 0))
10512 return NULL_TREE;
10514 if (i != nelts)
10515 return NULL_TREE;
10517 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10518 return uniform_vector_p (first);
10519 return first;
10522 return NULL_TREE;
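/* For example, a VEC_DUPLICATE_EXPR returns its scalar operand, the
   VECTOR_CST {7, 7, 7, 7} returns the element 7, and {1, 2, 3, 4}
   returns NULL_TREE.  */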
10525 /* If the argument is INTEGER_CST, return it. If the argument is vector
10526 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10527 return NULL_TREE.
10528 Look through location wrappers. */
10530 tree
10531 uniform_integer_cst_p (tree t)
10533 STRIP_ANY_LOCATION_WRAPPER (t);
10535 if (TREE_CODE (t) == INTEGER_CST)
10536 return t;
10538 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10540 t = uniform_vector_p (t);
10541 if (t && TREE_CODE (t) == INTEGER_CST)
10542 return t;
10545 return NULL_TREE;
10548 /* Check whether T is an integer constant or a constant integer vector in which
10549 each element E satisfies ~E + 1 == pow2; if so return ~E, otherwise NULL_TREE. */
10551 tree
10552 bitmask_inv_cst_vector_p (tree t)
10555 tree_code code = TREE_CODE (t);
10556 tree type = TREE_TYPE (t);
10558 if (!INTEGRAL_TYPE_P (type)
10559 && !VECTOR_INTEGER_TYPE_P (type))
10560 return NULL_TREE;
10562 unsigned HOST_WIDE_INT nelts = 1;
10563 tree cst;
10564 unsigned int idx = 0;
10565 bool uniform = uniform_integer_cst_p (t);
10566 tree newtype = unsigned_type_for (type);
10567 tree_vector_builder builder;
10568 if (code == INTEGER_CST)
10569 cst = t;
10570 else
10572 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10573 return NULL_TREE;
10575 cst = vector_cst_elt (t, 0);
10576 builder.new_vector (newtype, nelts, 1);
10579 tree ty = unsigned_type_for (TREE_TYPE (cst));
10583 if (idx > 0)
10584 cst = vector_cst_elt (t, idx);
10585 wide_int icst = wi::to_wide (cst);
10586 wide_int inv = wi::bit_not (icst);
10587 icst = wi::add (1, inv);
10588 if (wi::popcount (icst) != 1)
10589 return NULL_TREE;
10591 tree newcst = wide_int_to_tree (ty, inv);
10593 if (uniform)
10594 return build_uniform_cst (newtype, newcst);
10596 builder.quick_push (newcst);
10598 while (++idx < nelts);
10600 return builder.build ();
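/* A small worked example: for a 32-bit constant E = 0xfffffff0, ~E is
   0xf and ~E + 1 = 0x10 is a power of two, so the function returns 15
   (in the unsigned variant of the type).  Each element of a vector
   constant is checked independently in the loop above.  */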
10603 /* If VECTOR_CST T has a single nonzero element, return the index of that
10604 element, otherwise return -1. */
10606 int
10607 single_nonzero_element (const_tree t)
10609 unsigned HOST_WIDE_INT nelts;
10610 unsigned int repeat_nelts;
10611 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10612 repeat_nelts = nelts;
10613 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10615 nelts = vector_cst_encoded_nelts (t);
10616 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10618 else
10619 return -1;
10621 int res = -1;
10622 for (unsigned int i = 0; i < nelts; ++i)
10624 tree elt = vector_cst_elt (t, i);
10625 if (!integer_zerop (elt) && !real_zerop (elt))
10627 if (res >= 0 || i >= repeat_nelts)
10628 return -1;
10629 res = i;
10632 return res;
10635 /* Build an empty statement at location LOC. */
10637 tree
10638 build_empty_stmt (location_t loc)
10640 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10641 SET_EXPR_LOCATION (t, loc);
10642 return t;
10646 /* Build an OMP clause with code CODE. LOC is the location of the
10647 clause. */
10649 tree
10650 build_omp_clause (location_t loc, enum omp_clause_code code)
10652 tree t;
10653 int size, length;
10655 length = omp_clause_num_ops[code];
10656 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10658 record_node_allocation_statistics (OMP_CLAUSE, size);
10660 t = (tree) ggc_internal_alloc (size);
10661 memset (t, 0, size);
10662 TREE_SET_CODE (t, OMP_CLAUSE);
10663 OMP_CLAUSE_SET_CODE (t, code);
10664 OMP_CLAUSE_LOCATION (t) = loc;
10666 return t;
10669 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10670 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10671 Except for the CODE and operand count field, other storage for the
10672 object is initialized to zeros. */
10674 tree
10675 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10677 tree t;
10678 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10680 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10681 gcc_assert (len >= 1);
10683 record_node_allocation_statistics (code, length);
10685 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10687 TREE_SET_CODE (t, code);
10689 /* Can't use TREE_OPERAND to store the length because if checking is
10690 enabled, it will try to check the length before we store it. :-P */
10691 t->exp.operands[0] = build_int_cst (sizetype, len);
10693 return t;
10696 /* Helper function for build_call_* functions; build a CALL_EXPR with
10697 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10698 the argument slots. */
10700 static tree
10701 build_call_1 (tree return_type, tree fn, int nargs)
10703 tree t;
10705 t = build_vl_exp (CALL_EXPR, nargs + 3);
10706 TREE_TYPE (t) = return_type;
10707 CALL_EXPR_FN (t) = fn;
10708 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10710 return t;
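/* In the resulting CALL_EXPR, operand 0 holds the operand count (which
   is why build_vl_exp is given NARGS + 3), operand 1 is CALL_EXPR_FN,
   operand 2 is the static chain, and the call arguments start at
   operand 3, which is where CALL_EXPR_ARG stores them.  */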
10713 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10714 FN and a null static chain slot. NARGS is the number of call arguments
10715 which are specified as "..." arguments. */
10717 tree
10718 build_call_nary (tree return_type, tree fn, int nargs, ...)
10720 tree ret;
10721 va_list args;
10722 va_start (args, nargs);
10723 ret = build_call_valist (return_type, fn, nargs, args);
10724 va_end (args);
10725 return ret;
10728 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10729 FN and a null static chain slot. NARGS is the number of call arguments
10730 which are specified as a va_list ARGS. */
10732 tree
10733 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10735 tree t;
10736 int i;
10738 t = build_call_1 (return_type, fn, nargs);
10739 for (i = 0; i < nargs; i++)
10740 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10741 process_call_operands (t);
10742 return t;
10745 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10746 FN and a null static chain slot. NARGS is the number of call arguments
10747 which are specified as a tree array ARGS. */
10749 tree
10750 build_call_array_loc (location_t loc, tree return_type, tree fn,
10751 int nargs, const tree *args)
10753 tree t;
10754 int i;
10756 t = build_call_1 (return_type, fn, nargs);
10757 for (i = 0; i < nargs; i++)
10758 CALL_EXPR_ARG (t, i) = args[i];
10759 process_call_operands (t);
10760 SET_EXPR_LOCATION (t, loc);
10761 return t;
10764 /* Like build_call_array, but takes a vec. */
10766 tree
10767 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10769 tree ret, t;
10770 unsigned int ix;
10772 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10773 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10774 CALL_EXPR_ARG (ret, ix) = t;
10775 process_call_operands (ret);
10776 return ret;
10779 /* Conveniently construct a function call expression. FNDECL names the
10780 function to be called and N arguments are passed in the array
10781 ARGARRAY. */
10783 tree
10784 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10786 tree fntype = TREE_TYPE (fndecl);
10787 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10789 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10792 /* Conveniently construct a function call expression. FNDECL names the
10793 function to be called and the arguments are passed in the vector
10794 VEC. */
10796 tree
10797 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10799 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10800 vec_safe_address (vec));
10804 /* Conveniently construct a function call expression. FNDECL names the
10805 function to be called, N is the number of arguments, and the "..."
10806 parameters are the argument expressions. */
10808 tree
10809 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10811 va_list ap;
10812 tree *argarray = XALLOCAVEC (tree, n);
10813 int i;
10815 va_start (ap, n);
10816 for (i = 0; i < n; i++)
10817 argarray[i] = va_arg (ap, tree);
10818 va_end (ap);
10819 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10822 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10823 varargs macros aren't supported by all bootstrap compilers. */
10825 tree
10826 build_call_expr (tree fndecl, int n, ...)
10828 va_list ap;
10829 tree *argarray = XALLOCAVEC (tree, n);
10830 int i;
10832 va_start (ap, n);
10833 for (i = 0; i < n; i++)
10834 argarray[i] = va_arg (ap, tree);
10835 va_end (ap);
10836 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
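/* A typical use, assuming DST, SRC and LEN are already-built trees:
     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dst, src, len);
   builds a call to memcpy with the three given arguments.  */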
10839 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10840 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10841 It will get gimplified later into an ordinary internal function. */
10843 tree
10844 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10845 tree type, int n, const tree *args)
10847 tree t = build_call_1 (type, NULL_TREE, n);
10848 for (int i = 0; i < n; ++i)
10849 CALL_EXPR_ARG (t, i) = args[i];
10850 SET_EXPR_LOCATION (t, loc);
10851 CALL_EXPR_IFN (t) = ifn;
10852 process_call_operands (t);
10853 return t;
10856 /* Build internal call expression. This is just like CALL_EXPR, except
10857 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10858 internal function. */
10860 tree
10861 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10862 tree type, int n, ...)
10864 va_list ap;
10865 tree *argarray = XALLOCAVEC (tree, n);
10866 int i;
10868 va_start (ap, n);
10869 for (i = 0; i < n; i++)
10870 argarray[i] = va_arg (ap, tree);
10871 va_end (ap);
10872 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10875 /* Return a function call to FN, if the target is guaranteed to support it,
10876 or null otherwise.
10878 N is the number of arguments, passed in the "...", and TYPE is the
10879 type of the return value. */
10881 tree
10882 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10883 int n, ...)
10885 va_list ap;
10886 tree *argarray = XALLOCAVEC (tree, n);
10887 int i;
10889 va_start (ap, n);
10890 for (i = 0; i < n; i++)
10891 argarray[i] = va_arg (ap, tree);
10892 va_end (ap);
10893 if (internal_fn_p (fn))
10895 internal_fn ifn = as_internal_fn (fn);
10896 if (direct_internal_fn_p (ifn))
10898 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10899 if (!direct_internal_fn_supported_p (ifn, types,
10900 OPTIMIZE_FOR_BOTH))
10901 return NULL_TREE;
10903 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10905 else
10907 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10908 if (!fndecl)
10909 return NULL_TREE;
10910 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10914 /* Return a function call to the appropriate builtin alloca variant.
10916 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10917 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10918 bound for SIZE in case it is not a fixed value. */
10920 tree
10921 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10923 if (max_size >= 0)
10925 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10926 return
10927 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10929 else if (align > 0)
10931 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10932 return build_call_expr (t, 2, size, size_int (align));
10934 else
10936 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10937 return build_call_expr (t, 1, size);
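/* For instance, build_alloca_call_expr (size, 0, -1) produces a plain
   __builtin_alloca (size) call, while passing a nonzero ALIGN or a
   non-negative MAX_SIZE selects one of the more constrained variants
   above.  */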
10941 /* The built-in decl to use to mark code points believed to be unreachable.
10942 Typically __builtin_unreachable, but __builtin_trap if
10943 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10944 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10945 appropriate ubsan function. When building a call directly, use
10946 {gimple_,}build_builtin_unreachable instead. */
10948 tree
10949 builtin_decl_unreachable ()
10951 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10953 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10954 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10955 : flag_unreachable_traps)
10956 fncode = BUILT_IN_UNREACHABLE_TRAP;
10957 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10958 in the sanopt pass. */
10960 return builtin_decl_explicit (fncode);
10963 /* Build a call to __builtin_unreachable, possibly rewritten by
10964 -fsanitize=unreachable. Use this rather than the above when practical. */
10966 tree
10967 build_builtin_unreachable (location_t loc)
10969 tree data = NULL_TREE;
10970 tree fn = sanitize_unreachable_fn (&data, loc);
10971 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10974 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10975 if SIZE == -1) and return a tree node representing char* pointer to
10976 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10977 the STRING_CST value is the LEN bytes at STR (the representation
10978 of the string, which may be wide). Otherwise it's all zeros. */
10980 tree
10981 build_string_literal (unsigned len, const char *str /* = NULL */,
10982 tree eltype /* = char_type_node */,
10983 unsigned HOST_WIDE_INT size /* = -1 */)
10985 tree t = build_string (len, str);
10986 /* Set the maximum valid index based on the string length or SIZE. */
10987 unsigned HOST_WIDE_INT maxidx
10988 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10990 tree index = build_index_type (size_int (maxidx));
10991 eltype = build_type_variant (eltype, 1, 0);
10992 tree type = build_array_type (eltype, index);
10993 TREE_TYPE (t) = type;
10994 TREE_CONSTANT (t) = 1;
10995 TREE_READONLY (t) = 1;
10996 TREE_STATIC (t) = 1;
10998 type = build_pointer_type (eltype);
10999 t = build1 (ADDR_EXPR, type,
11000 build4 (ARRAY_REF, eltype,
11001 t, integer_zero_node, NULL_TREE, NULL_TREE));
11002 return t;
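/* For example, build_string_literal (4, "abc") creates the STRING_CST
   "abc\0" (LEN counts the terminating NUL here) and returns the address
   of its first character, built as an ADDR_EXPR of an ARRAY_REF as the
   comment above describes.  */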
11007 /* Return true if T (assumed to be a DECL) must be assigned a memory
11008 location. */
11010 bool
11011 needs_to_live_in_memory (const_tree t)
11013 return (TREE_ADDRESSABLE (t)
11014 || is_global_var (t)
11015 || (TREE_CODE (t) == RESULT_DECL
11016 && !DECL_BY_REFERENCE (t)
11017 && aggregate_value_p (t, current_function_decl)));
11020 /* Return the value of constant X, sign-extended. */
11022 HOST_WIDE_INT
11023 int_cst_value (const_tree x)
11025 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11026 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11028 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11029 gcc_assert (cst_and_fits_in_hwi (x));
11031 if (bits < HOST_BITS_PER_WIDE_INT)
11033 bool negative = ((val >> (bits - 1)) & 1) != 0;
11034 if (negative)
11035 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11036 else
11037 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11040 return val;
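/* For an 8-bit type, a constant with low bits 0xff yields -1 after the
   sign extension above, while 0x7f yields 127.  */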
11043 /* If TYPE is an integral or pointer type, return an integer type with
11044 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11045 if TYPE is already an integer type of signedness UNSIGNEDP.
11046 If TYPE is a floating-point type, return an integer type with the same
11047 bitsize and with the signedness given by UNSIGNEDP; this is useful
11048 when doing bit-level operations on a floating-point value. */
11050 tree
11051 signed_or_unsigned_type_for (int unsignedp, tree type)
11053 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11054 return type;
11056 if (TREE_CODE (type) == VECTOR_TYPE)
11058 tree inner = TREE_TYPE (type);
11059 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11060 if (!inner2)
11061 return NULL_TREE;
11062 if (inner == inner2)
11063 return type;
11064 machine_mode new_mode;
11065 if (VECTOR_MODE_P (TYPE_MODE (type))
11066 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11067 return build_vector_type_for_mode (inner2, new_mode);
11068 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11071 if (TREE_CODE (type) == COMPLEX_TYPE)
11073 tree inner = TREE_TYPE (type);
11074 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11075 if (!inner2)
11076 return NULL_TREE;
11077 if (inner == inner2)
11078 return type;
11079 return build_complex_type (inner2);
11082 unsigned int bits;
11083 if (INTEGRAL_TYPE_P (type)
11084 || POINTER_TYPE_P (type)
11085 || TREE_CODE (type) == OFFSET_TYPE)
11086 bits = TYPE_PRECISION (type);
11087 else if (TREE_CODE (type) == REAL_TYPE)
11088 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11089 else
11090 return NULL_TREE;
11092 if (TREE_CODE (type) == BITINT_TYPE && (unsignedp || bits > 1))
11093 return build_bitint_type (bits, unsignedp);
11094 return build_nonstandard_integer_type (bits, unsignedp);
11097 /* If TYPE is an integral or pointer type, return an integer type with
11098 the same precision which is unsigned, or itself if TYPE is already an
11099 unsigned integer type. If TYPE is a floating-point type, return an
11100 unsigned integer type with the same bitsize as TYPE. */
11102 tree
11103 unsigned_type_for (tree type)
11105 return signed_or_unsigned_type_for (1, type);
11108 /* If TYPE is an integral or pointer type, return an integer type with
11109 the same precision which is signed, or itself if TYPE is already a
11110 signed integer type. If TYPE is a floating-point type, return a
11111 signed integer type with the same bitsize as TYPE. */
11113 tree
11114 signed_type_for (tree type)
11116 return signed_or_unsigned_type_for (0, type);
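/* For instance, unsigned_type_for applied to a 32-bit signed integer
   type yields a 32-bit unsigned integer type, applied to a pointer type
   it yields an unsigned integer type of the pointer's precision, and
   applied to a REAL_TYPE it yields an unsigned type as wide as the
   floating-point mode.  */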
11119 /* - For VECTOR_TYPEs:
11120 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11121 - The number of elements must match (known_eq).
11122 - targetm.vectorize.get_mask_mode must exist and return
11123 exactly the same mode as the truth type.
11124 - Otherwise, the truth type must be a BOOLEAN_TYPE
11125 or useless_type_conversion_p to BOOLEAN_TYPE. */
11126 bool
11127 is_truth_type_for (tree type, tree truth_type)
11129 machine_mode mask_mode = TYPE_MODE (truth_type);
11130 machine_mode vmode = TYPE_MODE (type);
11131 machine_mode tmask_mode;
11133 if (TREE_CODE (type) == VECTOR_TYPE)
11135 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11136 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11137 TYPE_VECTOR_SUBPARTS (truth_type))
11138 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11139 && tmask_mode == mask_mode)
11140 return true;
11142 return false;
11145 return useless_type_conversion_p (boolean_type_node, truth_type);
11148 /* If TYPE is a vector type, return a boolean vector type with the
11149 same number of subparts. Otherwise return boolean_type_node. */
11151 tree
11152 truth_type_for (tree type)
11154 if (TREE_CODE (type) == VECTOR_TYPE)
11156 if (VECTOR_BOOLEAN_TYPE_P (type))
11157 return type;
11158 return build_truth_vector_type_for (type);
11160 else
11161 return boolean_type_node;
11164 /* Returns the largest value obtainable by casting something in INNER type to
11165 OUTER type. */
11167 tree
11168 upper_bound_in_type (tree outer, tree inner)
11170 unsigned int det = 0;
11171 unsigned oprec = TYPE_PRECISION (outer);
11172 unsigned iprec = TYPE_PRECISION (inner);
11173 unsigned prec;
11175 /* Compute a unique number for every combination. */
11176 det |= (oprec > iprec) ? 4 : 0;
11177 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11178 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11180 /* Determine the exponent to use. */
11181 switch (det)
11183 case 0:
11184 case 1:
11185 /* oprec <= iprec, outer: signed, inner: don't care. */
11186 prec = oprec - 1;
11187 break;
11188 case 2:
11189 case 3:
11190 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11191 prec = oprec;
11192 break;
11193 case 4:
11194 /* oprec > iprec, outer: signed, inner: signed. */
11195 prec = iprec - 1;
11196 break;
11197 case 5:
11198 /* oprec > iprec, outer: signed, inner: unsigned. */
11199 prec = iprec;
11200 break;
11201 case 6:
11202 /* oprec > iprec, outer: unsigned, inner: signed. */
11203 prec = oprec;
11204 break;
11205 case 7:
11206 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11207 prec = iprec;
11208 break;
11209 default:
11210 gcc_unreachable ();
11213 return wide_int_to_tree (outer,
11214 wi::mask (prec, false, TYPE_PRECISION (outer)));
11217 /* Returns the smallest value obtainable by casting something in INNER type to
11218 OUTER type. */
11220 tree
11221 lower_bound_in_type (tree outer, tree inner)
11223 unsigned oprec = TYPE_PRECISION (outer);
11224 unsigned iprec = TYPE_PRECISION (inner);
11226 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11227 and obtain 0. */
11228 if (TYPE_UNSIGNED (outer)
11229 /* If we are widening something of an unsigned type, OUTER type
11230 contains all values of INNER type. In particular, both INNER
11231 and OUTER types have zero in common. */
11232 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11233 return build_int_cst (outer, 0);
11234 else
11236 /* If we are widening a signed type to another signed type, we
11237 want to obtain -2^(iprec-1). If we are keeping the
11238 precision or narrowing to a signed type, we want to obtain
11239 -2^(oprec-1). */
11240 unsigned prec = oprec > iprec ? iprec : oprec;
11241 return wide_int_to_tree (outer,
11242 wi::mask (prec - 1, true,
11243 TYPE_PRECISION (outer)));
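/* Two worked examples of the bounds above: casting an 8-bit unsigned
   value to a 32-bit signed type gives an upper bound of 255 (case 5,
   PREC = iprec), and casting an 8-bit signed value to the same type
   gives a lower bound of -128, i.e. -2^(iprec-1).  */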
11247 /* Return true if two operands that are suitable for PHI nodes are
11248 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11249 SSA_NAME or invariant. Note that this is strictly an optimization.
11250 That is, callers of this function can directly call operand_equal_p
11251 and get the same result, only slower. */
11253 bool
11254 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11256 if (arg0 == arg1)
11257 return true;
11258 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11259 return false;
11260 return operand_equal_p (arg0, arg1, 0);
11263 /* Returns number of zeros at the end of binary representation of X. */
11265 tree
11266 num_ending_zeros (const_tree x)
11268 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11272 #define WALK_SUBTREE(NODE) \
11273 do \
11275 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11276 if (result) \
11277 return result; \
11279 while (0)
11281 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11282 to be walked whenever a type is seen in the tree. The rest of the operands
11283 and the return value are as for walk_tree. */
11285 static tree
11286 walk_type_fields (tree type, walk_tree_fn func, void *data,
11287 hash_set<tree> *pset, walk_tree_lh lh)
11289 tree result = NULL_TREE;
11291 switch (TREE_CODE (type))
11293 case POINTER_TYPE:
11294 case REFERENCE_TYPE:
11295 case VECTOR_TYPE:
11296 /* We have to worry about mutually recursive pointers. These can't
11297 be written in C. They can in Ada. It's pathological, but
11298 there's an ACATS test (c38102a) that checks it. Deal with this
11299 by checking if we're pointing to another pointer, that one
11300 points to another pointer, that one does too, and we have no htab.
11301 If so, get a hash table. We check three levels deep to avoid
11302 the cost of the hash table if we don't need one. */
11303 if (POINTER_TYPE_P (TREE_TYPE (type))
11304 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11305 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11306 && !pset)
11308 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11309 func, data);
11310 if (result)
11311 return result;
11313 break;
11316 /* fall through */
11318 case COMPLEX_TYPE:
11319 WALK_SUBTREE (TREE_TYPE (type));
11320 break;
11322 case METHOD_TYPE:
11323 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11325 /* Fall through. */
11327 case FUNCTION_TYPE:
11328 WALK_SUBTREE (TREE_TYPE (type));
11330 tree arg;
11332 /* We never want to walk into default arguments. */
11333 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11334 WALK_SUBTREE (TREE_VALUE (arg));
11336 break;
11338 case ARRAY_TYPE:
11339 /* Don't follow this node's type if it is a pointer, for fear that
11340 we'll have infinite recursion. If we have a PSET, then we
11341 need not fear. */
11342 if (pset
11343 || (!POINTER_TYPE_P (TREE_TYPE (type))
11344 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11345 WALK_SUBTREE (TREE_TYPE (type));
11346 WALK_SUBTREE (TYPE_DOMAIN (type));
11347 break;
11349 case OFFSET_TYPE:
11350 WALK_SUBTREE (TREE_TYPE (type));
11351 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11352 break;
11354 default:
11355 break;
11358 return NULL_TREE;
11361 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11362 called with the DATA and the address of each sub-tree. If FUNC returns a
11363 non-NULL value, the traversal is stopped, and the value returned by FUNC
11364 is returned. If PSET is non-NULL it is used to record the nodes visited,
11365 and to avoid visiting a node more than once. */
11367 tree
11368 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11369 hash_set<tree> *pset, walk_tree_lh lh)
11371 #define WALK_SUBTREE_TAIL(NODE) \
11372 do \
11374 tp = & (NODE); \
11375 goto tail_recurse; \
11377 while (0)
11379 tail_recurse:
11380 /* Skip empty subtrees. */
11381 if (!*tp)
11382 return NULL_TREE;
11384 /* Don't walk the same tree twice, if the user has requested
11385 that we avoid doing so. */
11386 if (pset && pset->add (*tp))
11387 return NULL_TREE;
11389 /* Call the function. */
11390 int walk_subtrees = 1;
11391 tree result = (*func) (tp, &walk_subtrees, data);
11393 /* If we found something, return it. */
11394 if (result)
11395 return result;
11397 tree t = *tp;
11398 tree_code code = TREE_CODE (t);
11400 /* Even if we didn't, FUNC may have decided that there was nothing
11401 interesting below this point in the tree. */
11402 if (!walk_subtrees)
11404 /* But we still need to check our siblings. */
11405 if (code == TREE_LIST)
11406 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11407 else if (code == OMP_CLAUSE)
11408 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11409 else
11410 return NULL_TREE;
11413 if (lh)
11415 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11416 if (result || !walk_subtrees)
11417 return result;
11420 switch (code)
11422 case ERROR_MARK:
11423 case IDENTIFIER_NODE:
11424 case INTEGER_CST:
11425 case REAL_CST:
11426 case FIXED_CST:
11427 case STRING_CST:
11428 case BLOCK:
11429 case PLACEHOLDER_EXPR:
11430 case SSA_NAME:
11431 case FIELD_DECL:
11432 case RESULT_DECL:
11433 /* None of these have subtrees other than those already walked
11434 above. */
11435 break;
11437 case TREE_LIST:
11438 WALK_SUBTREE (TREE_VALUE (t));
11439 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11441 case TREE_VEC:
11443 int len = TREE_VEC_LENGTH (t);
11445 if (len == 0)
11446 break;
11448 /* Walk all elements but the last. */
11449 for (int i = 0; i < len - 1; ++i)
11450 WALK_SUBTREE (TREE_VEC_ELT (t, i));
11452 /* Now walk the last one as a tail call. */
11453 WALK_SUBTREE_TAIL (TREE_VEC_ELT (t, len - 1));
11456 case VECTOR_CST:
11458 unsigned len = vector_cst_encoded_nelts (t);
11459 if (len == 0)
11460 break;
11461 /* Walk all elements but the last. */
11462 for (unsigned i = 0; i < len - 1; ++i)
11463 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (t, i));
11464 /* Now walk the last one as a tail call. */
11465 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (t, len - 1));
11468 case COMPLEX_CST:
11469 WALK_SUBTREE (TREE_REALPART (t));
11470 WALK_SUBTREE_TAIL (TREE_IMAGPART (t));
11472 case CONSTRUCTOR:
11474 unsigned HOST_WIDE_INT idx;
11475 constructor_elt *ce;
11477 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce);
11478 idx++)
11479 WALK_SUBTREE (ce->value);
11481 break;
11483 case SAVE_EXPR:
11484 WALK_SUBTREE_TAIL (TREE_OPERAND (t, 0));
11486 case BIND_EXPR:
11488 tree decl;
11489 for (decl = BIND_EXPR_VARS (t); decl; decl = DECL_CHAIN (decl))
11491 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11492 into declarations that are just mentioned, rather than
11493 declared; they don't really belong to this part of the tree.
11494 And, we can see cycles: the initializer for a declaration
11495 can refer to the declaration itself. */
11496 WALK_SUBTREE (DECL_INITIAL (decl));
11497 WALK_SUBTREE (DECL_SIZE (decl));
11498 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11500 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (t));
11503 case STATEMENT_LIST:
11505 tree_stmt_iterator i;
11506 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
11507 WALK_SUBTREE (*tsi_stmt_ptr (i));
11509 break;
11511 case OMP_CLAUSE:
11513 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
11514 for (int i = 0; i < len; i++)
11515 WALK_SUBTREE (OMP_CLAUSE_OPERAND (t, i));
11516 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11519 case TARGET_EXPR:
11521 int i, len;
11523 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11524 But, we only want to walk once. */
11525 len = (TREE_OPERAND (t, 3) == TREE_OPERAND (t, 1)) ? 2 : 3;
11526 for (i = 0; i < len; ++i)
11527 WALK_SUBTREE (TREE_OPERAND (t, i));
11528 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len));
11531 case DECL_EXPR:
11532 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11533 defining. We only want to walk into these fields of a type in this
11534 case and not in the general case of a mere reference to the type.
11536 The criterion is as follows: if the field can be an expression, it
11537 must be walked only here. This should be in keeping with the fields
11538 that are directly gimplified in gimplify_type_sizes in order for the
11539 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11540 variable-sized types.
11542 Note that DECLs get walked as part of processing the BIND_EXPR. */
11543 if (TREE_CODE (DECL_EXPR_DECL (t)) == TYPE_DECL)
11545 /* Call the function for the decl so e.g. copy_tree_body_r can
11546 replace it with the remapped one. */
11547 result = (*func) (&DECL_EXPR_DECL (t), &walk_subtrees, data);
11548 if (result || !walk_subtrees)
11549 return result;
11551 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (t));
11552 if (TREE_CODE (*type_p) == ERROR_MARK)
11553 return NULL_TREE;
11555 /* Call the function for the type. See if it returns anything or
11556 doesn't want us to continue. If we are to continue, walk both
11557 the normal fields and those for the declaration case. */
11558 result = (*func) (type_p, &walk_subtrees, data);
11559 if (result || !walk_subtrees)
11560 return result;
11562 tree type = *type_p;
11564 /* But do not walk a pointed-to type since it may itself need to
11565 be walked in the declaration case if it isn't anonymous. */
11566 if (!POINTER_TYPE_P (type))
11568 result = walk_type_fields (type, func, data, pset, lh);
11569 if (result)
11570 return result;
11573 /* If this is a record type, also walk the fields. */
11574 if (RECORD_OR_UNION_TYPE_P (type))
11576 tree field;
11578 for (field = TYPE_FIELDS (type); field;
11579 field = DECL_CHAIN (field))
11581 /* We'd like to look at the type of the field, but we can
11582 easily get infinite recursion. So assume it's pointed
11583 to elsewhere in the tree. Also, ignore things that
11584 aren't fields. */
11585 if (TREE_CODE (field) != FIELD_DECL)
11586 continue;
11588 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11589 WALK_SUBTREE (DECL_SIZE (field));
11590 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11591 if (TREE_CODE (type) == QUAL_UNION_TYPE)
11592 WALK_SUBTREE (DECL_QUALIFIER (field));
11596 /* Same for scalar types. */
11597 else if (TREE_CODE (type) == BOOLEAN_TYPE
11598 || TREE_CODE (type) == ENUMERAL_TYPE
11599 || TREE_CODE (type) == INTEGER_TYPE
11600 || TREE_CODE (type) == FIXED_POINT_TYPE
11601 || TREE_CODE (type) == REAL_TYPE)
11603 WALK_SUBTREE (TYPE_MIN_VALUE (type));
11604 WALK_SUBTREE (TYPE_MAX_VALUE (type));
11607 WALK_SUBTREE (TYPE_SIZE (type));
11608 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (type));
11610 /* FALLTHRU */
11612 default:
11613 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11615 int i, len;
11617 /* Walk over all the sub-trees of this operand. */
11618 len = TREE_OPERAND_LENGTH (t);
11620 /* Go through the subtrees. We need to do this in forward order so
11621 that the scope of a FOR_EXPR is handled properly. */
11622 if (len)
11624 for (i = 0; i < len - 1; ++i)
11625 WALK_SUBTREE (TREE_OPERAND (t, i));
11626 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len - 1));
11629 /* If this is a type, walk the needed fields in the type. */
11630 else if (TYPE_P (t))
11631 return walk_type_fields (t, func, data, pset, lh);
11632 break;
11635 /* We didn't find what we were looking for. */
11636 return NULL_TREE;
11638 #undef WALK_SUBTREE_TAIL
11640 #undef WALK_SUBTREE
11642 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11644 tree
11645 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11646 walk_tree_lh lh)
11648 tree result;
11650 hash_set<tree> pset;
11651 result = walk_tree_1 (tp, func, data, &pset, lh);
11652 return result;
11656 tree
11657 tree_block (tree t)
11659 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11661 if (IS_EXPR_CODE_CLASS (c))
11662 return LOCATION_BLOCK (t->exp.locus);
11663 gcc_unreachable ();
11664 return NULL;
11667 void
11668 tree_set_block (tree t, tree b)
11670 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11672 if (IS_EXPR_CODE_CLASS (c))
11674 t->exp.locus = set_block (t->exp.locus, b);
11676 else
11677 gcc_unreachable ();
11680 /* Create a nameless artificial label and put it in the current
11681 function context. The label has a location of LOC. Returns the
11682 newly created label. */
11684 tree
11685 create_artificial_label (location_t loc)
11687 tree lab = build_decl (loc,
11688 LABEL_DECL, NULL_TREE, void_type_node);
11690 DECL_ARTIFICIAL (lab) = 1;
11691 DECL_IGNORED_P (lab) = 1;
11692 DECL_CONTEXT (lab) = current_function_decl;
11693 return lab;
11696 /* Given a tree, try to return a useful variable name that we can use
11697 to prefix a temporary that is being assigned the value of the tree.
11698 I.E. given <temp> = &A, return A. */
11700 const char *
11701 get_name (tree t)
11703 tree stripped_decl;
11705 stripped_decl = t;
11706 STRIP_NOPS (stripped_decl);
11707 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11708 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11709 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11711 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11712 if (!name)
11713 return NULL;
11714 return IDENTIFIER_POINTER (name);
11716 else
11718 switch (TREE_CODE (stripped_decl))
11720 case ADDR_EXPR:
11721 return get_name (TREE_OPERAND (stripped_decl, 0));
11722 default:
11723 return NULL;
11728 /* Return true if TYPE has a variable argument list. */
11730 bool
11731 stdarg_p (const_tree fntype)
11733 function_args_iterator args_iter;
11734 tree n = NULL_TREE, t;
11736 if (!fntype)
11737 return false;
11739 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11740 return true;
11742 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11744 n = t;
11747 return n != NULL_TREE && n != void_type_node;
11750 /* Return true if TYPE has a prototype. */
11752 bool
11753 prototype_p (const_tree fntype)
11755 tree t;
11757 gcc_assert (fntype != NULL_TREE);
11759 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11760 return true;
11762 t = TYPE_ARG_TYPES (fntype);
11763 return (t != NULL_TREE);
11766 /* If BLOCK is inlined from an __attribute__((__artificial__))
11767 routine, return pointer to location from where it has been
11768 called. */
11769 location_t *
11770 block_nonartificial_location (tree block)
11772 location_t *ret = NULL;
11774 while (block && TREE_CODE (block) == BLOCK
11775 && BLOCK_ABSTRACT_ORIGIN (block))
11777 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11778 if (TREE_CODE (ao) == FUNCTION_DECL)
11780 /* If AO is an artificial inline, point RET to the
11781 call site locus at which it has been inlined and continue
11782 the loop, in case AO's caller is also an artificial
11783 inline. */
11784 if (DECL_DECLARED_INLINE_P (ao)
11785 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11786 ret = &BLOCK_SOURCE_LOCATION (block);
11787 else
11788 break;
11790 else if (TREE_CODE (ao) != BLOCK)
11791 break;
11793 block = BLOCK_SUPERCONTEXT (block);
11795 return ret;
11799 /* If EXP is inlined from an __attribute__((__artificial__))
11800 function, return the location of the original call expression. */
11802 location_t
11803 tree_nonartificial_location (tree exp)
11805 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11807 if (loc)
11808 return *loc;
11809 else
11810 return EXPR_LOCATION (exp);
11813 /* Return the location into which EXP has been inlined. Analogous
11814 to tree_nonartificial_location() above but not limited to artificial
11815 functions declared inline. If SYSTEM_HEADER is true, return
11816 the macro expansion point of the location if it's in a system header. */
11818 location_t
11819 tree_inlined_location (tree exp, bool system_header /* = true */)
11821 location_t loc = UNKNOWN_LOCATION;
11823 tree block = TREE_BLOCK (exp);
11825 while (block && TREE_CODE (block) == BLOCK
11826 && BLOCK_ABSTRACT_ORIGIN (block))
11828 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11829 if (TREE_CODE (ao) == FUNCTION_DECL)
11830 loc = BLOCK_SOURCE_LOCATION (block);
11831 else if (TREE_CODE (ao) != BLOCK)
11832 break;
11834 block = BLOCK_SUPERCONTEXT (block);
11837 if (loc == UNKNOWN_LOCATION)
11839 loc = EXPR_LOCATION (exp);
11840 if (system_header)
11841 /* Only consider macro expansion when the block traversal failed
11842 to find a location. Otherwise it's not relevant. */
11843 return expansion_point_location_if_in_system_header (loc);
11846 return loc;
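/* Illustrative sketch, not part of the original source: the kind of
   artificial always-inline wrapper the two functions above are meant to
   look through, so that diagnostics for the inlined body are reported at
   the caller rather than inside the wrapper.  MY_MEMCPY is a hypothetical
   name.  */

#include <string.h>

static inline __attribute__ ((always_inline, artificial)) void *
my_memcpy (void *dst, const void *src, size_t n)
{
  /* A warning issued for this call would be attributed to the location
     from which my_memcpy was called.  */
  return memcpy (dst, src, n);
}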
11849 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11850 nodes. */
11852 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11854 hashval_t
11855 cl_option_hasher::hash (tree x)
11857 const_tree const t = x;
11859 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11860 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11861 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11862 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11863 else
11864 gcc_unreachable ();
11867 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11868 TARGET_OPTION tree node) is the same as that given by *Y, which is
11869 a node of the same kind. */
11871 bool
11872 cl_option_hasher::equal (tree x, tree y)
11874 const_tree const xt = x;
11875 const_tree const yt = y;
11877 if (TREE_CODE (xt) != TREE_CODE (yt))
11878 return false;
11880 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11881 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11882 TREE_OPTIMIZATION (yt));
11883 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11884 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11885 TREE_TARGET_OPTION (yt));
11886 else
11887 gcc_unreachable ();
11890 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11892 tree
11893 build_optimization_node (struct gcc_options *opts,
11894 struct gcc_options *opts_set)
11896 tree t;
11898 /* Use the cache of optimization nodes. */
11900 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11901 opts, opts_set);
11903 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11904 t = *slot;
11905 if (!t)
11907 /* Insert this one into the hash table. */
11908 t = cl_optimization_node;
11909 *slot = t;
11911 /* Make a new node for next time round. */
11912 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11915 return t;
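/* The function above uses a hash-consing idiom: the scratch node
   cl_optimization_node is filled in, looked up, and only becomes the
   canonical node when no equal entry exists yet, at which point a fresh
   scratch node is allocated for the next call.  A minimal sketch of the
   same idea with a hypothetical string interner (intern_table and intern
   are illustrative names only):  */

#include <string>
#include <unordered_set>

static std::unordered_set<std::string> intern_table;

/* Return the single canonical copy of S; equal inputs share one node,
   just as equal option sets share one OPTIMIZATION_NODE.  */
static const std::string *
intern (const char *s)
{
  return &*intern_table.insert (std::string (s)).first;
}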
11918 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11920 tree
11921 build_target_option_node (struct gcc_options *opts,
11922 struct gcc_options *opts_set)
11924 tree t;
11926 /* Use the cache of optimization nodes. */
11928 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11929 opts, opts_set);
11931 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11932 t = *slot;
11933 if (!t)
11935 /* Insert this one into the hash table. */
11936 t = cl_target_option_node;
11937 *slot = t;
11939 /* Make a new node for next time round. */
11940 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11943 return t;
11946 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11947 so that they aren't saved during PCH writing. */
11949 void
11950 prepare_target_option_nodes_for_pch (void)
11952 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11953 for (; iter != cl_option_hash_table->end (); ++iter)
11954 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11955 TREE_TARGET_GLOBALS (*iter) = NULL;
11958 /* Determine the "ultimate origin" of a block. */
11960 tree
11961 block_ultimate_origin (const_tree block)
11963 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11965 if (origin == NULL_TREE)
11966 return NULL_TREE;
11967 else
11969 gcc_checking_assert ((DECL_P (origin)
11970 && DECL_ORIGIN (origin) == origin)
11971 || BLOCK_ORIGIN (origin) == origin);
11972 return origin;
11976 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11977 no instruction. */
11979 bool
11980 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11982 /* Do not strip casts into or out of differing address spaces. */
11983 if (POINTER_TYPE_P (outer_type)
11984 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11986 if (!POINTER_TYPE_P (inner_type)
11987 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11988 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11989 return false;
11991 else if (POINTER_TYPE_P (inner_type)
11992 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11994 /* We already know that outer_type is not a pointer with
11995 a non-generic address space. */
11996 return false;
11999 /* Use precision rather than machine mode when we can, which gives
12000 the correct answer even for submode (bit-field) types. */
12001 if ((INTEGRAL_TYPE_P (outer_type)
12002 || POINTER_TYPE_P (outer_type)
12003 || TREE_CODE (outer_type) == OFFSET_TYPE)
12004 && (INTEGRAL_TYPE_P (inner_type)
12005 || POINTER_TYPE_P (inner_type)
12006 || TREE_CODE (inner_type) == OFFSET_TYPE))
12007 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12009 /* Otherwise fall back on comparing machine modes (e.g. for
12010 aggregate types, floats). */
12011 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
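/* Illustrative sketch, not part of the original source: on a typical LP64
   target the predicate above would classify conversions roughly like so.

     (unsigned int) an_int    nop      - same 32-bit precision
     (long) an_int            not nop  - 32-bit to 64-bit precision
     (char *) a_void_ptr      nop      - pointer precisions match
     (double) a_float         not nop  - SFmode vs. DFmode  */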
12014 /* Return true iff conversion in EXP generates no instruction. Mark
12015 it inline so that we fully inline into the stripping functions even
12016 though we have two uses of this function. */
12018 static inline bool
12019 tree_nop_conversion (const_tree exp)
12021 tree outer_type, inner_type;
12023 if (location_wrapper_p (exp))
12024 return true;
12025 if (!CONVERT_EXPR_P (exp)
12026 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12027 return false;
12029 outer_type = TREE_TYPE (exp);
12030 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12031 if (!inner_type || inner_type == error_mark_node)
12032 return false;
12034 return tree_nop_conversion_p (outer_type, inner_type);
12037 /* Return true iff conversion in EXP generates no instruction. Don't
12038 consider conversions changing the signedness. */
12040 static bool
12041 tree_sign_nop_conversion (const_tree exp)
12043 tree outer_type, inner_type;
12045 if (!tree_nop_conversion (exp))
12046 return false;
12048 outer_type = TREE_TYPE (exp);
12049 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12051 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12052 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12055 /* Strip conversions from EXP according to tree_nop_conversion and
12056 return the resulting expression. */
12058 tree
12059 tree_strip_nop_conversions (tree exp)
12061 while (tree_nop_conversion (exp))
12062 exp = TREE_OPERAND (exp, 0);
12063 return exp;
12066 /* Strip conversions from EXP according to tree_sign_nop_conversion
12067 and return the resulting expression. */
12069 tree
12070 tree_strip_sign_nop_conversions (tree exp)
12072 while (tree_sign_nop_conversion (exp))
12073 exp = TREE_OPERAND (exp, 0);
12074 return exp;
12077 /* Avoid any floating point extensions from EXP. */
12078 tree
12079 strip_float_extensions (tree exp)
12081 tree sub, expt, subt;
12083 /* For floating point constant look up the narrowest type that can hold
12084 it properly and handle it like (type)(narrowest_type)constant.
12085 This way we can optimize for instance a=a*2.0 where "a" is float
12086 but 2.0 is double constant. */
12087 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12089 REAL_VALUE_TYPE orig;
12090 tree type = NULL;
12092 orig = TREE_REAL_CST (exp);
12093 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12094 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12095 type = float_type_node;
12096 else if (TYPE_PRECISION (TREE_TYPE (exp))
12097 > TYPE_PRECISION (double_type_node)
12098 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12099 type = double_type_node;
12100 if (type)
12101 return build_real_truncate (type, orig);
12104 if (!CONVERT_EXPR_P (exp))
12105 return exp;
12107 sub = TREE_OPERAND (exp, 0);
12108 subt = TREE_TYPE (sub);
12109 expt = TREE_TYPE (exp);
12111 if (!FLOAT_TYPE_P (subt))
12112 return exp;
12114 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12115 return exp;
12117 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12118 return exp;
12120 return strip_float_extensions (sub);
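/* Illustrative sketch, not part of the original source: the
   constant-narrowing case described in the comment above.  In

     float a;
     a = a * 2.0;    // 2.0 is a double REAL_CST

   2.0 truncates exactly to float, so the constant is rebuilt in
   float_type_node and later folding can perform the multiplication
   entirely in float instead of promoting A to double.  */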
12123 /* Strip out all handled components that produce invariant
12124 offsets. */
12126 const_tree
12127 strip_invariant_refs (const_tree op)
12129 while (handled_component_p (op))
12131 switch (TREE_CODE (op))
12133 case ARRAY_REF:
12134 case ARRAY_RANGE_REF:
12135 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12136 || TREE_OPERAND (op, 2) != NULL_TREE
12137 || TREE_OPERAND (op, 3) != NULL_TREE)
12138 return NULL;
12139 break;
12141 case COMPONENT_REF:
12142 if (TREE_OPERAND (op, 2) != NULL_TREE)
12143 return NULL;
12144 break;
12146 default:;
12148 op = TREE_OPERAND (op, 0);
12151 return op;
12154 /* Strip handled components with zero offset from OP. */
12156 tree
12157 strip_zero_offset_components (tree op)
12159 while (TREE_CODE (op) == COMPONENT_REF
12160 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12161 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12162 op = TREE_OPERAND (op, 0);
12163 return op;
12166 static GTY(()) tree gcc_eh_personality_decl;
12168 /* Return the GCC personality function decl. */
12170 tree
12171 lhd_gcc_personality (void)
12173 if (!gcc_eh_personality_decl)
12174 gcc_eh_personality_decl = build_personality_function ("gcc");
12175 return gcc_eh_personality_decl;
12178 /* TARGET is a call target of GIMPLE call statement
12179 (obtained by gimple_call_fn). Return true if it is
12180 OBJ_TYPE_REF representing a virtual call to a C++ method.
12181 (As opposed to OBJ_TYPE_REF representing objc calls
12182 through a cast where middle-end devirtualization machinery
12183 can't apply.) FOR_DUMP_P is true when being called from
12184 the dump routines. */
12186 bool
12187 virtual_method_call_p (const_tree target, bool for_dump_p)
12189 if (TREE_CODE (target) != OBJ_TYPE_REF)
12190 return false;
12191 tree t = TREE_TYPE (target);
12192 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12193 t = TREE_TYPE (t);
12194 if (TREE_CODE (t) == FUNCTION_TYPE)
12195 return false;
12196 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12197 /* If we do not have BINFO associated, it means that type was built
12198 without devirtualization enabled. Do not consider this a virtual
12199 call. */
12200 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12201 return false;
12202 return true;
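/* Illustrative sketch, not part of the original source: a call whose GIMPLE
   call target is an OBJ_TYPE_REF and for which the predicate above returns
   true, provided the class has TYPE_BINFO (devirtualization info).  */

struct Base
{
  virtual int hook ();          /* calls are dispatched through the vtable */
};

int
call_hook (Base *p)
{
  return p->hook ();            /* gimple_call_fn is an OBJ_TYPE_REF here */
}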
12205 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12207 static tree
12208 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12210 unsigned int i;
12211 tree base_binfo, b;
12213 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12214 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12215 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12216 return base_binfo;
12217 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12218 return b;
12219 return NULL;
12222 /* Try to find a base info of BINFO that would have its field decl at offset
12223 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12224 found, return it, otherwise return NULL_TREE. */
12226 tree
12227 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12229 tree type = BINFO_TYPE (binfo);
12231 while (true)
12233 HOST_WIDE_INT pos, size;
12234 tree fld;
12235 int i;
12237 if (types_same_for_odr (type, expected_type))
12238 return binfo;
12239 if (maybe_lt (offset, 0))
12240 return NULL_TREE;
12242 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12244 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12245 continue;
12247 pos = int_bit_position (fld);
12248 size = tree_to_uhwi (DECL_SIZE (fld));
12249 if (known_in_range_p (offset, pos, size))
12250 break;
12252 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12253 return NULL_TREE;
12255 /* Offset 0 indicates the primary base, whose vtable contents are
12256 represented in the binfo for the derived class. */
12257 else if (maybe_ne (offset, 0))
12259 tree found_binfo = NULL, base_binfo;
12260 /* Offsets in BINFO are in bytes relative to the whole structure
12261 while POS is in bits relative to the containing field. */
12262 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12263 / BITS_PER_UNIT);
12265 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12266 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12267 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12269 found_binfo = base_binfo;
12270 break;
12272 if (found_binfo)
12273 binfo = found_binfo;
12274 else
12275 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12276 binfo_offset);
12279 type = TREE_TYPE (fld);
12280 offset -= pos;
12284 /* PR 84195: Replace control characters in "unescaped" with their
12285 escaped equivalents. Allow newlines if -fmessage-length has
12286 been set to a non-zero value. This is done here, rather than
12287 where the attribute is recorded as the message length can
12288 change between these two locations. */
12290 void
12291 escaped_string::escape (const char *unescaped)
12293 char *escaped;
12294 size_t i, new_i, len;
12296 if (m_owned)
12297 free (m_str);
12299 m_str = const_cast<char *> (unescaped);
12300 m_owned = false;
12302 if (unescaped == NULL || *unescaped == 0)
12303 return;
12305 len = strlen (unescaped);
12306 escaped = NULL;
12307 new_i = 0;
12309 for (i = 0; i < len; i++)
12311 char c = unescaped[i];
12313 if (!ISCNTRL (c))
12315 if (escaped)
12316 escaped[new_i++] = c;
12317 continue;
12320 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12322 if (escaped == NULL)
12324 /* We only allocate space for a new string if we
12325 actually encounter a control character that
12326 needs replacing. */
12327 escaped = (char *) xmalloc (len * 2 + 1);
12328 strncpy (escaped, unescaped, i);
12329 new_i = i;
12332 escaped[new_i++] = '\\';
12334 switch (c)
12336 case '\a': escaped[new_i++] = 'a'; break;
12337 case '\b': escaped[new_i++] = 'b'; break;
12338 case '\f': escaped[new_i++] = 'f'; break;
12339 case '\n': escaped[new_i++] = 'n'; break;
12340 case '\r': escaped[new_i++] = 'r'; break;
12341 case '\t': escaped[new_i++] = 't'; break;
12342 case '\v': escaped[new_i++] = 'v'; break;
12343 default: escaped[new_i++] = '?'; break;
12346 else if (escaped)
12347 escaped[new_i++] = c;
12350 if (escaped)
12352 escaped[new_i] = 0;
12353 m_str = escaped;
12354 m_owned = true;
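/* Illustrative sketch, not part of the original source: what the escaping
   above does to a message containing control characters.

     escaped_string msg;
     msg.escape ("bad\tvalue\n");

   yields "bad\\tvalue\\n" when messages are not being line-wrapped; with a
   non-zero -fmessage-length the trailing newline is kept as-is and only the
   tab is rewritten.  */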
12358 /* Warn about a use of an identifier which was marked deprecated. Returns
12359 whether a warning was given. */
12361 bool
12362 warn_deprecated_use (tree node, tree attr)
12364 escaped_string msg;
12366 if (node == 0 || !warn_deprecated_decl)
12367 return false;
12369 if (!attr)
12371 if (DECL_P (node))
12372 attr = DECL_ATTRIBUTES (node);
12373 else if (TYPE_P (node))
12375 tree decl = TYPE_STUB_DECL (node);
12376 if (decl)
12377 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12378 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12379 != NULL_TREE)
12381 node = TREE_TYPE (decl);
12382 attr = TYPE_ATTRIBUTES (node);
12387 if (attr)
12388 attr = lookup_attribute ("deprecated", attr);
12390 if (attr)
12391 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12393 bool w = false;
12394 if (DECL_P (node))
12396 auto_diagnostic_group d;
12397 if (msg)
12398 w = warning (OPT_Wdeprecated_declarations,
12399 "%qD is deprecated: %s", node, (const char *) msg);
12400 else
12401 w = warning (OPT_Wdeprecated_declarations,
12402 "%qD is deprecated", node);
12403 if (w)
12404 inform (DECL_SOURCE_LOCATION (node), "declared here");
12406 else if (TYPE_P (node))
12408 tree what = NULL_TREE;
12409 tree decl = TYPE_STUB_DECL (node);
12411 if (TYPE_NAME (node))
12413 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12414 what = TYPE_NAME (node);
12415 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12416 && DECL_NAME (TYPE_NAME (node)))
12417 what = DECL_NAME (TYPE_NAME (node));
12420 auto_diagnostic_group d;
12421 if (what)
12423 if (msg)
12424 w = warning (OPT_Wdeprecated_declarations,
12425 "%qE is deprecated: %s", what, (const char *) msg);
12426 else
12427 w = warning (OPT_Wdeprecated_declarations,
12428 "%qE is deprecated", what);
12430 else
12432 if (msg)
12433 w = warning (OPT_Wdeprecated_declarations,
12434 "type is deprecated: %s", (const char *) msg);
12435 else
12436 w = warning (OPT_Wdeprecated_declarations,
12437 "type is deprecated");
12440 if (w && decl)
12441 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12444 return w;
12447 /* Error out with an identifier which was marked 'unavailable'. */
12448 void
12449 error_unavailable_use (tree node, tree attr)
12451 escaped_string msg;
12453 if (node == 0)
12454 return;
12456 if (!attr)
12458 if (DECL_P (node))
12459 attr = DECL_ATTRIBUTES (node);
12460 else if (TYPE_P (node))
12462 tree decl = TYPE_STUB_DECL (node);
12463 if (decl)
12464 attr = lookup_attribute ("unavailable",
12465 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12469 if (attr)
12470 attr = lookup_attribute ("unavailable", attr);
12472 if (attr)
12473 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12475 if (DECL_P (node))
12477 auto_diagnostic_group d;
12478 if (msg)
12479 error ("%qD is unavailable: %s", node, (const char *) msg);
12480 else
12481 error ("%qD is unavailable", node);
12482 inform (DECL_SOURCE_LOCATION (node), "declared here");
12484 else if (TYPE_P (node))
12486 tree what = NULL_TREE;
12487 tree decl = TYPE_STUB_DECL (node);
12489 if (TYPE_NAME (node))
12491 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12492 what = TYPE_NAME (node);
12493 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12494 && DECL_NAME (TYPE_NAME (node)))
12495 what = DECL_NAME (TYPE_NAME (node));
12498 auto_diagnostic_group d;
12499 if (what)
12501 if (msg)
12502 error ("%qE is unavailable: %s", what, (const char *) msg);
12503 else
12504 error ("%qE is unavailable", what);
12506 else
12508 if (msg)
12509 error ("type is unavailable: %s", (const char *) msg);
12510 else
12511 error ("type is unavailable");
12514 if (decl)
12515 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12519 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12520 somewhere in it. */
12522 bool
12523 contains_bitfld_component_ref_p (const_tree ref)
12525 while (handled_component_p (ref))
12527 if (TREE_CODE (ref) == COMPONENT_REF
12528 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12529 return true;
12530 ref = TREE_OPERAND (ref, 0);
12533 return false;
12536 /* Try to determine whether a TRY_CATCH expression can fall through.
12537 This is a subroutine of block_may_fallthru. */
12539 static bool
12540 try_catch_may_fallthru (const_tree stmt)
12542 tree_stmt_iterator i;
12544 /* If the TRY block can fall through, the whole TRY_CATCH can
12545 fall through. */
12546 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12547 return true;
12549 i = tsi_start (TREE_OPERAND (stmt, 1));
12550 switch (TREE_CODE (tsi_stmt (i)))
12552 case CATCH_EXPR:
12553 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12554 catch expression and a body. The whole TRY_CATCH may fall
12555 through iff any of the catch bodies falls through. */
12556 for (; !tsi_end_p (i); tsi_next (&i))
12558 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12559 return true;
12561 return false;
12563 case EH_FILTER_EXPR:
12564 /* The exception filter expression only matters if there is an
12565 exception. If the exception does not match EH_FILTER_TYPES,
12566 we will execute EH_FILTER_FAILURE, and we will fall through
12567 if that falls through. If the exception does match
12568 EH_FILTER_TYPES, the stack unwinder will continue up the
12569 stack, so we will not fall through. We don't know whether we
12570 will throw an exception which matches EH_FILTER_TYPES or not,
12571 so we just ignore EH_FILTER_TYPES and assume that we might
12572 throw an exception which doesn't match. */
12573 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12575 default:
12576 /* This case represents statements to be executed when an
12577 exception occurs. Those statements are implicitly followed
12578 by a RESX statement to resume execution after the exception.
12579 So in this case the TRY_CATCH never falls through. */
12580 return false;
12584 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12585 need not be 100% accurate; simply be conservative and return true if we
12586 don't know. This is used only to avoid stupidly generating extra code.
12587 If we're wrong, we'll just delete the extra code later. */
12589 bool
12590 block_may_fallthru (const_tree block)
12592 /* This CONST_CAST is okay because expr_last returns its argument
12593 unmodified and we assign it to a const_tree. */
12594 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12596 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12598 case GOTO_EXPR:
12599 case RETURN_EXPR:
12600 /* Easy cases. If the last statement of the block implies
12601 control transfer, then we can't fall through. */
12602 return false;
12604 case SWITCH_EXPR:
12605 /* If there is a default: label or case labels cover all possible
12606 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12607 to some case label in all cases and all we care is whether the
12608 SWITCH_BODY falls through. */
12609 if (SWITCH_ALL_CASES_P (stmt))
12610 return block_may_fallthru (SWITCH_BODY (stmt));
12611 return true;
12613 case COND_EXPR:
12614 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12615 return true;
12616 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12618 case BIND_EXPR:
12619 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12621 case TRY_CATCH_EXPR:
12622 return try_catch_may_fallthru (stmt);
12624 case TRY_FINALLY_EXPR:
12625 /* The finally clause is always executed after the try clause,
12626 so if it does not fall through, then the try-finally will not
12627 fall through. Otherwise, if the try clause does not fall
12628 through, then when the finally clause falls through it will
12629 resume execution wherever the try clause was going. So the
12630 whole try-finally will only fall through if both the try
12631 clause and the finally clause fall through. */
12632 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12633 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12635 case EH_ELSE_EXPR:
12636 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12638 case MODIFY_EXPR:
12639 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12640 stmt = TREE_OPERAND (stmt, 1);
12641 else
12642 return true;
12643 /* FALLTHRU */
12645 case CALL_EXPR:
12646 /* Functions that do not return do not fall through. */
12647 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12649 case CLEANUP_POINT_EXPR:
12650 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12652 case TARGET_EXPR:
12653 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12655 case ERROR_MARK:
12656 return true;
12658 default:
12659 return lang_hooks.block_may_fallthru (stmt);
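/* Illustrative sketch, not part of the original source: statement lists and
   whether block_may_fallthru would consider them able to fall off the end.

     { x = 1; return x; }          no  - last statement is a RETURN_EXPR
     { if (c) abort (); x = 2; }   yes - the final assignment falls through
     { abort (); }                 no  - CALL_EXPR to a noreturn function  */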
12663 /* True if we are using EH to handle cleanups. */
12664 static bool using_eh_for_cleanups_flag = false;
12666 /* This routine is called from front ends to indicate eh should be used for
12667 cleanups. */
12668 void
12669 using_eh_for_cleanups (void)
12671 using_eh_for_cleanups_flag = true;
12674 /* Query whether EH is used for cleanups. */
12675 bool
12676 using_eh_for_cleanups_p (void)
12678 return using_eh_for_cleanups_flag;
12681 /* Wrapper for tree_code_name to ensure that tree code is valid */
12682 const char *
12683 get_tree_code_name (enum tree_code code)
12685 const char *invalid = "<invalid tree code>";
12687 /* The tree_code enum promotes to signed, but we could be getting
12688 invalid values, so force an unsigned comparison. */
12689 if (unsigned (code) >= MAX_TREE_CODES)
12691 if ((unsigned)code == 0xa5a5)
12692 return "ggc_freed";
12693 return invalid;
12696 return tree_code_name[code];
12699 /* Drops the TREE_OVERFLOW flag from T. */
12701 tree
12702 drop_tree_overflow (tree t)
12704 gcc_checking_assert (TREE_OVERFLOW (t));
12706 /* For tree codes with a sharing machinery re-build the result. */
12707 if (poly_int_tree_p (t))
12708 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12710 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12711 and canonicalize the result. */
12712 if (TREE_CODE (t) == VECTOR_CST)
12714 tree_vector_builder builder;
12715 builder.new_unary_operation (TREE_TYPE (t), t, true);
12716 unsigned int count = builder.encoded_nelts ();
12717 for (unsigned int i = 0; i < count; ++i)
12719 tree elt = VECTOR_CST_ELT (t, i);
12720 if (TREE_OVERFLOW (elt))
12721 elt = drop_tree_overflow (elt);
12722 builder.quick_push (elt);
12724 return builder.build ();
12727 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12728 and drop the flag. */
12729 t = copy_node (t);
12730 TREE_OVERFLOW (t) = 0;
12732 /* For constants that contain nested constants, drop the flag
12733 from those as well. */
12734 if (TREE_CODE (t) == COMPLEX_CST)
12736 if (TREE_OVERFLOW (TREE_REALPART (t)))
12737 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12738 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12739 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12742 return t;
12745 /* Given a memory reference expression T, return its base address.
12746 The base address of a memory reference expression is the main
12747 object being referenced. For instance, the base address for
12748 'array[i].fld[j]' is 'array'. You can think of this as stripping
12749 away the offset part from a memory address.
12751 This function calls handled_component_p to strip away all the inner
12752 parts of the memory reference until it reaches the base object. */
12754 tree
12755 get_base_address (tree t)
12757 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12758 t = TREE_OPERAND (t, 0);
12759 while (handled_component_p (t))
12760 t = TREE_OPERAND (t, 0);
12762 if ((TREE_CODE (t) == MEM_REF
12763 || TREE_CODE (t) == TARGET_MEM_REF)
12764 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12765 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12767 return t;
12770 /* Return a tree of sizetype representing the size, in bytes, of the element
12771 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12773 tree
12774 array_ref_element_size (tree exp)
12776 tree aligned_size = TREE_OPERAND (exp, 3);
12777 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12778 location_t loc = EXPR_LOCATION (exp);
12780 /* If a size was specified in the ARRAY_REF, it's the size measured
12781 in alignment units of the element type. So multiply by that value. */
12782 if (aligned_size)
12784 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12785 sizetype from another type of the same width and signedness. */
12786 if (TREE_TYPE (aligned_size) != sizetype)
12787 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12788 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12789 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12792 /* Otherwise, take the size from that of the element type. Substitute
12793 any PLACEHOLDER_EXPR that we have. */
12794 else
12795 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12798 /* Return a tree representing the lower bound of the array mentioned in
12799 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12801 tree
12802 array_ref_low_bound (tree exp)
12804 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12806 /* If a lower bound is specified in EXP, use it. */
12807 if (TREE_OPERAND (exp, 2))
12808 return TREE_OPERAND (exp, 2);
12810 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12811 substituting for a PLACEHOLDER_EXPR as needed. */
12812 if (domain_type && TYPE_MIN_VALUE (domain_type))
12813 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12815 /* Otherwise, return a zero of the appropriate type. */
12816 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12817 return (idxtype == error_mark_node
12818 ? integer_zero_node : build_int_cst (idxtype, 0));
12821 /* Return a tree representing the upper bound of the array mentioned in
12822 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12824 tree
12825 array_ref_up_bound (tree exp)
12827 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12829 /* If there is a domain type and it has an upper bound, use it, substituting
12830 for a PLACEHOLDER_EXPR as needed. */
12831 if (domain_type && TYPE_MAX_VALUE (domain_type))
12832 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12834 /* Otherwise fail. */
12835 return NULL_TREE;
12838 /* Returns true if REF is an array reference, a component reference,
12839 or a memory reference to an array whose actual size might be larger
12840 than its upper bound implies; there are multiple cases:
12841 A. a ref to a flexible array member at the end of a structure;
12842 B. a ref to an array with a different type against the original decl;
12843 for example:
12845 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 };
12846 (*((char(*)[16])&a[0]))[i+8]
12848 C. a ref to an array that was passed as a parameter;
12849 for example:
12851 int test (uint8_t *p, uint32_t t[1][1], int n) {
12852 for (int i = 0; i < 4; i++, p++)
12853 t[i][0] = ...;
12855 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12858 bool
12859 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12861 /* The TYPE for this array reference. */
12862 tree atype = NULL_TREE;
12863 /* The FIELD_DECL for the array field in the containing structure. */
12864 tree afield_decl = NULL_TREE;
12865 /* Whether this array is the trailing array of a structure. */
12866 bool is_trailing_array_tmp = false;
12867 if (!is_trailing_array)
12868 is_trailing_array = &is_trailing_array_tmp;
12870 if (TREE_CODE (ref) == ARRAY_REF
12871 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12873 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12874 ref = TREE_OPERAND (ref, 0);
12876 else if (TREE_CODE (ref) == COMPONENT_REF
12877 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12879 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12880 afield_decl = TREE_OPERAND (ref, 1);
12882 else if (TREE_CODE (ref) == MEM_REF)
12884 tree arg = TREE_OPERAND (ref, 0);
12885 if (TREE_CODE (arg) == ADDR_EXPR)
12886 arg = TREE_OPERAND (arg, 0);
12887 tree argtype = TREE_TYPE (arg);
12888 if (TREE_CODE (argtype) == RECORD_TYPE)
12890 if (tree fld = last_field (argtype))
12892 atype = TREE_TYPE (fld);
12893 afield_decl = fld;
12894 if (TREE_CODE (atype) != ARRAY_TYPE)
12895 return false;
12896 if (VAR_P (arg) && DECL_SIZE (fld))
12897 return false;
12899 else
12900 return false;
12902 else
12903 return false;
12905 else
12906 return false;
12908 if (TREE_CODE (ref) == STRING_CST)
12909 return false;
12911 tree ref_to_array = ref;
12912 while (handled_component_p (ref))
12914 /* If the reference chain contains a component reference to a
12915 non-union type and another field follows, the reference
12916 is not at the end of a structure. */
12917 if (TREE_CODE (ref) == COMPONENT_REF)
12919 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12921 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12922 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12923 nextf = DECL_CHAIN (nextf);
12924 if (nextf)
12925 return false;
12928 /* If we have a multi-dimensional array we do not consider
12929 a non-innermost dimension as a flex array if the whole
12930 multi-dimensional array is at struct end.
12931 Same for an array of aggregates with a trailing array
12932 member. */
12933 else if (TREE_CODE (ref) == ARRAY_REF)
12934 return false;
12935 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12937 /* If we view an underlying object as something else then what we
12938 gathered up to now is what we have to rely on. */
12939 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12940 break;
12941 else
12942 gcc_unreachable ();
12944 ref = TREE_OPERAND (ref, 0);
12947 gcc_assert (!afield_decl
12948 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12950 /* The array now is at struct end. Treat a flexible array member as
12951 always subject to extension, even into mere padding constrained by
12952 an underlying decl. */
12953 if (! TYPE_SIZE (atype)
12954 || ! TYPE_DOMAIN (atype)
12955 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12957 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12958 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12961 /* If the reference is based on a declared entity, the size of the array
12962 is constrained by its given domain. (Do not trust commons PR/69368). */
12963 ref = get_base_address (ref);
12964 if (ref
12965 && DECL_P (ref)
12966 && !(flag_unconstrained_commons
12967 && VAR_P (ref) && DECL_COMMON (ref))
12968 && DECL_SIZE_UNIT (ref)
12969 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12971 /* If the object itself is the array it is not at struct end. */
12972 if (DECL_P (ref_to_array))
12973 return false;
12975 /* Check whether the array domain covers all of the available
12976 padding. */
12977 poly_int64 offset;
12978 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12979 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12980 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12982 *is_trailing_array
12983 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12984 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12986 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12988 *is_trailing_array
12989 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12990 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12993 /* If at least one extra element fits it is a flexarray. */
12994 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12995 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12996 + 2)
12997 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12998 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13000 *is_trailing_array
13001 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13002 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13005 return false;
13008 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13009 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
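/* Illustrative sketch, not part of the original source: case A from the
   comment above versus an interior array, assuming the default
   -fstrict-flex-arrays level.  */

struct flex  { int n; int data[]; };       /* true flexible array member */
struct fixed { int data[4]; int tail; };   /* another field follows DATA */

int
read_flex (struct flex *p, int i)
{
  return p->data[i];   /* flexible size: true, and it is a trailing array */
}

int
read_fixed (struct fixed *p, int i)
{
  return p->data[i];   /* false: DATA is not the last field of the struct */
}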
13013 /* Return a tree representing the offset, in bytes, of the field referenced
13014 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13016 tree
13017 component_ref_field_offset (tree exp)
13019 tree aligned_offset = TREE_OPERAND (exp, 2);
13020 tree field = TREE_OPERAND (exp, 1);
13021 location_t loc = EXPR_LOCATION (exp);
13023 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13024 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13025 value. */
13026 if (aligned_offset)
13028 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13029 sizetype from another type of the same width and signedness. */
13030 if (TREE_TYPE (aligned_offset) != sizetype)
13031 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13032 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13033 size_int (DECL_OFFSET_ALIGN (field)
13034 / BITS_PER_UNIT));
13037 /* Otherwise, take the offset from that of the field. Substitute
13038 any PLACEHOLDER_EXPR that we have. */
13039 else
13040 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13043 /* Given the initializer INIT, return the initializer for the field
13044 DECL if it exists, otherwise null. Used to obtain the initializer
13045 for a flexible array member and determine its size. */
13047 static tree
13048 get_initializer_for (tree init, tree decl)
13050 STRIP_NOPS (init);
13052 tree fld, fld_init;
13053 unsigned HOST_WIDE_INT i;
13054 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13056 if (decl == fld)
13057 return fld_init;
13059 if (TREE_CODE (fld) == CONSTRUCTOR)
13061 fld_init = get_initializer_for (fld_init, decl);
13062 if (fld_init)
13063 return fld_init;
13067 return NULL_TREE;
13070 /* Determines the special array member type for the array reference REF. */
13071 special_array_member
13072 component_ref_sam_type (tree ref)
13074 special_array_member sam_type = special_array_member::none;
13076 tree member = TREE_OPERAND (ref, 1);
13077 tree memsize = DECL_SIZE_UNIT (member);
13078 if (memsize)
13080 tree memtype = TREE_TYPE (member);
13081 if (TREE_CODE (memtype) != ARRAY_TYPE)
13082 return sam_type;
13084 bool trailing = false;
13085 (void) array_ref_flexible_size_p (ref, &trailing);
13086 bool zero_elts = integer_zerop (memsize);
13087 if (zero_elts && integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype))))
13089 /* If array element has zero size, verify if it is a flexible
13090 array member or zero length array. Clear zero_elts if
13091 it has one or more members or is a VLA member. */
13092 if (tree dom = TYPE_DOMAIN (memtype))
13093 if (tree min = TYPE_MIN_VALUE (dom))
13094 if (tree max = TYPE_MAX_VALUE (dom))
13095 if (TREE_CODE (min) != INTEGER_CST
13096 || TREE_CODE (max) != INTEGER_CST
13097 || !((integer_zerop (min) && integer_all_onesp (max))
13098 || tree_int_cst_lt (max, min)))
13099 zero_elts = false;
13101 if (!trailing && !zero_elts)
13102 /* MEMBER is an interior array with more than one element. */
13103 return special_array_member::int_n;
13105 if (zero_elts)
13107 if (trailing)
13108 return special_array_member::trail_0;
13109 else
13110 return special_array_member::int_0;
13113 if (!zero_elts)
13114 if (tree dom = TYPE_DOMAIN (memtype))
13115 if (tree min = TYPE_MIN_VALUE (dom))
13116 if (tree max = TYPE_MAX_VALUE (dom))
13117 if (TREE_CODE (min) == INTEGER_CST
13118 && TREE_CODE (max) == INTEGER_CST)
13120 offset_int minidx = wi::to_offset (min);
13121 offset_int maxidx = wi::to_offset (max);
13122 offset_int neltsm1 = maxidx - minidx;
13123 if (neltsm1 > 0)
13124 /* MEMBER is a trailing array with more than
13125 one element. */
13126 return special_array_member::trail_n;
13128 if (neltsm1 == 0)
13129 return special_array_member::trail_1;
13133 return sam_type;
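/* Illustrative sketch, not part of the original source: the classification
   above for some trailing and interior arrays, assuming the default
   -fstrict-flex-arrays level and an access through a pointer.

     struct a { int n; char m[0]; char c; };   a::m -> int_0
     struct b { int n; char m[1]; };           b::m -> trail_1
     struct c { int n; char m[4]; };           c::m -> trail_n
     struct d { int n; char m[4]; char c; };   d::m -> int_n
     struct e { int n; char m[0]; };           e::m -> trail_0  */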
13136 /* Determines the size of the member referenced by the COMPONENT_REF
13137 REF, using its initializer expression if necessary in order to
13138 determine the size of an initialized flexible array member.
13139 If non-null, set *SAM to the type of special array member.
13140 Returns the size as sizetype (which might be zero for an object
13141 with an uninitialized flexible array member) or null if the size
13142 cannot be determined. */
13144 tree
13145 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13147 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13149 special_array_member sambuf;
13150 if (!sam)
13151 sam = &sambuf;
13152 *sam = component_ref_sam_type (ref);
13154 /* The object/argument referenced by the COMPONENT_REF and its type. */
13155 tree arg = TREE_OPERAND (ref, 0);
13156 tree argtype = TREE_TYPE (arg);
13157 /* The referenced member. */
13158 tree member = TREE_OPERAND (ref, 1);
13160 tree memsize = DECL_SIZE_UNIT (member);
13161 if (memsize)
13163 tree memtype = TREE_TYPE (member);
13164 if (TREE_CODE (memtype) != ARRAY_TYPE)
13165 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13166 to the type of a class with a virtual base which doesn't
13167 reflect the size of the virtual's members (see pr97595).
13168 If that's the case fail for now and implement something
13169 more robust in the future. */
13170 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13171 ? memsize : NULL_TREE);
13173 /* Arrays of two or more elements are treated as normal arrays by default. */
13174 if (*sam == special_array_member::int_n
13175 || *sam == special_array_member::trail_n)
13176 return memsize;
13178 tree afield_decl = TREE_OPERAND (ref, 1);
13179 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13180 /* If the trailing array is not a flexible array member, treat it as
13181 a normal array. */
13182 if (DECL_NOT_FLEXARRAY (afield_decl)
13183 && *sam != special_array_member::int_0)
13184 return memsize;
13186 if (*sam == special_array_member::int_0)
13187 memsize = NULL_TREE;
13189 /* For a reference to a flexible array member of a union
13190 use the size of the union instead of the size of the member. */
13191 if (TREE_CODE (argtype) == UNION_TYPE)
13192 memsize = TYPE_SIZE_UNIT (argtype);
13195 /* MEMBER is either a bona fide flexible array member, or a zero-element
13196 array member, or an array of length one treated as such. */
13198 /* If the reference is to a declared object and the member a true
13199 flexible array, try to determine its size from its initializer. */
13200 poly_int64 baseoff = 0;
13201 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13202 if (!base || !VAR_P (base))
13204 if (*sam != special_array_member::int_0)
13205 return NULL_TREE;
13207 if (TREE_CODE (arg) != COMPONENT_REF)
13208 return NULL_TREE;
13210 base = arg;
13211 while (TREE_CODE (base) == COMPONENT_REF)
13212 base = TREE_OPERAND (base, 0);
13213 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13216 /* BASE is the declared object of which MEMBER is either a member
13217 or that is cast to ARGTYPE (e.g., a char buffer used to store
13218 an ARGTYPE object). */
13219 tree basetype = TREE_TYPE (base);
13221 /* Determine the base type of the referenced object. If it's
13222 the same as ARGTYPE and MEMBER has a known size, return it. */
13223 tree bt = basetype;
13224 if (*sam != special_array_member::int_0)
13225 while (TREE_CODE (bt) == ARRAY_TYPE)
13226 bt = TREE_TYPE (bt);
13227 bool typematch = useless_type_conversion_p (argtype, bt);
13228 if (memsize && typematch)
13229 return memsize;
13231 memsize = NULL_TREE;
13233 if (typematch)
13234 /* MEMBER is a true flexible array member. Compute its size from
13235 the initializer of the BASE object if it has one. */
13236 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13237 if (init != error_mark_node)
13239 init = get_initializer_for (init, member);
13240 if (init)
13242 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13243 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13245 /* Use the larger of the initializer size and the tail
13246 padding in the enclosing struct. */
13247 poly_int64 rsz = tree_to_poly_int64 (refsize);
13248 rsz -= baseoff;
13249 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13250 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13253 baseoff = 0;
13257 if (!memsize)
13259 if (typematch)
13261 if (DECL_P (base)
13262 && DECL_EXTERNAL (base)
13263 && bt == basetype
13264 && *sam != special_array_member::int_0)
13265 /* The size of a flexible array member of an extern struct
13266 with no initializer cannot be determined (it's defined
13267 in another translation unit and can have an initializer
13268 with an arbitrary number of elements). */
13269 return NULL_TREE;
13271 /* Use the size of the base struct or, for interior zero-length
13272 arrays, the size of the enclosing type. */
13273 memsize = TYPE_SIZE_UNIT (bt);
13275 else if (DECL_P (base))
13276 /* Use the size of the BASE object (possibly an array of some
13277 other type such as char used to store the struct). */
13278 memsize = DECL_SIZE_UNIT (base);
13279 else
13280 return NULL_TREE;
13283 /* If the flexible array member has a known size use the greater
13284 of it and the tail padding in the enclosing struct.
13285 Otherwise, when the size of the flexible array member is unknown
13286 and the referenced object is not a struct, use the size of its
13287 type when known. This detects sizes of array buffers when cast
13288 to struct types with flexible array members. */
13289 if (memsize)
13291 if (!tree_fits_poly_int64_p (memsize))
13292 return NULL_TREE;
13293 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13294 if (known_lt (baseoff, memsz64))
13296 memsz64 -= baseoff;
13297 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13299 return size_zero_node;
13302 /* Return "don't know" for an external non-array object since its
13303 flexible array member can be initialized to have any number of
13304 elements. Otherwise, return zero because the flexible array
13305 member has no elements. */
13306 return (DECL_P (base)
13307 && DECL_EXTERNAL (base)
13308 && (!typematch
13309 || TREE_CODE (basetype) != ARRAY_TYPE)
13310 ? NULL_TREE : size_zero_node);
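/* Illustrative sketch, not part of the original source: sizing a flexible
   array member from its initializer as described above.  The string literal
   gives X.A four elements (including the terminating nul), so for the
   COMPONENT_REF x.a the function would be expected to return 4; for an
   extern declaration of the same object without an initializer it would
   return null, and for a definition without an initializer, zero.  */

struct str { int n; char a[]; };
struct str x = { 1, "abc" };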
13313 /* Return the machine mode of T. For vectors, returns the mode of the
13314 inner type. The main use case is to feed the result to HONOR_NANS,
13315 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13317 machine_mode
13318 element_mode (const_tree t)
13320 if (!TYPE_P (t))
13321 t = TREE_TYPE (t);
13322 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13323 t = TREE_TYPE (t);
13324 return TYPE_MODE (t);
13327 /* Vector types need to re-check the target flags each time we report
13328 the machine mode. We need to do this because attribute target can
13329 change the result of vector_mode_supported_p and have_regs_of_mode
13330 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13331 change on a per-function basis. */
13332 /* ??? Possibly a better solution is to run through all the types
13333 referenced by a function and re-compute the TYPE_MODE once, rather
13334 than make the TYPE_MODE macro call a function. */
13336 machine_mode
13337 vector_type_mode (const_tree t)
13339 machine_mode mode;
13341 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13343 mode = t->type_common.mode;
13344 if (VECTOR_MODE_P (mode)
13345 && (!targetm.vector_mode_supported_p (mode)
13346 || !have_regs_of_mode[mode]))
13348 scalar_int_mode innermode;
13350 /* For integers, try mapping it to a same-sized scalar mode. */
13351 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13353 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13354 * GET_MODE_BITSIZE (innermode));
13355 scalar_int_mode mode;
13356 if (int_mode_for_size (size, 0).exists (&mode)
13357 && have_regs_of_mode[mode])
13358 return mode;
13361 return BLKmode;
13364 return mode;
13367 /* Return the size in bits of each element of vector type TYPE. */
13369 unsigned int
13370 vector_element_bits (const_tree type)
13372 gcc_checking_assert (VECTOR_TYPE_P (type));
13373 if (VECTOR_BOOLEAN_TYPE_P (type))
13374 return TYPE_PRECISION (TREE_TYPE (type));
13375 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13378 /* Calculate the size in bits of each element of vector type TYPE
13379 and return the result as a tree of type bitsizetype. */
13381 tree
13382 vector_element_bits_tree (const_tree type)
13384 gcc_checking_assert (VECTOR_TYPE_P (type));
13385 if (VECTOR_BOOLEAN_TYPE_P (type))
13386 return bitsize_int (vector_element_bits (type));
13387 return TYPE_SIZE (TREE_TYPE (type));
13390 /* Verify that basic properties of T match TV and thus T can be a variant of
13391 TV. TV should be the more specified variant (i.e. the main variant). */
13393 static bool
13394 verify_type_variant (const_tree t, tree tv)
13396 /* Type variant can differ by:
13398 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13399 ENCODE_QUAL_ADDR_SPACE.
13400 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13401 in this case some values may not be set in the variant types
13402 (see TYPE_COMPLETE_P checks).
13403 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13404 - by TYPE_NAME and attributes (i.e. when a variant originates from a typedef)
13405 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13406 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13407 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13408 this is necessary to make it possible to merge types from different TUs
13409 - arrays, pointers and references may have TREE_TYPE that is a variant
13410 of TREE_TYPE of their main variants.
13411 - aggregates may have new TYPE_FIELDS list that list variants of
13412 the main variant TYPE_FIELDS.
13413 - vector types may differ by TYPE_VECTOR_OPAQUE
13416 /* Convenience macro for matching individual fields. */
13417 #define verify_variant_match(flag) \
13418 do { \
13419 if (flag (tv) != flag (t)) \
13421 error ("type variant differs by %s", #flag); \
13422 debug_tree (tv); \
13423 return false; \
13425 } while (false)
13427 /* tree_base checks. */
13429 verify_variant_match (TREE_CODE);
13430 /* FIXME: Ada builds non-artificial variants of artificial types. */
13431 #if 0
13432 if (TYPE_ARTIFICIAL (tv))
13433 verify_variant_match (TYPE_ARTIFICIAL);
13434 #endif
13435 if (POINTER_TYPE_P (tv))
13436 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13437 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13438 verify_variant_match (TYPE_UNSIGNED);
13439 verify_variant_match (TYPE_PACKED);
13440 if (TREE_CODE (t) == REFERENCE_TYPE)
13441 verify_variant_match (TYPE_REF_IS_RVALUE);
13442 if (AGGREGATE_TYPE_P (t))
13443 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13444 else
13445 verify_variant_match (TYPE_SATURATING);
13446 /* FIXME: This check triggers during libstdc++ builds. */
13447 #if 0
13448 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13449 verify_variant_match (TYPE_FINAL_P);
13450 #endif
13452 /* tree_type_common checks. */
13454 if (COMPLETE_TYPE_P (t))
13456 verify_variant_match (TYPE_MODE);
13457 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13458 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13459 verify_variant_match (TYPE_SIZE);
13460 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13461 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13462 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13464 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13465 TYPE_SIZE_UNIT (tv), 0));
13466 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13467 debug_tree (tv);
13468 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13469 debug_tree (TYPE_SIZE_UNIT (tv));
13470 error ("type%'s %<TYPE_SIZE_UNIT%>");
13471 debug_tree (TYPE_SIZE_UNIT (t));
13472 return false;
13474 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13476 verify_variant_match (TYPE_PRECISION_RAW);
13477 if (RECORD_OR_UNION_TYPE_P (t))
13478 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13479 else if (TREE_CODE (t) == ARRAY_TYPE)
13480 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13481 /* During LTO we merge variant lists from different translation units
13482 that may differ by TYPE_CONTEXT, which in turn may point
13483 to TRANSLATION_UNIT_DECL.
13484 Ada also builds variants of types with different TYPE_CONTEXT. */
13485 #if 0
13486 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13487 verify_variant_match (TYPE_CONTEXT);
13488 #endif
13489 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13490 verify_variant_match (TYPE_STRING_FLAG);
13491 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13492 verify_variant_match (TYPE_CXX_ODR_P);
13493 if (TYPE_ALIAS_SET_KNOWN_P (t))
13495 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13496 debug_tree (tv);
13497 return false;
13500 /* tree_type_non_common checks. */
13502 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13503 and dangle the pointer from time to time. */
13504 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13505 && (in_lto_p || !TYPE_VFIELD (tv)
13506 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13508 error ("type variant has different %<TYPE_VFIELD%>");
13509 debug_tree (tv);
13510 return false;
13512 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13513 || TREE_CODE (t) == INTEGER_TYPE
13514 || TREE_CODE (t) == BOOLEAN_TYPE
13515 || TREE_CODE (t) == BITINT_TYPE
13516 || SCALAR_FLOAT_TYPE_P (t)
13517 || FIXED_POINT_TYPE_P (t))
13519 verify_variant_match (TYPE_MAX_VALUE);
13520 verify_variant_match (TYPE_MIN_VALUE);
13522 if (TREE_CODE (t) == METHOD_TYPE)
13523 verify_variant_match (TYPE_METHOD_BASETYPE);
13524 if (TREE_CODE (t) == OFFSET_TYPE)
13525 verify_variant_match (TYPE_OFFSET_BASETYPE);
13526 if (TREE_CODE (t) == ARRAY_TYPE)
13527 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13528 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13529 or even in the type's main variant. This is needed to make bootstrap pass
13530 and the bug seems to be new in GCC 5.
13531 The C++ FE should be updated to make this consistent and we should check
13532 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise it
13533 matches the main variant.
13535 Also disable the check for Java for now because of a parser hack that builds
13536 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
13537 of the copies. */
13538 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13539 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13540 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13541 Since there is no cheap way to tell a C++ type from a Java type without
13542 LTO, do the checking at LTO time only. */
13543 && (in_lto_p && odr_type_p (t)))
13545 error ("type variant has different %<TYPE_BINFO%>");
13546 debug_tree (tv);
13547 error ("type variant%'s %<TYPE_BINFO%>");
13548 debug_tree (TYPE_BINFO (tv));
13549 error ("type%'s %<TYPE_BINFO%>");
13550 debug_tree (TYPE_BINFO (t));
13551 return false;
13554 /* Check various uses of TYPE_VALUES_RAW. */
13555 if (TREE_CODE (t) == ENUMERAL_TYPE
13556 && TYPE_VALUES (t))
13557 verify_variant_match (TYPE_VALUES);
13558 else if (TREE_CODE (t) == ARRAY_TYPE)
13559 verify_variant_match (TYPE_DOMAIN);
13560 /* Permit incomplete variants of complete type. While FEs may complete
13561 all variants, this does not happen for C++ templates in all cases. */
13562 else if (RECORD_OR_UNION_TYPE_P (t)
13563 && COMPLETE_TYPE_P (t)
13564 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13566 tree f1, f2;
13568 /* Fortran builds qualified variants as new records with items of
13569 qualified type. Verify that they look the same. */
13570 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13571 f1 && f2;
13572 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13573 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13574 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13575 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13576 /* FIXME: gfc_nonrestricted_type builds all types as variants
13577 with the exception of pointer types. It deeply copies the type,
13578 which means that we may end up with a variant type
13579 referring to a non-variant pointer. We may change it to
13580 produce pointer types as variants, too, like
13581 objc_get_protocol_qualified_type does. */
13582 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13583 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13584 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13585 break;
13586 if (f1 || f2)
13588 error ("type variant has different %<TYPE_FIELDS%>");
13589 debug_tree (tv);
13590 error ("first mismatch is field");
13591 debug_tree (f1);
13592 error ("and field");
13593 debug_tree (f2);
13594 return false;
13597 else if (FUNC_OR_METHOD_TYPE_P (t))
13598 verify_variant_match (TYPE_ARG_TYPES);
13599 /* For C++ the qualified variant of an array type is really an array type
13600 of the qualified TREE_TYPE.
13601 ObjC builds variants of pointer types where the pointed-to type is a
13602 variant, too, in objc_get_protocol_qualified_type. */
13603 if (TREE_TYPE (t) != TREE_TYPE (tv)
13604 && ((TREE_CODE (t) != ARRAY_TYPE
13605 && !POINTER_TYPE_P (t))
13606 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13607 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13609 error ("type variant has different %<TREE_TYPE%>");
13610 debug_tree (tv);
13611 error ("type variant%'s %<TREE_TYPE%>");
13612 debug_tree (TREE_TYPE (tv));
13613 error ("type%'s %<TREE_TYPE%>");
13614 debug_tree (TREE_TYPE (t));
13615 return false;
13617 if (type_with_alias_set_p (t)
13618 && !gimple_canonical_types_compatible_p (t, tv, false))
13620 error ("type is not compatible with its variant");
13621 debug_tree (tv);
13622 error ("type variant%'s %<TREE_TYPE%>");
13623 debug_tree (TREE_TYPE (tv));
13624 error ("type%'s %<TREE_TYPE%>");
13625 debug_tree (TREE_TYPE (t));
13626 return false;
13628 return true;
13629 #undef verify_variant_match
13633 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13634 the middle-end types_compatible_p function. It needs to avoid
13635 claiming types are different for types that should be treated
13636 the same with respect to TBAA. Canonical types are also used
13637 for IL consistency checks via the useless_type_conversion_p
13638 predicate which does not handle all type kinds itself but falls
13639 back to pointer-comparison of TYPE_CANONICAL for aggregates
13640 for example. */
13642 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13643 type calculation because we need to allow inter-operability between signed
13644 and unsigned variants. */
13646 bool
13647 type_with_interoperable_signedness (const_tree type)
13649 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13650 signed char and unsigned char. Similarly the Fortran FE builds
13651 C_SIZE_T as a signed type, while C defines it as unsigned. */
13653 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13654 == INTEGER_TYPE
13655 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13656 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13659 /* Return true iff T1 and T2 are structurally identical as far as
13660 TBAA is concerned.
13661 This function is used both by lto.cc canonical type merging and by the
13662 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13663 types that have TYPE_CANONICAL defined and assume them equivalent. This is
13664 useful only for LTO because only in these cases TYPE_CANONICAL equivalence
13665 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13667 bool
13668 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13669 bool trust_type_canonical)
13671 /* Type variants should be the same as the main variant. When not doing
13672 sanity checking to verify this fact, go to main variants and save some work. */
13673 if (trust_type_canonical)
13675 t1 = TYPE_MAIN_VARIANT (t1);
13676 t2 = TYPE_MAIN_VARIANT (t2);
13679 /* Check first for the obvious case of pointer identity. */
13680 if (t1 == t2)
13681 return true;
13683 /* Check that we have two types to compare. */
13684 if (t1 == NULL_TREE || t2 == NULL_TREE)
13685 return false;
13687 /* We consider complete types always compatible with incomplete types.
13688 This does not make sense for canonical type calculation and thus we
13689 need to ensure that we are never called in that situation.
13691 FIXME: For more correctness the function probably should have three modes
13692 1) a mode assuming that types are complete and matching their structure
13693 2) a mode allowing incomplete types but producing equivalence classes
13694 and thus ignoring all info from complete types
13695 3) a mode allowing incomplete types to match complete ones but checking
13696 compatibility between complete types.
13698 1 and 2 can be used for canonical type calculation. 3 is the real
13699 definition of type compatibility that can be used e.g. for warnings during
13700 declaration merging. */
13702 gcc_assert (!trust_type_canonical
13703 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13705 /* If the types have been previously registered and found equal
13706 they still are. */
13708 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13709 && trust_type_canonical)
13711 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13712 they are always NULL, but they are set to non-NULL for types
13713 constructed by build_pointer_type and variants. In this case the
13714 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13715 all pointers are considered equal). Be sure not to return false
13716 negatives. */
13717 gcc_checking_assert (canonical_type_used_p (t1)
13718 && canonical_type_used_p (t2));
13719 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13722 /* For types where we do ODR based TBAA the canonical type is always
13723 set correctly, so we know that types are different if their
13724 canonical types do not match. */
13725 if (trust_type_canonical
13726 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13727 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13728 return false;
13730 /* Can't be the same type if the types don't have the same code. */
13731 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13732 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13733 return false;
13735 /* Qualifiers do not matter for canonical type comparison purposes. */
13737 /* Void types and nullptr types are always the same. */
13738 if (VOID_TYPE_P (t1)
13739 || TREE_CODE (t1) == NULLPTR_TYPE)
13740 return true;
13742 /* Can't be the same type if they have different mode. */
13743 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13744 return false;
13746 /* Non-aggregate types can be handled cheaply. */
13747 if (INTEGRAL_TYPE_P (t1)
13748 || SCALAR_FLOAT_TYPE_P (t1)
13749 || FIXED_POINT_TYPE_P (t1)
13750 || VECTOR_TYPE_P (t1)
13751 || TREE_CODE (t1) == COMPLEX_TYPE
13752 || TREE_CODE (t1) == OFFSET_TYPE
13753 || POINTER_TYPE_P (t1))
13755 /* Can't be the same type if they have different precision. */
13756 if (TYPE_PRECISION_RAW (t1) != TYPE_PRECISION_RAW (t2))
13757 return false;
13759 /* In some cases the signed and unsigned types are required to be
13760 inter-operable. */
13761 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13762 && !type_with_interoperable_signedness (t1))
13763 return false;
13765 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13766 interoperable with "signed char". Unless all frontends are revisited
13767 to agree on these types, we must ignore the flag completely. */
13769 /* Fortran standard define C_PTR type that is compatible with every
13770 C pointer. For this reason we need to glob all pointers into one.
13771 Still pointers in different address spaces are not compatible. */
13772 if (POINTER_TYPE_P (t1))
13774 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13775 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13776 return false;
13779 /* Tail-recurse to components. */
13780 if (VECTOR_TYPE_P (t1)
13781 || TREE_CODE (t1) == COMPLEX_TYPE)
13782 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13783 TREE_TYPE (t2),
13784 trust_type_canonical);
13786 return true;
13789 /* Do type-specific comparisons. */
13790 switch (TREE_CODE (t1))
13792 case ARRAY_TYPE:
13793 /* Array types are the same if the element types are the same and
13794 the number of elements is the same. */
13795 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13796 trust_type_canonical)
13797 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13798 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13799 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13800 return false;
13801 else
13803 tree i1 = TYPE_DOMAIN (t1);
13804 tree i2 = TYPE_DOMAIN (t2);
13806 /* For an incomplete external array, the type domain can be
13807 NULL_TREE. Check this condition also. */
13808 if (i1 == NULL_TREE && i2 == NULL_TREE)
13809 return true;
13810 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13811 return false;
13812 else
13814 tree min1 = TYPE_MIN_VALUE (i1);
13815 tree min2 = TYPE_MIN_VALUE (i2);
13816 tree max1 = TYPE_MAX_VALUE (i1);
13817 tree max2 = TYPE_MAX_VALUE (i2);
13819 /* The minimum/maximum values have to be the same. */
13820 if ((min1 == min2
13821 || (min1 && min2
13822 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13823 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13824 || operand_equal_p (min1, min2, 0))))
13825 && (max1 == max2
13826 || (max1 && max2
13827 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13828 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13829 || operand_equal_p (max1, max2, 0)))))
13830 return true;
13831 else
13832 return false;
13836 case METHOD_TYPE:
13837 case FUNCTION_TYPE:
13838 /* Function types are the same if the return type and argument types
13839 are the same. */
13840 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13841 trust_type_canonical))
13842 return false;
13844 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13845 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13846 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13847 return true;
13848 else
13850 tree parms1, parms2;
13852 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13853 parms1 && parms2;
13854 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13856 if (!gimple_canonical_types_compatible_p
13857 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13858 trust_type_canonical))
13859 return false;
13862 if (parms1 || parms2)
13863 return false;
13865 return true;
13868 case RECORD_TYPE:
13869 case UNION_TYPE:
13870 case QUAL_UNION_TYPE:
13872 tree f1, f2;
13874 /* Don't try to compare variants of an incomplete type, before
13875 TYPE_FIELDS has been copied around. */
13876 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13877 return true;
13880 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13881 return false;
13883 /* For aggregate types, all the fields must be the same. */
13884 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13885 f1 || f2;
13886 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13888 /* Skip non-fields and zero-sized fields. */
13889 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13890 || (DECL_SIZE (f1)
13891 && integer_zerop (DECL_SIZE (f1)))))
13892 f1 = TREE_CHAIN (f1);
13893 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13894 || (DECL_SIZE (f2)
13895 && integer_zerop (DECL_SIZE (f2)))))
13896 f2 = TREE_CHAIN (f2);
13897 if (!f1 || !f2)
13898 break;
13899 /* The fields must have the same offset and compatible types. */
13900 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13901 || !gimple_compare_field_offset (f1, f2)
13902 || !gimple_canonical_types_compatible_p
13903 (TREE_TYPE (f1), TREE_TYPE (f2),
13904 trust_type_canonical))
13905 return false;
13908 /* If one aggregate has more fields than the other, they
13909 are not the same. */
13910 if (f1 || f2)
13911 return false;
13913 return true;
13916 default:
13917 /* Consider all types with language specific trees in them mutually
13918 compatible. This is executed only from verify_type and false
13919 positives can be tolerated. */
13920 gcc_assert (!in_lto_p);
13921 return true;
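/* Illustrative examples (editorial sketch, not part of the original source):
   with TRUST_TYPE_CANONICAL clear, two pointer types such as "int *" and
   "float *" compare as compatible here because all pointers within the same
   address space are globbed together for TBAA, while pointers into different
   address spaces do not match.  Two RECORD_TYPEs are compared field by field,
   so the hypothetical declarations

     struct A { int i; char c; };
     struct B { int x; char y; };

   are treated as compatible: fields are matched by offset and type, not by
   name.  */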
13925 /* An OPAQUE_TYPE T should have only size and alignment information,
13926 and its mode should be of class MODE_OPAQUE. This function verifies that
13927 these properties of T match those of TV, which is the main variant of T,
13928 and TC, which is the canonical type of T. */
13930 static void
13931 verify_opaque_type (const_tree t, tree tv, tree tc)
13933 gcc_assert (OPAQUE_TYPE_P (t));
13934 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13935 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13937 /* For an opaque type T1, check whether some of its properties match
13938 the corresponding ones of the other opaque type T2, emitting
13939 error messages for any that are inconsistent. */
13940 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13941 const char *kind_msg)
13943 if (!OPAQUE_TYPE_P (t2))
13945 error ("type %s is not an opaque type", kind_msg);
13946 debug_tree (t2);
13947 return;
13949 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13951 error ("type %s is not with opaque mode", kind_msg);
13952 debug_tree (t2);
13953 return;
13955 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13957 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13958 debug_tree (t2);
13959 return;
13961 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13962 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13963 if (maybe_ne (t1_size, t2_size))
13965 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13966 debug_tree (t2);
13967 return;
13969 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13971 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13972 debug_tree (t2);
13973 return;
13975 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13977 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13978 debug_tree (t2);
13979 return;
13983 if (t != tv)
13984 check_properties_for_opaque_type (t, tv, "variant");
13986 if (t != tc)
13987 check_properties_for_opaque_type (t, tc, "canonical");
13990 /* Verify type T. */
13992 void
13993 verify_type (const_tree t)
13995 bool error_found = false;
13996 tree mv = TYPE_MAIN_VARIANT (t);
13997 tree ct = TYPE_CANONICAL (t);
13999 if (OPAQUE_TYPE_P (t))
14001 verify_opaque_type (t, mv, ct);
14002 return;
14005 if (!mv)
14007 error ("main variant is not defined");
14008 error_found = true;
14010 else if (mv != TYPE_MAIN_VARIANT (mv))
14012 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14013 debug_tree (mv);
14014 error_found = true;
14016 else if (t != mv && !verify_type_variant (t, mv))
14017 error_found = true;
14019 if (!ct)
14021 else if (TYPE_CANONICAL (ct) != ct)
14023 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14024 debug_tree (ct);
14025 error_found = true;
14027 /* Method and function types cannot be used to address memory and thus
14028 TYPE_CANONICAL really matters only for determining useless conversions.
14030 FIXME: The C++ FE produces declarations of built-in functions that are not
14031 compatible with their main variants. */
14032 else if (TREE_CODE (t) == FUNCTION_TYPE)
14034 else if (t != ct
14035 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14036 with variably sized arrays because their sizes may have been
14037 gimplified to different variables. */
14038 && !variably_modified_type_p (ct, NULL)
14039 && !gimple_canonical_types_compatible_p (t, ct, false)
14040 && COMPLETE_TYPE_P (t))
14042 error ("%<TYPE_CANONICAL%> is not compatible");
14043 debug_tree (ct);
14044 error_found = true;
14047 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14048 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14050 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14051 debug_tree (ct);
14052 error_found = true;
14054 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14056 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14057 debug_tree (ct);
14058 debug_tree (TYPE_MAIN_VARIANT (ct));
14059 error_found = true;
14063 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14064 if (RECORD_OR_UNION_TYPE_P (t))
14066 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14067 and dangles the pointer from time to time. */
14068 if (TYPE_VFIELD (t)
14069 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14070 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14072 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14073 debug_tree (TYPE_VFIELD (t));
14074 error_found = true;
14077 else if (TREE_CODE (t) == POINTER_TYPE)
14079 if (TYPE_NEXT_PTR_TO (t)
14080 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14082 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14083 debug_tree (TYPE_NEXT_PTR_TO (t));
14084 error_found = true;
14087 else if (TREE_CODE (t) == REFERENCE_TYPE)
14089 if (TYPE_NEXT_REF_TO (t)
14090 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14092 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14093 debug_tree (TYPE_NEXT_REF_TO (t));
14094 error_found = true;
14097 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14098 || FIXED_POINT_TYPE_P (t))
14100 /* FIXME: The following check should pass:
14101 useless_type_conversion_p (const_cast <tree> (t),
14102 TREE_TYPE (TYPE_MIN_VALUE (t)))
14103 but does not for C sizetypes in LTO. */
14106 /* Check various uses of TYPE_MAX_VALUE_RAW. */
14107 if (RECORD_OR_UNION_TYPE_P (t))
14109 if (!TYPE_BINFO (t))
14111 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14113 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14114 debug_tree (TYPE_BINFO (t));
14115 error_found = true;
14117 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14119 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14120 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14121 error_found = true;
14124 else if (FUNC_OR_METHOD_TYPE_P (t))
14126 if (TYPE_METHOD_BASETYPE (t)
14127 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14128 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14130 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14131 debug_tree (TYPE_METHOD_BASETYPE (t));
14132 error_found = true;
14135 else if (TREE_CODE (t) == OFFSET_TYPE)
14137 if (TYPE_OFFSET_BASETYPE (t)
14138 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14139 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14141 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14142 debug_tree (TYPE_OFFSET_BASETYPE (t));
14143 error_found = true;
14146 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14147 || FIXED_POINT_TYPE_P (t))
14149 /* FIXME: The following check should pass:
14150 useless_type_conversion_p (const_cast <tree> (t),
14151 TREE_TYPE (TYPE_MAX_VALUE (t)))
14152 but does not for C sizetypes in LTO. */
14154 else if (TREE_CODE (t) == ARRAY_TYPE)
14156 if (TYPE_ARRAY_MAX_SIZE (t)
14157 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14159 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14160 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14161 error_found = true;
14164 else if (TYPE_MAX_VALUE_RAW (t))
14166 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14167 debug_tree (TYPE_MAX_VALUE_RAW (t));
14168 error_found = true;
14171 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14173 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14174 debug_tree (TYPE_LANG_SLOT_1 (t));
14175 error_found = true;
14178 /* Check various uses of TYPE_VALUES_RAW. */
14179 if (TREE_CODE (t) == ENUMERAL_TYPE)
14180 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14182 tree value = TREE_VALUE (l);
14183 tree name = TREE_PURPOSE (l);
14185 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
14186 CONST_DECLs of ENUMERAL_TYPE. */
14187 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14189 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14190 debug_tree (value);
14191 debug_tree (name);
14192 error_found = true;
14194 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14195 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14196 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14198 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14199 "to the enum");
14200 debug_tree (value);
14201 debug_tree (name);
14202 error_found = true;
14204 if (TREE_CODE (name) != IDENTIFIER_NODE)
14206 error ("enum value name is not %<IDENTIFIER_NODE%>");
14207 debug_tree (value);
14208 debug_tree (name);
14209 error_found = true;
14212 else if (TREE_CODE (t) == ARRAY_TYPE)
14214 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14216 error ("array %<TYPE_DOMAIN%> is not integer type");
14217 debug_tree (TYPE_DOMAIN (t));
14218 error_found = true;
14221 else if (RECORD_OR_UNION_TYPE_P (t))
14223 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14225 error ("%<TYPE_FIELDS%> defined in incomplete type");
14226 error_found = true;
14228 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14230 /* TODO: verify properties of decls. */
14231 if (TREE_CODE (fld) == FIELD_DECL)
14233 else if (TREE_CODE (fld) == TYPE_DECL)
14235 else if (TREE_CODE (fld) == CONST_DECL)
14237 else if (VAR_P (fld))
14239 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14241 else if (TREE_CODE (fld) == USING_DECL)
14243 else if (TREE_CODE (fld) == FUNCTION_DECL)
14245 else
14247 error ("wrong tree in %<TYPE_FIELDS%> list");
14248 debug_tree (fld);
14249 error_found = true;
14253 else if (TREE_CODE (t) == INTEGER_TYPE
14254 || TREE_CODE (t) == BOOLEAN_TYPE
14255 || TREE_CODE (t) == BITINT_TYPE
14256 || TREE_CODE (t) == OFFSET_TYPE
14257 || TREE_CODE (t) == REFERENCE_TYPE
14258 || TREE_CODE (t) == NULLPTR_TYPE
14259 || TREE_CODE (t) == POINTER_TYPE)
14261 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14263 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14264 "is %p",
14265 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14266 error_found = true;
14268 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14270 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14271 debug_tree (TYPE_CACHED_VALUES (t));
14272 error_found = true;
14274 /* Verify just enough of the cache to ensure that no one copied it to a new
14275 type. All copying should go through copy_node, which should clear it. */
14276 else if (TYPE_CACHED_VALUES_P (t))
14278 int i;
14279 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14280 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14281 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14283 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14284 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14285 error_found = true;
14286 break;
14290 else if (FUNC_OR_METHOD_TYPE_P (t))
14291 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14293 /* C++ FE uses TREE_PURPOSE to store initial values. */
14294 if (TREE_PURPOSE (l) && in_lto_p)
14296 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14297 debug_tree (l);
14298 error_found = true;
14300 if (!TYPE_P (TREE_VALUE (l)))
14302 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14303 debug_tree (l);
14304 error_found = true;
14307 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14309 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14310 debug_tree (TYPE_VALUES_RAW (t));
14311 error_found = true;
14313 if (TREE_CODE (t) != INTEGER_TYPE
14314 && TREE_CODE (t) != BOOLEAN_TYPE
14315 && TREE_CODE (t) != BITINT_TYPE
14316 && TREE_CODE (t) != OFFSET_TYPE
14317 && TREE_CODE (t) != REFERENCE_TYPE
14318 && TREE_CODE (t) != NULLPTR_TYPE
14319 && TREE_CODE (t) != POINTER_TYPE
14320 && TYPE_CACHED_VALUES_P (t))
14322 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14323 error_found = true;
14326 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14327 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14328 of a type. */
14329 if (TREE_CODE (t) == METHOD_TYPE
14330 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14332 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14333 error_found = true;
14336 if (error_found)
14338 debug_tree (const_cast <tree> (t));
14339 internal_error ("%qs failed", __func__);
14344 /* Return 1 if ARG interpreted as signed in its precision is known to be
14345 always non-negative, 2 if ARG is known to be always negative, or 3 if
14346 ARG may be positive or negative. */
14349 get_range_pos_neg (tree arg)
14351 if (arg == error_mark_node)
14352 return 3;
14354 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14355 int cnt = 0;
14356 if (TREE_CODE (arg) == INTEGER_CST)
14358 wide_int w = wi::sext (wi::to_wide (arg), prec);
14359 if (wi::neg_p (w))
14360 return 2;
14361 else
14362 return 1;
14364 while (CONVERT_EXPR_P (arg)
14365 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14366 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14368 arg = TREE_OPERAND (arg, 0);
14369 /* A narrower value zero-extended into a wider type
14370 will always result in a non-negative value. */
14371 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14372 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14373 return 1;
14374 prec = TYPE_PRECISION (TREE_TYPE (arg));
14375 if (++cnt > 30)
14376 return 3;
14379 if (TREE_CODE (arg) != SSA_NAME)
14380 return 3;
14381 value_range r;
14382 while (!get_global_range_query ()->range_of_expr (r, arg)
14383 || r.undefined_p () || r.varying_p ())
14385 gimple *g = SSA_NAME_DEF_STMT (arg);
14386 if (is_gimple_assign (g)
14387 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14389 tree t = gimple_assign_rhs1 (g);
14390 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14391 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14393 if (TYPE_UNSIGNED (TREE_TYPE (t))
14394 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14395 return 1;
14396 prec = TYPE_PRECISION (TREE_TYPE (t));
14397 arg = t;
14398 if (++cnt > 30)
14399 return 3;
14400 continue;
14403 return 3;
14405 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14407 /* For unsigned values, the "positive" range comes
14408 below the "negative" range. */
14409 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14410 return 1;
14411 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14412 return 2;
14414 else
14416 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14417 return 1;
14418 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14419 return 2;
14421 return 3;
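/* Illustrative sketch (editorial addition, not part of the original source):
   a caller may use the tri-state result to drop one of two code paths.  OP
   stands for any integral tree operand available to that caller.

     int pos_neg = get_range_pos_neg (op);
     if (pos_neg == 1)
       ;  // OP is known non-negative; negative-value handling can be dropped.
     else if (pos_neg == 2)
       ;  // OP is known negative; the non-negative path can be dropped.
     else
       ;  // pos_neg == 3: both paths are required.  */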
14427 /* Return true if ARG is marked with the nonnull attribute in the
14428 current function signature. */
14430 bool
14431 nonnull_arg_p (const_tree arg)
14433 tree t, attrs, fntype;
14434 unsigned HOST_WIDE_INT arg_num;
14436 gcc_assert (TREE_CODE (arg) == PARM_DECL
14437 && (POINTER_TYPE_P (TREE_TYPE (arg))
14438 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14440 /* The static chain decl is always non null. */
14441 if (arg == cfun->static_chain_decl)
14442 return true;
14444 /* THIS argument of method is always non-NULL. */
14445 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14446 && arg == DECL_ARGUMENTS (cfun->decl)
14447 && flag_delete_null_pointer_checks)
14448 return true;
14450 /* Values passed by reference are always non-NULL. */
14451 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14452 && flag_delete_null_pointer_checks)
14453 return true;
14455 fntype = TREE_TYPE (cfun->decl);
14456 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14458 attrs = lookup_attribute ("nonnull", attrs);
14460 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14461 if (attrs == NULL_TREE)
14462 return false;
14464 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14465 if (TREE_VALUE (attrs) == NULL_TREE)
14466 return true;
14468 /* Get the position number for ARG in the function signature. */
14469 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14471 t = DECL_CHAIN (t), arg_num++)
14473 if (t == arg)
14474 break;
14477 gcc_assert (t == arg);
14479 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14480 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14482 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14483 return true;
14487 return false;
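/* Illustrative examples (editorial addition; the declarations below are
   hypothetical user code, not part of this file):

     void f (void *p) __attribute__ ((nonnull));              // all pointer args
     void g (void *p, void *q) __attribute__ ((nonnull (2))); // only the 2nd

   Within f, nonnull_arg_p is true for P because the attribute has no argument
   list; within g it is true only for Q, whose 1-based position matches the
   attribute operand.  */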
14490 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14491 information. */
14493 location_t
14494 set_block (location_t loc, tree block)
14496 location_t pure_loc = get_pure_location (loc);
14497 source_range src_range = get_range_from_loc (line_table, loc);
14498 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14499 return line_table->get_or_create_combined_loc (pure_loc, src_range, block,
14500 discriminator);
14503 location_t
14504 set_source_range (tree expr, location_t start, location_t finish)
14506 source_range src_range;
14507 src_range.m_start = start;
14508 src_range.m_finish = finish;
14509 return set_source_range (expr, src_range);
14512 location_t
14513 set_source_range (tree expr, source_range src_range)
14515 if (!EXPR_P (expr))
14516 return UNKNOWN_LOCATION;
14518 location_t expr_location = EXPR_LOCATION (expr);
14519 location_t pure_loc = get_pure_location (expr_location);
14520 unsigned discriminator = get_discriminator_from_loc (expr_location);
14521 location_t adhoc = line_table->get_or_create_combined_loc (pure_loc,
14522 src_range,
14523 nullptr,
14524 discriminator);
14525 SET_EXPR_LOCATION (expr, adhoc);
14526 return adhoc;
14529 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
14530 if !CAN_HAVE_LOCATION_P (expr).
14532 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14533 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14535 Wrapper nodes can be identified using location_wrapper_p. */
14537 tree
14538 maybe_wrap_with_location (tree expr, location_t loc)
14540 if (expr == NULL)
14541 return NULL;
14542 if (loc == UNKNOWN_LOCATION)
14543 return expr;
14544 if (CAN_HAVE_LOCATION_P (expr))
14545 return expr;
14546 /* We should only be adding wrappers for constants and for decls,
14547 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14548 gcc_assert (CONSTANT_CLASS_P (expr)
14549 || DECL_P (expr)
14550 || EXCEPTIONAL_CLASS_P (expr));
14552 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14553 any impact of the wrapper nodes. */
14554 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14555 return expr;
14557 /* Compiler-generated temporary variables don't need a wrapper. */
14558 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14559 return expr;
14561 /* If any auto_suppress_location_wrappers are active, don't create
14562 wrappers. */
14563 if (suppress_location_wrappers > 0)
14564 return expr;
14566 tree_code code
14567 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14568 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14569 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14570 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14571 /* Mark this node as being a wrapper. */
14572 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14573 return wrapper;
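/* Illustrative sketch (editorial addition, mirroring the selftests near the
   end of this file); LOC stands for any known location_t:

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, loc);
     // wrapped is a NON_LVALUE_EXPR with EXPR_LOCATION (wrapped) == loc and
     // tree_strip_any_location_wrapper (wrapped) == cst.

   A STRING_CST or a decl would instead be wrapped in a VIEW_CONVERT_EXPR.  */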
14576 int suppress_location_wrappers;
14578 /* Return the name of combined function FN, for debugging purposes. */
14580 const char *
14581 combined_fn_name (combined_fn fn)
14583 if (builtin_fn_p (fn))
14585 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14586 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14588 else
14589 return internal_fn_name (as_internal_fn (fn));
14592 /* Return a bitmap with a bit set corresponding to each argument in
14593 a function call type FNTYPE declared with attribute nonnull,
14594 or null if none of the function's argument are nonnull. The caller
14595 must free the bitmap. */
14597 bitmap
14598 get_nonnull_args (const_tree fntype)
14600 if (fntype == NULL_TREE)
14601 return NULL;
14603 bitmap argmap = NULL;
14604 if (TREE_CODE (fntype) == METHOD_TYPE)
14606 /* The this pointer in C++ non-static member functions is
14607 implicitly nonnull whether or not it's declared as such. */
14608 argmap = BITMAP_ALLOC (NULL);
14609 bitmap_set_bit (argmap, 0);
14612 tree attrs = TYPE_ATTRIBUTES (fntype);
14613 if (!attrs)
14614 return argmap;
14616 /* A function declaration can specify multiple nonnull attributes,
14617 each with zero or more arguments. The loop below creates a bitmap
14618 representing a union of all the arguments. An empty (but non-null)
14619 bitmap means that all arguments have been declared nonnull. */
14620 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14622 attrs = lookup_attribute ("nonnull", attrs);
14623 if (!attrs)
14624 break;
14626 if (!argmap)
14627 argmap = BITMAP_ALLOC (NULL);
14629 if (!TREE_VALUE (attrs))
14631 /* Clear the bitmap in case a previous attribute nonnull
14632 set it and this one overrides it for all arguments. */
14633 bitmap_clear (argmap);
14634 return argmap;
14637 /* Iterate over the indices of the arguments declared nonnull
14638 and set a bit for each. */
14639 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14641 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14642 bitmap_set_bit (argmap, val);
14646 return argmap;
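/* Illustrative sketch (editorial addition; the caller code is assumed, not
   part of this file).  The returned bitmap uses zero-based argument indices
   and must be released by the caller:

     if (bitmap nonnull = get_nonnull_args (fntype))
       {
         // An empty bitmap means every argument is declared nonnull.
         bool arg0_nonnull = bitmap_empty_p (nonnull)
                             || bitmap_bit_p (nonnull, 0);
         BITMAP_FREE (nonnull);
       }
*/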
14649 /* Returns true if TYPE is a type where it and all of its subobjects
14650 (recursively) are of structure, union, or array type. */
14652 bool
14653 is_empty_type (const_tree type)
14655 if (RECORD_OR_UNION_TYPE_P (type))
14657 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14658 if (TREE_CODE (field) == FIELD_DECL
14659 && !DECL_PADDING_P (field)
14660 && !is_empty_type (TREE_TYPE (field)))
14661 return false;
14662 return true;
14664 else if (TREE_CODE (type) == ARRAY_TYPE)
14665 return (integer_minus_onep (array_type_nelts (type))
14666 || TYPE_DOMAIN (type) == NULL_TREE
14667 || is_empty_type (TREE_TYPE (type)));
14668 return false;
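/* Illustrative examples (editorial addition; the C++ declarations are
   hypothetical):

     struct E { };                // empty: no non-padding FIELD_DECLs
     struct D { E e[4]; };        // empty: every subobject is itself empty
     struct S { int i; };         // not empty

   is_empty_type returns true for the first two and false for the last.  */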
14671 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14672 that shouldn't be passed via stack. */
14674 bool
14675 default_is_empty_record (const_tree type)
14677 if (!abi_version_at_least (12))
14678 return false;
14680 if (type == error_mark_node)
14681 return false;
14683 if (TREE_ADDRESSABLE (type))
14684 return false;
14686 return is_empty_type (TYPE_MAIN_VARIANT (type));
14689 /* Determine whether TYPE is a structure with a flexible array member,
14690 or a union containing such a structure (possibly recursively). */
14692 bool
14693 flexible_array_type_p (const_tree type)
14695 tree x, last;
14696 switch (TREE_CODE (type))
14698 case RECORD_TYPE:
14699 last = NULL_TREE;
14700 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14701 if (TREE_CODE (x) == FIELD_DECL)
14702 last = x;
14703 if (last == NULL_TREE)
14704 return false;
14705 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14706 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14707 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14708 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14709 return true;
14710 return false;
14711 case UNION_TYPE:
14712 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14714 if (TREE_CODE (x) == FIELD_DECL
14715 && flexible_array_type_p (TREE_TYPE (x)))
14716 return true;
14718 return false;
14719 default:
14720 return false;
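/* Illustrative examples (editorial addition; hypothetical C declarations):

     struct S { int n; char data[]; };   // true: flexible array member
     union U { struct S s; int i; };     // true: contains such a struct
     struct T { int n; char data[4]; };  // false: the trailing array is bounded

   Only the last field of a RECORD_TYPE is considered; unions are searched
   recursively.  */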
14724 /* Like int_size_in_bytes, but handle empty records specially. */
14726 HOST_WIDE_INT
14727 arg_int_size_in_bytes (const_tree type)
14729 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14732 /* Like size_in_bytes, but handle empty records specially. */
14734 tree
14735 arg_size_in_bytes (const_tree type)
14737 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14740 /* Return true if an expression with CODE has to have the same result type as
14741 its first operand. */
14743 bool
14744 expr_type_first_operand_type_p (tree_code code)
14746 switch (code)
14748 case NEGATE_EXPR:
14749 case ABS_EXPR:
14750 case BIT_NOT_EXPR:
14751 case PAREN_EXPR:
14752 case CONJ_EXPR:
14754 case PLUS_EXPR:
14755 case MINUS_EXPR:
14756 case MULT_EXPR:
14757 case TRUNC_DIV_EXPR:
14758 case CEIL_DIV_EXPR:
14759 case FLOOR_DIV_EXPR:
14760 case ROUND_DIV_EXPR:
14761 case TRUNC_MOD_EXPR:
14762 case CEIL_MOD_EXPR:
14763 case FLOOR_MOD_EXPR:
14764 case ROUND_MOD_EXPR:
14765 case RDIV_EXPR:
14766 case EXACT_DIV_EXPR:
14767 case MIN_EXPR:
14768 case MAX_EXPR:
14769 case BIT_IOR_EXPR:
14770 case BIT_XOR_EXPR:
14771 case BIT_AND_EXPR:
14773 case LSHIFT_EXPR:
14774 case RSHIFT_EXPR:
14775 case LROTATE_EXPR:
14776 case RROTATE_EXPR:
14777 return true;
14779 default:
14780 return false;
14784 /* Return a typenode for the "standard" C type with a given name. */
14785 tree
14786 get_typenode_from_name (const char *name)
14788 if (name == NULL || *name == '\0')
14789 return NULL_TREE;
14791 if (strcmp (name, "char") == 0)
14792 return char_type_node;
14793 if (strcmp (name, "unsigned char") == 0)
14794 return unsigned_char_type_node;
14795 if (strcmp (name, "signed char") == 0)
14796 return signed_char_type_node;
14798 if (strcmp (name, "short int") == 0)
14799 return short_integer_type_node;
14800 if (strcmp (name, "short unsigned int") == 0)
14801 return short_unsigned_type_node;
14803 if (strcmp (name, "int") == 0)
14804 return integer_type_node;
14805 if (strcmp (name, "unsigned int") == 0)
14806 return unsigned_type_node;
14808 if (strcmp (name, "long int") == 0)
14809 return long_integer_type_node;
14810 if (strcmp (name, "long unsigned int") == 0)
14811 return long_unsigned_type_node;
14813 if (strcmp (name, "long long int") == 0)
14814 return long_long_integer_type_node;
14815 if (strcmp (name, "long long unsigned int") == 0)
14816 return long_long_unsigned_type_node;
14818 gcc_unreachable ();
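/* Illustrative usage (editorial addition): get_typenode_from_name
   ("unsigned int") returns unsigned_type_node, and a NULL or empty NAME
   yields NULL_TREE, whereas an unrecognized name such as "bool" is a
   programming error that reaches gcc_unreachable rather than returning
   NULL.  */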
14821 /* List of pointer types used to declare builtins before we have seen their
14822 real declaration.
14824 Keep the size up to date in tree.h ! */
14825 const builtin_structptr_type builtin_structptr_types[6] =
14827 { fileptr_type_node, ptr_type_node, "FILE" },
14828 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14829 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14830 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14831 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14832 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14835 /* Return the maximum object size. */
14837 tree
14838 max_object_size (void)
14840 /* To do: Make this a configurable parameter. */
14841 return TYPE_MAX_VALUE (ptrdiff_type_node);
14844 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14845 parameter default to false and that weeds out error_mark_node. */
14847 bool
14848 verify_type_context (location_t loc, type_context_kind context,
14849 const_tree type, bool silent_p)
14851 if (type == error_mark_node)
14852 return true;
14854 gcc_assert (TYPE_P (type));
14855 return (!targetm.verify_type_context
14856 || targetm.verify_type_context (loc, context, type, silent_p));
14859 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14860 delete operators. Return false if they may or may not name such
14861 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14862 do not. */
14864 bool
14865 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14866 bool *pcertain /* = NULL */)
14868 bool certain;
14869 if (!pcertain)
14870 pcertain = &certain;
14872 const char *new_name = IDENTIFIER_POINTER (new_asm);
14873 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14874 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14875 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14877 /* The following failures are due to invalid names so they're not
14878 considered certain mismatches. */
14879 *pcertain = false;
14881 if (new_len < 5 || delete_len < 6)
14882 return false;
14883 if (new_name[0] == '_')
14884 ++new_name, --new_len;
14885 if (new_name[0] == '_')
14886 ++new_name, --new_len;
14887 if (delete_name[0] == '_')
14888 ++delete_name, --delete_len;
14889 if (delete_name[0] == '_')
14890 ++delete_name, --delete_len;
14891 if (new_len < 4 || delete_len < 5)
14892 return false;
14894 /* The following failures are due to names of user-defined operators
14895 so they're also not considered certain mismatches. */
14897 /* *_len is now just the length after initial underscores. */
14898 if (new_name[0] != 'Z' || new_name[1] != 'n')
14899 return false;
14900 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14901 return false;
14903 /* The following failures are certain mismatches. */
14904 *pcertain = true;
14906 /* _Znw must match _Zdl, _Zna must match _Zda. */
14907 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14908 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14909 return false;
14910 /* 'j', 'm' and 'y' correspond to size_t. */
14911 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14912 return false;
14913 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14914 return false;
14915 if (new_len == 4
14916 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14918 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14919 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14920 if (delete_len == 5)
14921 return true;
14922 if (delete_len == 6 && delete_name[5] == new_name[3])
14923 return true;
14924 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14925 return true;
14927 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14928 || (new_len == 33
14929 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14931 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14932 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14933 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14934 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14935 return true;
14936 if (delete_len == 21
14937 && delete_name[5] == new_name[3]
14938 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14939 return true;
14940 if (delete_len == 34
14941 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14942 return true;
14945 /* The negative result is conservative. */
14946 *pcertain = false;
14947 return false;
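/* Illustrative examples (editorial addition; the mangled names are the usual
   Itanium C++ ABI spellings, shown as strings for brevity although the real
   arguments are IDENTIFIER_NODEs taken from DECL_ASSEMBLER_NAME):

     "_Znwm" / "_ZdlPv"   // operator new and operator delete: valid pair
     "_Znam" / "_ZdaPv"   // the array forms: valid pair
     "_Znwm" / "_ZdaPv"   // mixed scalar/array: rejected, *PCERTAIN is set  */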
14950 /* Return the zero-based number corresponding to the argument being
14951 deallocated if FNDECL is a deallocation function or an out-of-bounds
14952 value if it isn't. */
14954 unsigned
14955 fndecl_dealloc_argno (tree fndecl)
14957 /* A call to operator delete isn't recognized as one to a built-in. */
14958 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14960 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14961 return 0;
14963 /* Avoid placement delete that's not been inlined. */
14964 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14965 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14966 || id_equal (fname, "_ZdaPvS_")) // array form
14967 return UINT_MAX;
14968 return 0;
14971 /* TODO: Handle user-defined functions with attribute malloc? Handle
14972 known non-built-ins like fopen? */
14973 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14975 switch (DECL_FUNCTION_CODE (fndecl))
14977 case BUILT_IN_FREE:
14978 case BUILT_IN_REALLOC:
14979 return 0;
14980 default:
14981 break;
14983 return UINT_MAX;
14986 tree attrs = DECL_ATTRIBUTES (fndecl);
14987 if (!attrs)
14988 return UINT_MAX;
14990 for (tree atfree = attrs;
14991 (atfree = lookup_attribute ("*dealloc", atfree));
14992 atfree = TREE_CHAIN (atfree))
14994 tree alloc = TREE_VALUE (atfree);
14995 if (!alloc)
14996 continue;
14998 tree pos = TREE_CHAIN (alloc);
14999 if (!pos)
15000 return 0;
15002 pos = TREE_VALUE (pos);
15003 return TREE_INT_CST_LOW (pos) - 1;
15006 return UINT_MAX;
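/* Illustrative behavior (editorial summary of the cases above): for
   BUILT_IN_FREE and BUILT_IN_REALLOC the released pointer is argument 0, as
   it is for a replaceable operator delete; the non-inlined placement forms
   "_ZdlPvS_" / "_ZdaPvS_" and unrelated built-ins yield UINT_MAX.  For a
   function carrying the internal "*dealloc" attribute, the 1-based position
   stored in the attribute is converted to the zero-based index returned
   here.  */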
15009 /* If EXPR refers to a character array or pointer declared attribute
15010 nonstring, return a decl for that array or pointer and set *REF
15011 to the referenced enclosing object or pointer. Otherwise return
15012 null. */
15014 tree
15015 get_attr_nonstring_decl (tree expr, tree *ref)
15017 tree decl = expr;
15018 tree var = NULL_TREE;
15019 if (TREE_CODE (decl) == SSA_NAME)
15021 gimple *def = SSA_NAME_DEF_STMT (decl);
15023 if (is_gimple_assign (def))
15025 tree_code code = gimple_assign_rhs_code (def);
15026 if (code == ADDR_EXPR
15027 || code == COMPONENT_REF
15028 || code == VAR_DECL)
15029 decl = gimple_assign_rhs1 (def);
15031 else
15032 var = SSA_NAME_VAR (decl);
15035 if (TREE_CODE (decl) == ADDR_EXPR)
15036 decl = TREE_OPERAND (decl, 0);
15038 /* To simplify calling code, store the referenced DECL regardless of
15039 the attribute determined below, but avoid storing the SSA_NAME_VAR
15040 obtained above (it's not useful for dataflow purposes). */
15041 if (ref)
15042 *ref = decl;
15044 /* Use the SSA_NAME_VAR that was determined above to see if it's
15045 declared nonstring. Otherwise drill down into the referenced
15046 DECL. */
15047 if (var)
15048 decl = var;
15049 else if (TREE_CODE (decl) == ARRAY_REF)
15050 decl = TREE_OPERAND (decl, 0);
15051 else if (TREE_CODE (decl) == COMPONENT_REF)
15052 decl = TREE_OPERAND (decl, 1);
15053 else if (TREE_CODE (decl) == MEM_REF)
15054 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15056 if (DECL_P (decl)
15057 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15058 return decl;
15060 return NULL_TREE;
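/* Illustrative example (editorial addition; the declaration is hypothetical
   user code):

     char buf[8] __attribute__ ((nonstring));

   For an access such as buf[i], the function drills down from the ARRAY_REF
   to the VAR_DECL for BUF, finds the "nonstring" attribute and returns that
   decl, with *REF set to the enclosing reference that was examined.  */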
15063 /* Return the length of the attribute names string
15064 if the arglist chain has more than one entry, -1 otherwise. */
15067 get_target_clone_attr_len (tree arglist)
15069 tree arg;
15070 int str_len_sum = 0;
15071 int argnum = 0;
15073 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15075 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15076 size_t len = strlen (str);
15077 str_len_sum += len + 1;
15078 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15079 argnum++;
15080 argnum++;
15082 if (argnum <= 1)
15083 return -1;
15084 return str_len_sum;
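/* Illustrative example (editorial addition; the declaration is hypothetical):

     __attribute__ ((target_clones ("avx2", "arch=atom", "default")))
     int foo (void);

   The arglist holds three strings, so the summed string lengths plus one
   separator byte per string are returned; with only a single version the
   result is -1.  */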
15087 void
15088 tree_cc_finalize (void)
15090 clear_nonstandard_integer_type_cache ();
15091 vec_free (bitint_type_cache);
15094 #if CHECKING_P
15096 namespace selftest {
15098 /* Selftests for tree. */
15100 /* Verify that integer constants are sane. */
15102 static void
15103 test_integer_constants ()
15105 ASSERT_TRUE (integer_type_node != NULL);
15106 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15108 tree type = integer_type_node;
15110 tree zero = build_zero_cst (type);
15111 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15112 ASSERT_EQ (type, TREE_TYPE (zero));
15114 tree one = build_int_cst (type, 1);
15115 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15116 ASSERT_EQ (type, TREE_TYPE (one));
15119 /* Verify identifiers. */
15121 static void
15122 test_identifiers ()
15124 tree identifier = get_identifier ("foo");
15125 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15126 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15129 /* Verify LABEL_DECL. */
15131 static void
15132 test_labels ()
15134 tree identifier = get_identifier ("err");
15135 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15136 identifier, void_type_node);
15137 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15138 ASSERT_FALSE (FORCED_LABEL (label_decl));
15141 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15142 are given by VALS. */
15144 static tree
15145 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15147 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15148 tree_vector_builder builder (type, vals.length (), 1);
15149 builder.splice (vals);
15150 return builder.build ();
15153 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15155 static void
15156 check_vector_cst (const vec<tree> &expected, tree actual)
15158 ASSERT_KNOWN_EQ (expected.length (),
15159 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15160 for (unsigned int i = 0; i < expected.length (); ++i)
15161 ASSERT_EQ (wi::to_wide (expected[i]),
15162 wi::to_wide (vector_cst_elt (actual, i)));
15165 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15166 and that its elements match EXPECTED. */
15168 static void
15169 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15170 unsigned int npatterns)
15172 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15173 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15174 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15175 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15176 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15177 check_vector_cst (expected, actual);
15180 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15181 and NPATTERNS background elements, and that its elements match
15182 EXPECTED. */
15184 static void
15185 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15186 unsigned int npatterns)
15188 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15189 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15190 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15191 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15192 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15193 check_vector_cst (expected, actual);
15196 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15197 and that its elements match EXPECTED. */
15199 static void
15200 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15201 unsigned int npatterns)
15203 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15204 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15205 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15206 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15207 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15208 check_vector_cst (expected, actual);
15211 /* Test the creation of VECTOR_CSTs. */
15213 static void
15214 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15216 auto_vec<tree, 8> elements (8);
15217 elements.quick_grow (8);
15218 tree element_type = build_nonstandard_integer_type (16, true);
15219 tree vector_type = build_vector_type (element_type, 8);
15221 /* Test a simple linear series with a base of 0 and a step of 1:
15222 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15223 for (unsigned int i = 0; i < 8; ++i)
15224 elements[i] = build_int_cst (element_type, i);
15225 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15226 check_vector_cst_stepped (elements, vector, 1);
15228 /* Try the same with the first element replaced by 100:
15229 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15230 elements[0] = build_int_cst (element_type, 100);
15231 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15232 check_vector_cst_stepped (elements, vector, 1);
15234 /* Try a series that wraps around.
15235 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15236 for (unsigned int i = 1; i < 8; ++i)
15237 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15238 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15239 check_vector_cst_stepped (elements, vector, 1);
15241 /* Try a downward series:
15242 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15243 for (unsigned int i = 1; i < 8; ++i)
15244 elements[i] = build_int_cst (element_type, 80 - i);
15245 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15246 check_vector_cst_stepped (elements, vector, 1);
15248 /* Try two interleaved series with different bases and steps:
15249 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15250 elements[1] = build_int_cst (element_type, 53);
15251 for (unsigned int i = 2; i < 8; i += 2)
15253 elements[i] = build_int_cst (element_type, 70 - i * 2);
15254 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15256 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15257 check_vector_cst_stepped (elements, vector, 2);
15259 /* Try a duplicated value:
15260 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15261 for (unsigned int i = 1; i < 8; ++i)
15262 elements[i] = elements[0];
15263 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15264 check_vector_cst_duplicate (elements, vector, 1);
15266 /* Try an interleaved duplicated value:
15267 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15268 elements[1] = build_int_cst (element_type, 55);
15269 for (unsigned int i = 2; i < 8; ++i)
15270 elements[i] = elements[i - 2];
15271 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15272 check_vector_cst_duplicate (elements, vector, 2);
15274 /* Try a duplicated value with 2 exceptions
15275 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15276 elements[0] = build_int_cst (element_type, 41);
15277 elements[1] = build_int_cst (element_type, 97);
15278 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15279 check_vector_cst_fill (elements, vector, 2);
15281 /* Try with and without a step
15282 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15283 for (unsigned int i = 3; i < 8; i += 2)
15284 elements[i] = build_int_cst (element_type, i * 7);
15285 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15286 check_vector_cst_stepped (elements, vector, 2);
15288 /* Try a fully-general constant:
15289 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15290 elements[5] = build_int_cst (element_type, 9990);
15291 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15292 check_vector_cst_fill (elements, vector, 4);
15295 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15296 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15297 modifying its argument in-place. */
15299 static void
15300 check_strip_nops (tree node, tree expected)
15302 STRIP_NOPS (node);
15303 ASSERT_EQ (expected, node);
15306 /* Verify location wrappers. */
15308 static void
15309 test_location_wrappers ()
15311 location_t loc = BUILTINS_LOCATION;
15313 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15315 /* Wrapping a constant. */
15316 tree int_cst = build_int_cst (integer_type_node, 42);
15317 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15318 ASSERT_FALSE (location_wrapper_p (int_cst));
15320 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15321 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15322 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15323 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15325 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15326 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15328 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15329 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15330 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15331 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15333 /* Wrapping a STRING_CST. */
15334 tree string_cst = build_string (4, "foo");
15335 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15336 ASSERT_FALSE (location_wrapper_p (string_cst));
15338 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15339 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15340 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15341 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15342 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
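/* Note on the VIEW_CONVERT_EXPR assertion above: location wrappers are
ordinary NON_LVALUE_EXPR or VIEW_CONVERT_EXPR nodes distinguished by the
EXPR_LOCATION_WRAPPER_P flag, which is what location_wrapper_p checks.
maybe_wrap_with_location uses NON_LVALUE_EXPR for constants other than
STRING_CST (and for CONST_DECLs), and VIEW_CONVERT_EXPR otherwise, hence
the STRING_CST wrapper is expected to be a VIEW_CONVERT_EXPR. */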
15345 /* Wrapping a variable. */
15346 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15347 get_identifier ("some_int_var"),
15348 integer_type_node);
15349 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15350 ASSERT_FALSE (location_wrapper_p (int_var));
15352 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15353 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15354 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15355 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15357 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15358 wrapper. */
15359 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15360 ASSERT_FALSE (location_wrapper_p (r_cast));
15361 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15363 /* Verify that STRIP_NOPS removes wrappers. */
15364 check_strip_nops (wrapped_int_cst, int_cst);
15365 check_strip_nops (wrapped_string_cst, string_cst);
15366 check_strip_nops (wrapped_int_var, int_var);
15369 /* Test various tree predicates. Verify that location wrappers don't
15370 affect the results. */
15372 static void
15373 test_predicates ()
15375 /* Build various constants and wrappers around them. */
15377 location_t loc = BUILTINS_LOCATION;
15379 tree i_0 = build_int_cst (integer_type_node, 0);
15380 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15382 tree i_1 = build_int_cst (integer_type_node, 1);
15383 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15385 tree i_m1 = build_int_cst (integer_type_node, -1);
15386 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15388 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15389 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15390 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15391 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15392 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15393 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15395 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15396 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15397 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15399 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15400 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15401 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
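/* Naming convention for the fixtures above: i_* are integer constants,
f_* are float constants, c_i_* / c_f_* are complex constants with the
given real part and a zero imaginary part, m1 means minus one, and the
wr_* variants are the same constants wrapped in a location wrapper. */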
15403 /* TODO: vector constants. */
15405 /* Test integer_onep. */
15406 ASSERT_FALSE (integer_onep (i_0));
15407 ASSERT_FALSE (integer_onep (wr_i_0));
15408 ASSERT_TRUE (integer_onep (i_1));
15409 ASSERT_TRUE (integer_onep (wr_i_1));
15410 ASSERT_FALSE (integer_onep (i_m1));
15411 ASSERT_FALSE (integer_onep (wr_i_m1));
15412 ASSERT_FALSE (integer_onep (f_0));
15413 ASSERT_FALSE (integer_onep (wr_f_0));
15414 ASSERT_FALSE (integer_onep (f_1));
15415 ASSERT_FALSE (integer_onep (wr_f_1));
15416 ASSERT_FALSE (integer_onep (f_m1));
15417 ASSERT_FALSE (integer_onep (wr_f_m1));
15418 ASSERT_FALSE (integer_onep (c_i_0));
15419 ASSERT_TRUE (integer_onep (c_i_1));
15420 ASSERT_FALSE (integer_onep (c_i_m1));
15421 ASSERT_FALSE (integer_onep (c_f_0));
15422 ASSERT_FALSE (integer_onep (c_f_1));
15423 ASSERT_FALSE (integer_onep (c_f_m1));
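/* The wr_* cases above (and in the groups that follow) pass only because
each predicate strips location wrappers itself, typically via
STRIP_ANY_LOCATION_WRAPPER at the start of the function; the point of
these checks is that wrapping a constant does not change the answer. */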
15425 /* Test integer_zerop. */
15426 ASSERT_TRUE (integer_zerop (i_0));
15427 ASSERT_TRUE (integer_zerop (wr_i_0));
15428 ASSERT_FALSE (integer_zerop (i_1));
15429 ASSERT_FALSE (integer_zerop (wr_i_1));
15430 ASSERT_FALSE (integer_zerop (i_m1));
15431 ASSERT_FALSE (integer_zerop (wr_i_m1));
15432 ASSERT_FALSE (integer_zerop (f_0));
15433 ASSERT_FALSE (integer_zerop (wr_f_0));
15434 ASSERT_FALSE (integer_zerop (f_1));
15435 ASSERT_FALSE (integer_zerop (wr_f_1));
15436 ASSERT_FALSE (integer_zerop (f_m1));
15437 ASSERT_FALSE (integer_zerop (wr_f_m1));
15438 ASSERT_TRUE (integer_zerop (c_i_0));
15439 ASSERT_FALSE (integer_zerop (c_i_1));
15440 ASSERT_FALSE (integer_zerop (c_i_m1));
15441 ASSERT_FALSE (integer_zerop (c_f_0));
15442 ASSERT_FALSE (integer_zerop (c_f_1));
15443 ASSERT_FALSE (integer_zerop (c_f_m1));
15445 /* Test integer_all_onesp. */
15446 ASSERT_FALSE (integer_all_onesp (i_0));
15447 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15448 ASSERT_FALSE (integer_all_onesp (i_1));
15449 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15450 ASSERT_TRUE (integer_all_onesp (i_m1));
15451 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15452 ASSERT_FALSE (integer_all_onesp (f_0));
15453 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15454 ASSERT_FALSE (integer_all_onesp (f_1));
15455 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15456 ASSERT_FALSE (integer_all_onesp (f_m1));
15457 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15458 ASSERT_FALSE (integer_all_onesp (c_i_0));
15459 ASSERT_FALSE (integer_all_onesp (c_i_1));
15460 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15461 ASSERT_FALSE (integer_all_onesp (c_f_0));
15462 ASSERT_FALSE (integer_all_onesp (c_f_1));
15463 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15465 /* Test integer_minus_onep. */
15466 ASSERT_FALSE (integer_minus_onep (i_0));
15467 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15468 ASSERT_FALSE (integer_minus_onep (i_1));
15469 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15470 ASSERT_TRUE (integer_minus_onep (i_m1));
15471 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15472 ASSERT_FALSE (integer_minus_onep (f_0));
15473 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15474 ASSERT_FALSE (integer_minus_onep (f_1));
15475 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15476 ASSERT_FALSE (integer_minus_onep (f_m1));
15477 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15478 ASSERT_FALSE (integer_minus_onep (c_i_0));
15479 ASSERT_FALSE (integer_minus_onep (c_i_1));
15480 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15481 ASSERT_FALSE (integer_minus_onep (c_f_0));
15482 ASSERT_FALSE (integer_minus_onep (c_f_1));
15483 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15485 /* Test integer_each_onep. */
15486 ASSERT_FALSE (integer_each_onep (i_0));
15487 ASSERT_FALSE (integer_each_onep (wr_i_0));
15488 ASSERT_TRUE (integer_each_onep (i_1));
15489 ASSERT_TRUE (integer_each_onep (wr_i_1));
15490 ASSERT_FALSE (integer_each_onep (i_m1));
15491 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15492 ASSERT_FALSE (integer_each_onep (f_0));
15493 ASSERT_FALSE (integer_each_onep (wr_f_0));
15494 ASSERT_FALSE (integer_each_onep (f_1));
15495 ASSERT_FALSE (integer_each_onep (wr_f_1));
15496 ASSERT_FALSE (integer_each_onep (f_m1));
15497 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15498 ASSERT_FALSE (integer_each_onep (c_i_0));
15499 ASSERT_FALSE (integer_each_onep (c_i_1));
15500 ASSERT_FALSE (integer_each_onep (c_i_m1));
15501 ASSERT_FALSE (integer_each_onep (c_f_0));
15502 ASSERT_FALSE (integer_each_onep (c_f_1));
15503 ASSERT_FALSE (integer_each_onep (c_f_m1));
15505 /* Test integer_truep. */
15506 ASSERT_FALSE (integer_truep (i_0));
15507 ASSERT_FALSE (integer_truep (wr_i_0));
15508 ASSERT_TRUE (integer_truep (i_1));
15509 ASSERT_TRUE (integer_truep (wr_i_1));
15510 ASSERT_FALSE (integer_truep (i_m1));
15511 ASSERT_FALSE (integer_truep (wr_i_m1));
15512 ASSERT_FALSE (integer_truep (f_0));
15513 ASSERT_FALSE (integer_truep (wr_f_0));
15514 ASSERT_FALSE (integer_truep (f_1));
15515 ASSERT_FALSE (integer_truep (wr_f_1));
15516 ASSERT_FALSE (integer_truep (f_m1));
15517 ASSERT_FALSE (integer_truep (wr_f_m1));
15518 ASSERT_FALSE (integer_truep (c_i_0));
15519 ASSERT_TRUE (integer_truep (c_i_1));
15520 ASSERT_FALSE (integer_truep (c_i_m1));
15521 ASSERT_FALSE (integer_truep (c_f_0));
15522 ASSERT_FALSE (integer_truep (c_f_1));
15523 ASSERT_FALSE (integer_truep (c_f_m1));
15525 /* Test integer_nonzerop. */
15526 ASSERT_FALSE (integer_nonzerop (i_0));
15527 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15528 ASSERT_TRUE (integer_nonzerop (i_1));
15529 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15530 ASSERT_TRUE (integer_nonzerop (i_m1));
15531 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15532 ASSERT_FALSE (integer_nonzerop (f_0));
15533 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15534 ASSERT_FALSE (integer_nonzerop (f_1));
15535 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15536 ASSERT_FALSE (integer_nonzerop (f_m1));
15537 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15538 ASSERT_FALSE (integer_nonzerop (c_i_0));
15539 ASSERT_TRUE (integer_nonzerop (c_i_1));
15540 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15541 ASSERT_FALSE (integer_nonzerop (c_f_0));
15542 ASSERT_FALSE (integer_nonzerop (c_f_1));
15543 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15545 /* Test real_zerop. */
15546 ASSERT_FALSE (real_zerop (i_0));
15547 ASSERT_FALSE (real_zerop (wr_i_0));
15548 ASSERT_FALSE (real_zerop (i_1));
15549 ASSERT_FALSE (real_zerop (wr_i_1));
15550 ASSERT_FALSE (real_zerop (i_m1));
15551 ASSERT_FALSE (real_zerop (wr_i_m1));
15552 ASSERT_TRUE (real_zerop (f_0));
15553 ASSERT_TRUE (real_zerop (wr_f_0));
15554 ASSERT_FALSE (real_zerop (f_1));
15555 ASSERT_FALSE (real_zerop (wr_f_1));
15556 ASSERT_FALSE (real_zerop (f_m1));
15557 ASSERT_FALSE (real_zerop (wr_f_m1));
15558 ASSERT_FALSE (real_zerop (c_i_0));
15559 ASSERT_FALSE (real_zerop (c_i_1));
15560 ASSERT_FALSE (real_zerop (c_i_m1));
15561 ASSERT_TRUE (real_zerop (c_f_0));
15562 ASSERT_FALSE (real_zerop (c_f_1));
15563 ASSERT_FALSE (real_zerop (c_f_m1));
15565 /* Test real_onep. */
15566 ASSERT_FALSE (real_onep (i_0));
15567 ASSERT_FALSE (real_onep (wr_i_0));
15568 ASSERT_FALSE (real_onep (i_1));
15569 ASSERT_FALSE (real_onep (wr_i_1));
15570 ASSERT_FALSE (real_onep (i_m1));
15571 ASSERT_FALSE (real_onep (wr_i_m1));
15572 ASSERT_FALSE (real_onep (f_0));
15573 ASSERT_FALSE (real_onep (wr_f_0));
15574 ASSERT_TRUE (real_onep (f_1));
15575 ASSERT_TRUE (real_onep (wr_f_1));
15576 ASSERT_FALSE (real_onep (f_m1));
15577 ASSERT_FALSE (real_onep (wr_f_m1));
15578 ASSERT_FALSE (real_onep (c_i_0));
15579 ASSERT_FALSE (real_onep (c_i_1));
15580 ASSERT_FALSE (real_onep (c_i_m1));
15581 ASSERT_FALSE (real_onep (c_f_0));
15582 ASSERT_TRUE (real_onep (c_f_1));
15583 ASSERT_FALSE (real_onep (c_f_m1));
15585 /* Test real_minus_onep. */
15586 ASSERT_FALSE (real_minus_onep (i_0));
15587 ASSERT_FALSE (real_minus_onep (wr_i_0));
15588 ASSERT_FALSE (real_minus_onep (i_1));
15589 ASSERT_FALSE (real_minus_onep (wr_i_1));
15590 ASSERT_FALSE (real_minus_onep (i_m1));
15591 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15592 ASSERT_FALSE (real_minus_onep (f_0));
15593 ASSERT_FALSE (real_minus_onep (wr_f_0));
15594 ASSERT_FALSE (real_minus_onep (f_1));
15595 ASSERT_FALSE (real_minus_onep (wr_f_1));
15596 ASSERT_TRUE (real_minus_onep (f_m1));
15597 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15598 ASSERT_FALSE (real_minus_onep (c_i_0));
15599 ASSERT_FALSE (real_minus_onep (c_i_1));
15600 ASSERT_FALSE (real_minus_onep (c_i_m1));
15601 ASSERT_FALSE (real_minus_onep (c_f_0));
15602 ASSERT_FALSE (real_minus_onep (c_f_1));
15603 ASSERT_TRUE (real_minus_onep (c_f_m1));
15605 /* Test zerop. */
15606 ASSERT_TRUE (zerop (i_0));
15607 ASSERT_TRUE (zerop (wr_i_0));
15608 ASSERT_FALSE (zerop (i_1));
15609 ASSERT_FALSE (zerop (wr_i_1));
15610 ASSERT_FALSE (zerop (i_m1));
15611 ASSERT_FALSE (zerop (wr_i_m1));
15612 ASSERT_TRUE (zerop (f_0));
15613 ASSERT_TRUE (zerop (wr_f_0));
15614 ASSERT_FALSE (zerop (f_1));
15615 ASSERT_FALSE (zerop (wr_f_1));
15616 ASSERT_FALSE (zerop (f_m1));
15617 ASSERT_FALSE (zerop (wr_f_m1));
15618 ASSERT_TRUE (zerop (c_i_0));
15619 ASSERT_FALSE (zerop (c_i_1));
15620 ASSERT_FALSE (zerop (c_i_m1));
15621 ASSERT_TRUE (zerop (c_f_0));
15622 ASSERT_FALSE (zerop (c_f_1));
15623 ASSERT_FALSE (zerop (c_f_m1));
15625 /* Test tree_expr_nonnegative_p. */
15626 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15627 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15628 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15629 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15630 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15631 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15632 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15633 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15634 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15635 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15636 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15637 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15638 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15639 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15640 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15641 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15642 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15643 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15645 /* Test tree_expr_nonzero_p. */
15646 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15647 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15648 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15649 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15650 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15651 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15653 /* Test integer_valued_real_p. */
15654 ASSERT_FALSE (integer_valued_real_p (i_0));
15655 ASSERT_TRUE (integer_valued_real_p (f_0));
15656 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15657 ASSERT_TRUE (integer_valued_real_p (f_1));
15658 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15660 /* Test integer_pow2p. */
15661 ASSERT_FALSE (integer_pow2p (i_0));
15662 ASSERT_TRUE (integer_pow2p (i_1));
15663 ASSERT_TRUE (integer_pow2p (wr_i_1));
15665 /* Test uniform_integer_cst_p. */
15666 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15667 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15668 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15669 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15670 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15671 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15672 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15673 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15674 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15675 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15676 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15677 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15678 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15679 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15680 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15681 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15682 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15683 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15686 /* Check that string escaping works correctly. */
15688 static void
15689 test_escaped_strings (void)
15691 int saved_cutoff;
15692 escaped_string msg;
15694 msg.escape (NULL);
15695 /* ASSERT_STREQ does not accept NULL as a valid test
15696 result, so we have to use ASSERT_EQ instead. */
15697 ASSERT_EQ (NULL, (const char *) msg);
15699 msg.escape ("");
15700 ASSERT_STREQ ("", (const char *) msg);
15702 msg.escape ("foobar");
15703 ASSERT_STREQ ("foobar", (const char *) msg);
15705 /* Ensure that we have -fmessage-length set to 0. */
15706 saved_cutoff = pp_line_cutoff (global_dc->printer);
15707 pp_line_cutoff (global_dc->printer) = 0;
15709 msg.escape ("foo\nbar");
15710 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15712 msg.escape ("\a\b\f\n\r\t\v");
15713 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15715 /* Now repeat the tests with -fmessage-length set to 5. */
15716 pp_line_cutoff (global_dc->printer) = 5;
15718 /* Note that the newline is not translated into an escape. */
15719 msg.escape ("foo\nbar");
15720 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15722 msg.escape ("\a\b\f\n\r\t\v");
15723 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
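/* With a nonzero -fmessage-length the newline is deliberately left
unescaped while the other control characters are still escaped,
presumably so that the pretty-printer's line wrapping can cope with the
embedded newline; the two assertions above pin down that behaviour. */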
15725 /* Restore the original message length setting. */
15726 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15729 /* Run all of the selftests within this file. */
15731 void
15732 tree_cc_tests ()
15734 test_integer_constants ();
15735 test_identifiers ();
15736 test_labels ();
15737 test_vector_cst_patterns ();
15738 test_location_wrappers ();
15739 test_predicates ();
15740 test_escaped_strings ();
15743 } // namespace selftest
15745 #endif /* CHECKING_P */
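/* gt-tree.h is generated by gengtype and provides the garbage-collection
and PCH root tables for the GTY-marked globals defined in this file. */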
15747 #include "gt-tree.h"