gcc/tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 /* Names of tree components.
78 Used for printing out the tree and error messages. */
79 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
80 #define END_OF_BASE_TREE_CODES "@dummy",
82 static const char *const tree_code_name[] = {
83 #include "all-tree.def"
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
89 /* Each tree code class has an associated string representation.
90 These must correspond to the tree_code_class entries. */
92 const char *const tree_code_class_strings[] =
94 "exceptional",
95 "constant",
96 "type",
97 "declaration",
98 "reference",
99 "comparison",
100 "unary",
101 "binary",
102 "statement",
103 "vl_exp",
104 "expression"
107 /* obstack.[ch] explicitly declined to prototype this. */
108 extern int _obstack_allocated_p (struct obstack *h, void *obj);
110 /* Statistics-gathering stuff. */
112 static uint64_t tree_code_counts[MAX_TREE_CODES];
113 uint64_t tree_node_counts[(int) all_kinds];
114 uint64_t tree_node_sizes[(int) all_kinds];
116 /* Keep in sync with tree.h:enum tree_node_kind. */
117 static const char * const tree_node_kind_names[] = {
118 "decls",
119 "types",
120 "blocks",
121 "stmts",
122 "refs",
123 "exprs",
124 "constants",
125 "identifiers",
126 "vecs",
127 "binfos",
128 "ssa names",
129 "constructors",
130 "random kinds",
131 "lang_decl kinds",
132 "lang_type kinds",
133 "omp clauses",
136 /* Unique id for next decl created. */
137 static GTY(()) int next_decl_uid;
138 /* Unique id for next type created. */
139 static GTY(()) unsigned next_type_uid = 1;
140 /* Unique id for next debug decl created. Use negative numbers,
141 to catch erroneous uses. */
142 static GTY(()) int next_debug_decl_uid;
144 /* Since we cannot rehash a type after it is in the table, we have to
145 keep the hash code. */
147 struct GTY((for_user)) type_hash {
148 unsigned long hash;
149 tree type;
152 /* Initial size of the hash table (rounded to next prime). */
153 #define TYPE_HASH_INITIAL_SIZE 1000
155 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
157 static hashval_t hash (type_hash *t) { return t->hash; }
158 static bool equal (type_hash *a, type_hash *b);
160 static int
161 keep_cache_entry (type_hash *&t)
163 return ggc_marked_p (t->type);
167 /* Now here is the hash table. When recording a type, it is added to
168 the slot whose index is the hash code. Note that the hash table is
169 used for several kinds of types (function types, array types and
170 array index range types, for now). While all these live in the
171 same table, they are completely independent, and the hash code is
172 computed differently for each of these. */
174 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
176 /* Hash table and temporary node for larger integer const values. */
177 static GTY (()) tree int_cst_node;
179 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
181 static hashval_t hash (tree t);
182 static bool equal (tree x, tree y);
185 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
187 /* Class and variable for making sure that there is a single POLY_INT_CST
188 for a given value. */
189 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
191 typedef std::pair<tree, const poly_wide_int *> compare_type;
192 static hashval_t hash (tree t);
193 static bool equal (tree x, const compare_type &y);
196 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
198 /* Hash table for optimization flags and target option flags. Use the same
199 hash table for both sets of options. Nodes for building the current
200 optimization and target option nodes. The assumption is most of the time
201 the options created will already be in the hash table, so we avoid
202 allocating and freeing up a node repeatedly. */
203 static GTY (()) tree cl_optimization_node;
204 static GTY (()) tree cl_target_option_node;
206 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
212 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
214 /* General tree->tree mapping structure for use in hash tables. */
217 static GTY ((cache))
218 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
223 static GTY ((cache))
224 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
226 static void set_type_quals (tree, int);
227 static void print_type_hash_statistics (void);
228 static void print_debug_expr_statistics (void);
229 static void print_value_expr_statistics (void);
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
234 bool int_n_enabled_p[NUM_INT_N_ENTS];
235 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
237 bool tree_contains_struct[MAX_TREE_CODES][64];
239 /* Number of operands for each OMP clause. */
240 unsigned const char omp_clause_num_ops[] =
242 0, /* OMP_CLAUSE_ERROR */
243 1, /* OMP_CLAUSE_PRIVATE */
244 1, /* OMP_CLAUSE_SHARED */
245 1, /* OMP_CLAUSE_FIRSTPRIVATE */
246 2, /* OMP_CLAUSE_LASTPRIVATE */
247 5, /* OMP_CLAUSE_REDUCTION */
248 5, /* OMP_CLAUSE_TASK_REDUCTION */
249 5, /* OMP_CLAUSE_IN_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 1, /* OMP_CLAUSE_AFFINITY */
254 2, /* OMP_CLAUSE_ALIGNED */
255 3, /* OMP_CLAUSE_ALLOCATE */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_NONTEMPORAL */
258 1, /* OMP_CLAUSE_UNIFORM */
259 1, /* OMP_CLAUSE_ENTER */
260 1, /* OMP_CLAUSE_LINK */
261 1, /* OMP_CLAUSE_DETACH */
262 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
263 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
264 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
265 1, /* OMP_CLAUSE_INCLUSIVE */
266 1, /* OMP_CLAUSE_EXCLUSIVE */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
271 1, /* OMP_CLAUSE_DOACROSS */
272 2, /* OMP_CLAUSE__CACHE_ */
273 2, /* OMP_CLAUSE_GANG */
274 1, /* OMP_CLAUSE_ASYNC */
275 1, /* OMP_CLAUSE_WAIT */
276 0, /* OMP_CLAUSE_AUTO */
277 0, /* OMP_CLAUSE_SEQ */
278 1, /* OMP_CLAUSE__LOOPTEMP_ */
279 1, /* OMP_CLAUSE__REDUCTEMP_ */
280 1, /* OMP_CLAUSE__CONDTEMP_ */
281 1, /* OMP_CLAUSE__SCANTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_SELF */
284 1, /* OMP_CLAUSE_NUM_THREADS */
285 1, /* OMP_CLAUSE_SCHEDULE */
286 0, /* OMP_CLAUSE_NOWAIT */
287 1, /* OMP_CLAUSE_ORDERED */
288 0, /* OMP_CLAUSE_DEFAULT */
289 3, /* OMP_CLAUSE_COLLAPSE */
290 0, /* OMP_CLAUSE_UNTIED */
291 1, /* OMP_CLAUSE_FINAL */
292 0, /* OMP_CLAUSE_MERGEABLE */
293 1, /* OMP_CLAUSE_DEVICE */
294 1, /* OMP_CLAUSE_DIST_SCHEDULE */
295 0, /* OMP_CLAUSE_INBRANCH */
296 0, /* OMP_CLAUSE_NOTINBRANCH */
297 2, /* OMP_CLAUSE_NUM_TEAMS */
298 1, /* OMP_CLAUSE_THREAD_LIMIT */
299 0, /* OMP_CLAUSE_PROC_BIND */
300 1, /* OMP_CLAUSE_SAFELEN */
301 1, /* OMP_CLAUSE_SIMDLEN */
302 0, /* OMP_CLAUSE_DEVICE_TYPE */
303 0, /* OMP_CLAUSE_FOR */
304 0, /* OMP_CLAUSE_PARALLEL */
305 0, /* OMP_CLAUSE_SECTIONS */
306 0, /* OMP_CLAUSE_TASKGROUP */
307 1, /* OMP_CLAUSE_PRIORITY */
308 1, /* OMP_CLAUSE_GRAINSIZE */
309 1, /* OMP_CLAUSE_NUM_TASKS */
310 0, /* OMP_CLAUSE_NOGROUP */
311 0, /* OMP_CLAUSE_THREADS */
312 0, /* OMP_CLAUSE_SIMD */
313 1, /* OMP_CLAUSE_HINT */
314 0, /* OMP_CLAUSE_DEFAULTMAP */
315 0, /* OMP_CLAUSE_ORDER */
316 0, /* OMP_CLAUSE_BIND */
317 1, /* OMP_CLAUSE_FILTER */
318 1, /* OMP_CLAUSE_INDIRECT */
319 1, /* OMP_CLAUSE__SIMDUID_ */
320 0, /* OMP_CLAUSE__SIMT_ */
321 0, /* OMP_CLAUSE_INDEPENDENT */
322 1, /* OMP_CLAUSE_WORKER */
323 1, /* OMP_CLAUSE_VECTOR */
324 1, /* OMP_CLAUSE_NUM_GANGS */
325 1, /* OMP_CLAUSE_NUM_WORKERS */
326 1, /* OMP_CLAUSE_VECTOR_LENGTH */
327 3, /* OMP_CLAUSE_TILE */
328 0, /* OMP_CLAUSE_IF_PRESENT */
329 0, /* OMP_CLAUSE_FINALIZE */
330 0, /* OMP_CLAUSE_NOHOST */
333 const char * const omp_clause_code_name[] =
335 "error_clause",
336 "private",
337 "shared",
338 "firstprivate",
339 "lastprivate",
340 "reduction",
341 "task_reduction",
342 "in_reduction",
343 "copyin",
344 "copyprivate",
345 "linear",
346 "affinity",
347 "aligned",
348 "allocate",
349 "depend",
350 "nontemporal",
351 "uniform",
352 "enter",
353 "link",
354 "detach",
355 "use_device_ptr",
356 "use_device_addr",
357 "is_device_ptr",
358 "inclusive",
359 "exclusive",
360 "from",
361 "to",
362 "map",
363 "has_device_addr",
364 "doacross",
365 "_cache_",
366 "gang",
367 "async",
368 "wait",
369 "auto",
370 "seq",
371 "_looptemp_",
372 "_reductemp_",
373 "_condtemp_",
374 "_scantemp_",
375 "if",
376 "self",
377 "num_threads",
378 "schedule",
379 "nowait",
380 "ordered",
381 "default",
382 "collapse",
383 "untied",
384 "final",
385 "mergeable",
386 "device",
387 "dist_schedule",
388 "inbranch",
389 "notinbranch",
390 "num_teams",
391 "thread_limit",
392 "proc_bind",
393 "safelen",
394 "simdlen",
395 "device_type",
396 "for",
397 "parallel",
398 "sections",
399 "taskgroup",
400 "priority",
401 "grainsize",
402 "num_tasks",
403 "nogroup",
404 "threads",
405 "simd",
406 "hint",
407 "defaultmap",
408 "order",
409 "bind",
410 "filter",
411 "indirect",
412 "_simduid_",
413 "_simt_",
414 "independent",
415 "worker",
416 "vector",
417 "num_gangs",
418 "num_workers",
419 "vector_length",
420 "tile",
421 "if_present",
422 "finalize",
423 "nohost",
426 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
427 clause names, but for use in diagnostics etc. we would like to use the "user"
428 clause names. */
430 const char *
431 user_omp_clause_code_name (tree clause, bool oacc)
433 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
434 distinguish clauses as seen by the user. See also where front ends do
435 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
436 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
437 switch (OMP_CLAUSE_MAP_KIND (clause))
439 case GOMP_MAP_FORCE_ALLOC:
440 case GOMP_MAP_ALLOC: return "create";
441 case GOMP_MAP_FORCE_TO:
442 case GOMP_MAP_TO: return "copyin";
443 case GOMP_MAP_FORCE_FROM:
444 case GOMP_MAP_FROM: return "copyout";
445 case GOMP_MAP_FORCE_TOFROM:
446 case GOMP_MAP_TOFROM: return "copy";
447 case GOMP_MAP_RELEASE: return "delete";
448 case GOMP_MAP_FORCE_PRESENT: return "present";
449 case GOMP_MAP_ATTACH: return "attach";
450 case GOMP_MAP_FORCE_DETACH:
451 case GOMP_MAP_DETACH: return "detach";
452 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
453 case GOMP_MAP_LINK: return "link";
454 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
455 default: break;
458 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
462 /* Return the tree node structure used by tree code CODE. */
464 static inline enum tree_node_structure_enum
465 tree_node_structure_for_code (enum tree_code code)
467 switch (TREE_CODE_CLASS (code))
469 case tcc_declaration:
470 switch (code)
472 case CONST_DECL: return TS_CONST_DECL;
473 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
474 case FIELD_DECL: return TS_FIELD_DECL;
475 case FUNCTION_DECL: return TS_FUNCTION_DECL;
476 case LABEL_DECL: return TS_LABEL_DECL;
477 case PARM_DECL: return TS_PARM_DECL;
478 case RESULT_DECL: return TS_RESULT_DECL;
479 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
480 case TYPE_DECL: return TS_TYPE_DECL;
481 case VAR_DECL: return TS_VAR_DECL;
482 default: return TS_DECL_NON_COMMON;
485 case tcc_type: return TS_TYPE_NON_COMMON;
487 case tcc_binary:
488 case tcc_comparison:
489 case tcc_expression:
490 case tcc_reference:
491 case tcc_statement:
492 case tcc_unary:
493 case tcc_vl_exp: return TS_EXP;
495 default: /* tcc_constant and tcc_exceptional */
496 break;
499 switch (code)
501 /* tcc_constant cases. */
502 case COMPLEX_CST: return TS_COMPLEX;
503 case FIXED_CST: return TS_FIXED_CST;
504 case INTEGER_CST: return TS_INT_CST;
505 case POLY_INT_CST: return TS_POLY_INT_CST;
506 case REAL_CST: return TS_REAL_CST;
507 case STRING_CST: return TS_STRING;
508 case VECTOR_CST: return TS_VECTOR;
509 case VOID_CST: return TS_TYPED;
511 /* tcc_exceptional cases. */
512 case BLOCK: return TS_BLOCK;
513 case CONSTRUCTOR: return TS_CONSTRUCTOR;
514 case ERROR_MARK: return TS_COMMON;
515 case IDENTIFIER_NODE: return TS_IDENTIFIER;
516 case OMP_CLAUSE: return TS_OMP_CLAUSE;
517 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
518 case PLACEHOLDER_EXPR: return TS_COMMON;
519 case SSA_NAME: return TS_SSA_NAME;
520 case STATEMENT_LIST: return TS_STATEMENT_LIST;
521 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
522 case TREE_BINFO: return TS_BINFO;
523 case TREE_LIST: return TS_LIST;
524 case TREE_VEC: return TS_VEC;
526 default:
527 gcc_unreachable ();
532 /* Initialize tree_contains_struct to describe the hierarchy of tree
533 nodes. */
535 static void
536 initialize_tree_contains_struct (void)
538 unsigned i;
540 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
542 enum tree_code code;
543 enum tree_node_structure_enum ts_code;
545 code = (enum tree_code) i;
546 ts_code = tree_node_structure_for_code (code);
548 /* Mark the TS structure itself. */
549 tree_contains_struct[code][ts_code] = 1;
551 /* Mark all the structures that TS is derived from. */
552 switch (ts_code)
554 case TS_TYPED:
555 case TS_BLOCK:
556 case TS_OPTIMIZATION:
557 case TS_TARGET_OPTION:
558 MARK_TS_BASE (code);
559 break;
561 case TS_COMMON:
562 case TS_INT_CST:
563 case TS_POLY_INT_CST:
564 case TS_REAL_CST:
565 case TS_FIXED_CST:
566 case TS_VECTOR:
567 case TS_STRING:
568 case TS_COMPLEX:
569 case TS_SSA_NAME:
570 case TS_CONSTRUCTOR:
571 case TS_EXP:
572 case TS_STATEMENT_LIST:
573 MARK_TS_TYPED (code);
574 break;
576 case TS_IDENTIFIER:
577 case TS_DECL_MINIMAL:
578 case TS_TYPE_COMMON:
579 case TS_LIST:
580 case TS_VEC:
581 case TS_BINFO:
582 case TS_OMP_CLAUSE:
583 MARK_TS_COMMON (code);
584 break;
586 case TS_TYPE_WITH_LANG_SPECIFIC:
587 MARK_TS_TYPE_COMMON (code);
588 break;
590 case TS_TYPE_NON_COMMON:
591 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
592 break;
594 case TS_DECL_COMMON:
595 MARK_TS_DECL_MINIMAL (code);
596 break;
598 case TS_DECL_WRTL:
599 case TS_CONST_DECL:
600 MARK_TS_DECL_COMMON (code);
601 break;
603 case TS_DECL_NON_COMMON:
604 MARK_TS_DECL_WITH_VIS (code);
605 break;
607 case TS_DECL_WITH_VIS:
608 case TS_PARM_DECL:
609 case TS_LABEL_DECL:
610 case TS_RESULT_DECL:
611 MARK_TS_DECL_WRTL (code);
612 break;
614 case TS_FIELD_DECL:
615 MARK_TS_DECL_COMMON (code);
616 break;
618 case TS_VAR_DECL:
619 MARK_TS_DECL_WITH_VIS (code);
620 break;
622 case TS_TYPE_DECL:
623 case TS_FUNCTION_DECL:
624 MARK_TS_DECL_NON_COMMON (code);
625 break;
627 case TS_TRANSLATION_UNIT_DECL:
628 MARK_TS_DECL_COMMON (code);
629 break;
631 default:
632 gcc_unreachable ();
636 /* Basic consistency checks for attributes used in fold. */
637 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
638 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
639 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
645 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
646 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
647 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
648 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
650 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
651 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
652 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
653 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
659 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
660 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
661 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
662 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
663 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
664 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
665 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
666 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
667 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
668 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
669 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
670 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
671 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
672 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
673 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
674 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
675 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
676 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
680 /* Init tree.cc. */
682 void
683 init_ttree (void)
685 /* Initialize the hash table of types. */
686 type_hash_table
687 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
689 debug_expr_for_decl
690 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
692 value_expr_for_decl
693 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
695 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
697 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
699 int_cst_node = make_int_cst (1, 1);
701 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
703 cl_optimization_node = make_node (OPTIMIZATION_NODE);
704 cl_target_option_node = make_node (TARGET_OPTION_NODE);
706 /* Initialize the tree_contains_struct array. */
707 initialize_tree_contains_struct ();
708 lang_hooks.init_ts ();
712 /* The name of the object as the assembler will see it (but before any
713 translations made by ASM_OUTPUT_LABELREF). Often this is the same
714 as DECL_NAME. It is an IDENTIFIER_NODE. */
715 tree
716 decl_assembler_name (tree decl)
718 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
719 lang_hooks.set_decl_assembler_name (decl);
720 return DECL_ASSEMBLER_NAME_RAW (decl);
723 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
724 (either of which may be NULL). Inform the FE, if this changes the
725 name. */
727 void
728 overwrite_decl_assembler_name (tree decl, tree name)
730 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
731 lang_hooks.overwrite_decl_assembler_name (decl, name);
734 /* Return true if DECL may need an assembler name to be set. */
736 static inline bool
737 need_assembler_name_p (tree decl)
739 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
740 Rule merging. This makes type_odr_p return true on those types during
741 LTO, and by comparing the mangled names we can say what types are intended
742 to be equivalent across compilation units.
744 We do not store names of type_in_anonymous_namespace_p.
746 Record, union and enumeration types have linkage that allows us
747 to check type_in_anonymous_namespace_p. We do not mangle compound types
748 that can always be compared structurally.
750 Similarly for builtin types, we compare properties of their main variant.
751 A special case is integer types, where mangling does make a difference
752 between char/signed char/unsigned char etc. Storing names for these makes
753 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
754 See cp/mangle.cc:write_builtin_type for details. */
756 if (TREE_CODE (decl) == TYPE_DECL)
758 if (DECL_NAME (decl)
759 && decl == TYPE_NAME (TREE_TYPE (decl))
760 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
761 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
762 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
763 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
764 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
765 && (type_with_linkage_p (TREE_TYPE (decl))
766 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
767 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
768 return !DECL_ASSEMBLER_NAME_SET_P (decl);
769 return false;
771 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
772 if (!VAR_OR_FUNCTION_DECL_P (decl))
773 return false;
775 /* If DECL already has its assembler name set, it does not need a
776 new one. */
777 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
778 || DECL_ASSEMBLER_NAME_SET_P (decl))
779 return false;
781 /* Abstract decls do not need an assembler name. */
782 if (DECL_ABSTRACT_P (decl))
783 return false;
785 /* For VAR_DECLs, only static, public and external symbols need an
786 assembler name. */
787 if (VAR_P (decl)
788 && !TREE_STATIC (decl)
789 && !TREE_PUBLIC (decl)
790 && !DECL_EXTERNAL (decl))
791 return false;
793 if (TREE_CODE (decl) == FUNCTION_DECL)
795 /* Do not set assembler name on builtins. Allow RTL expansion to
796 decide whether to expand inline or via a regular call. */
797 if (fndecl_built_in_p (decl)
798 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
799 return false;
801 /* Functions represented in the callgraph need an assembler name. */
802 if (cgraph_node::get (decl) != NULL)
803 return true;
805 /* Unused and non-public functions don't need an assembler name. */
806 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
807 return false;
810 return true;
813 /* If T needs an assembler name, have one created for it. */
815 void
816 assign_assembler_name_if_needed (tree t)
818 if (need_assembler_name_p (t))
820 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
821 diagnostics that use input_location to show locus
822 information. The problem here is that, at this point,
823 input_location is generally anchored to the end of the file
824 (since the parser is long gone), so we don't have a good
825 position to pin it to.
827 To alleviate this problem, this uses the location of T's
828 declaration. Examples of this are
829 testsuite/g++.dg/template/cond2.C and
830 testsuite/g++.dg/template/pr35240.C. */
831 location_t saved_location = input_location;
832 input_location = DECL_SOURCE_LOCATION (t);
834 decl_assembler_name (t);
836 input_location = saved_location;
840 /* When the target supports COMDAT groups, this indicates which group the
841 DECL is associated with. This can be either an IDENTIFIER_NODE or a
842 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
843 tree
844 decl_comdat_group (const_tree node)
846 struct symtab_node *snode = symtab_node::get (node);
847 if (!snode)
848 return NULL;
849 return snode->get_comdat_group ();
852 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
853 tree
854 decl_comdat_group_id (const_tree node)
856 struct symtab_node *snode = symtab_node::get (node);
857 if (!snode)
858 return NULL;
859 return snode->get_comdat_group_id ();
862 /* When the target supports named sections, return the section name of
863 NODE as a string, or NULL if it is in no section. */
864 const char *
865 decl_section_name (const_tree node)
867 struct symtab_node *snode = symtab_node::get (node);
868 if (!snode)
869 return NULL;
870 return snode->get_section ();
873 /* Set the section name of NODE to the string VALUE, or clear it if
874 VALUE is NULL. */
875 void
876 set_decl_section_name (tree node, const char *value)
878 struct symtab_node *snode;
880 if (value == NULL)
882 snode = symtab_node::get (node);
883 if (!snode)
884 return;
886 else if (VAR_P (node))
887 snode = varpool_node::get_create (node);
888 else
889 snode = cgraph_node::get_create (node);
890 snode->set_section (value);
893 /* Set section name of NODE to match the section name of OTHER.
895 set_decl_section_name (decl, other) is equivalent to
896 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
897 efficient. */
898 void
899 set_decl_section_name (tree decl, const_tree other)
901 struct symtab_node *other_node = symtab_node::get (other);
902 if (other_node)
904 struct symtab_node *decl_node;
905 if (VAR_P (decl))
906 decl_node = varpool_node::get_create (decl);
907 else
908 decl_node = cgraph_node::get_create (decl);
909 decl_node->set_section (*other_node);
911 else
913 struct symtab_node *decl_node = symtab_node::get (decl);
914 if (!decl_node)
915 return;
916 decl_node->set_section (NULL);
920 /* Return TLS model of a variable NODE. */
921 enum tls_model
922 decl_tls_model (const_tree node)
924 struct varpool_node *snode = varpool_node::get (node);
925 if (!snode)
926 return TLS_MODEL_NONE;
927 return snode->tls_model;
930 /* Set TLS model of variable NODE to MODEL. */
931 void
932 set_decl_tls_model (tree node, enum tls_model model)
934 struct varpool_node *vnode;
936 if (model == TLS_MODEL_NONE)
938 vnode = varpool_node::get (node);
939 if (!vnode)
940 return;
942 else
943 vnode = varpool_node::get_create (node);
944 vnode->tls_model = model;
947 /* Compute the number of bytes occupied by a tree with code CODE.
948 This function cannot be used for nodes that have variable sizes,
949 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
950 size_t
951 tree_code_size (enum tree_code code)
953 switch (TREE_CODE_CLASS (code))
955 case tcc_declaration: /* A decl node */
956 switch (code)
958 case FIELD_DECL: return sizeof (tree_field_decl);
959 case PARM_DECL: return sizeof (tree_parm_decl);
960 case VAR_DECL: return sizeof (tree_var_decl);
961 case LABEL_DECL: return sizeof (tree_label_decl);
962 case RESULT_DECL: return sizeof (tree_result_decl);
963 case CONST_DECL: return sizeof (tree_const_decl);
964 case TYPE_DECL: return sizeof (tree_type_decl);
965 case FUNCTION_DECL: return sizeof (tree_function_decl);
966 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
967 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
968 case NAMESPACE_DECL:
969 case IMPORTED_DECL:
970 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
971 default:
972 gcc_checking_assert (code >= NUM_TREE_CODES);
973 return lang_hooks.tree_size (code);
976 case tcc_type: /* a type node */
977 switch (code)
979 case OFFSET_TYPE:
980 case ENUMERAL_TYPE:
981 case BOOLEAN_TYPE:
982 case INTEGER_TYPE:
983 case REAL_TYPE:
984 case OPAQUE_TYPE:
985 case POINTER_TYPE:
986 case REFERENCE_TYPE:
987 case NULLPTR_TYPE:
988 case FIXED_POINT_TYPE:
989 case COMPLEX_TYPE:
990 case VECTOR_TYPE:
991 case ARRAY_TYPE:
992 case RECORD_TYPE:
993 case UNION_TYPE:
994 case QUAL_UNION_TYPE:
995 case VOID_TYPE:
996 case FUNCTION_TYPE:
997 case METHOD_TYPE:
998 case BITINT_TYPE:
999 case LANG_TYPE: return sizeof (tree_type_non_common);
1000 default:
1001 gcc_checking_assert (code >= NUM_TREE_CODES);
1002 return lang_hooks.tree_size (code);
1005 case tcc_reference: /* a reference */
1006 case tcc_expression: /* an expression */
1007 case tcc_statement: /* an expression with side effects */
1008 case tcc_comparison: /* a comparison expression */
1009 case tcc_unary: /* a unary arithmetic expression */
1010 case tcc_binary: /* a binary arithmetic expression */
1011 return (sizeof (struct tree_exp)
1012 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1014 case tcc_constant: /* a constant */
1015 switch (code)
1017 case VOID_CST: return sizeof (tree_typed);
1018 case INTEGER_CST: gcc_unreachable ();
1019 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1020 case REAL_CST: return sizeof (tree_real_cst);
1021 case FIXED_CST: return sizeof (tree_fixed_cst);
1022 case COMPLEX_CST: return sizeof (tree_complex);
1023 case VECTOR_CST: gcc_unreachable ();
1024 case STRING_CST: gcc_unreachable ();
1025 default:
1026 gcc_checking_assert (code >= NUM_TREE_CODES);
1027 return lang_hooks.tree_size (code);
1030 case tcc_exceptional: /* something random, like an identifier. */
1031 switch (code)
1033 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1034 case TREE_LIST: return sizeof (tree_list);
1036 case ERROR_MARK:
1037 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1039 case TREE_VEC: gcc_unreachable ();
1040 case OMP_CLAUSE: gcc_unreachable ();
1042 case SSA_NAME: return sizeof (tree_ssa_name);
1044 case STATEMENT_LIST: return sizeof (tree_statement_list);
1045 case BLOCK: return sizeof (struct tree_block);
1046 case CONSTRUCTOR: return sizeof (tree_constructor);
1047 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1048 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1050 default:
1051 gcc_checking_assert (code >= NUM_TREE_CODES);
1052 return lang_hooks.tree_size (code);
1055 default:
1056 gcc_unreachable ();
1060 /* Compute the number of bytes occupied by NODE. This routine only
1061 looks at TREE_CODE, except for those nodes that have variable sizes. */
1062 size_t
1063 tree_size (const_tree node)
1065 const enum tree_code code = TREE_CODE (node);
1066 switch (code)
1068 case INTEGER_CST:
1069 return (sizeof (struct tree_int_cst)
1070 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1072 case TREE_BINFO:
1073 return (offsetof (struct tree_binfo, base_binfos)
1074 + vec<tree, va_gc>
1075 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1077 case TREE_VEC:
1078 return (sizeof (struct tree_vec)
1079 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1081 case VECTOR_CST:
1082 return (sizeof (struct tree_vector)
1083 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1085 case STRING_CST:
1086 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1088 case OMP_CLAUSE:
1089 return (sizeof (struct tree_omp_clause)
1090 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1091 * sizeof (tree));
1093 default:
1094 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1095 return (sizeof (struct tree_exp)
1096 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1097 else
1098 return tree_code_size (code);
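
/* A hypothetical sketch, purely illustrative and not part of tree.cc,
   contrasting the two size queries above: tree_code_size for nodes whose
   size depends only on the code, tree_size for variable-sized nodes such
   as INTEGER_CST.

     size_t decl_bytes = tree_code_size (VAR_DECL);  // fixed per code
     tree c = build_int_cst (integer_type_node, 0);
     size_t cst_bytes = tree_size (c);  // depends on TREE_INT_CST_EXT_NUNITS  */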
1102 /* Return tree node kind based on tree CODE. */
1104 static tree_node_kind
1105 get_stats_node_kind (enum tree_code code)
1107 enum tree_code_class type = TREE_CODE_CLASS (code);
1109 switch (type)
1111 case tcc_declaration: /* A decl node */
1112 return d_kind;
1113 case tcc_type: /* a type node */
1114 return t_kind;
1115 case tcc_statement: /* an expression with side effects */
1116 return s_kind;
1117 case tcc_reference: /* a reference */
1118 return r_kind;
1119 case tcc_expression: /* an expression */
1120 case tcc_comparison: /* a comparison expression */
1121 case tcc_unary: /* a unary arithmetic expression */
1122 case tcc_binary: /* a binary arithmetic expression */
1123 return e_kind;
1124 case tcc_constant: /* a constant */
1125 return c_kind;
1126 case tcc_exceptional: /* something random, like an identifier. */
1127 switch (code)
1129 case IDENTIFIER_NODE:
1130 return id_kind;
1131 case TREE_VEC:
1132 return vec_kind;
1133 case TREE_BINFO:
1134 return binfo_kind;
1135 case SSA_NAME:
1136 return ssa_name_kind;
1137 case BLOCK:
1138 return b_kind;
1139 case CONSTRUCTOR:
1140 return constr_kind;
1141 case OMP_CLAUSE:
1142 return omp_clause_kind;
1143 default:
1144 return x_kind;
1146 break;
1147 case tcc_vl_exp:
1148 return e_kind;
1149 default:
1150 gcc_unreachable ();
1154 /* Record interesting allocation statistics for a tree node with CODE
1155 and LENGTH. */
1157 static void
1158 record_node_allocation_statistics (enum tree_code code, size_t length)
1160 if (!GATHER_STATISTICS)
1161 return;
1163 tree_node_kind kind = get_stats_node_kind (code);
1165 tree_code_counts[(int) code]++;
1166 tree_node_counts[(int) kind]++;
1167 tree_node_sizes[(int) kind] += length;
1170 /* Allocate and return a new UID from the DECL_UID namespace. */
1173 allocate_decl_uid (void)
1175 return next_decl_uid++;
1178 /* Return a newly allocated node of code CODE. For decl and type
1179 nodes, some other fields are initialized. The rest of the node is
1180 initialized to zero. This function cannot be used for TREE_VEC,
1181 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1182 tree_code_size.
1184 Achoo! I got a code in the node. */
1186 tree
1187 make_node (enum tree_code code MEM_STAT_DECL)
1189 tree t;
1190 enum tree_code_class type = TREE_CODE_CLASS (code);
1191 size_t length = tree_code_size (code);
1193 record_node_allocation_statistics (code, length);
1195 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1196 TREE_SET_CODE (t, code);
1198 switch (type)
1200 case tcc_statement:
1201 if (code != DEBUG_BEGIN_STMT)
1202 TREE_SIDE_EFFECTS (t) = 1;
1203 break;
1205 case tcc_declaration:
1206 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1208 if (code == FUNCTION_DECL)
1210 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1211 SET_DECL_MODE (t, FUNCTION_MODE);
1213 else
1214 SET_DECL_ALIGN (t, 1);
1216 DECL_SOURCE_LOCATION (t) = input_location;
1217 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1218 DECL_UID (t) = --next_debug_decl_uid;
1219 else
1221 DECL_UID (t) = allocate_decl_uid ();
1222 SET_DECL_PT_UID (t, -1);
1224 if (TREE_CODE (t) == LABEL_DECL)
1225 LABEL_DECL_UID (t) = -1;
1227 break;
1229 case tcc_type:
1230 TYPE_UID (t) = next_type_uid++;
1231 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1232 TYPE_USER_ALIGN (t) = 0;
1233 TYPE_MAIN_VARIANT (t) = t;
1234 TYPE_CANONICAL (t) = t;
1236 /* Default to no attributes for type, but let target change that. */
1237 TYPE_ATTRIBUTES (t) = NULL_TREE;
1238 targetm.set_default_type_attributes (t);
1240 /* We have not yet computed the alias set for this type. */
1241 TYPE_ALIAS_SET (t) = -1;
1242 break;
1244 case tcc_constant:
1245 TREE_CONSTANT (t) = 1;
1246 break;
1248 case tcc_expression:
1249 switch (code)
1251 case INIT_EXPR:
1252 case MODIFY_EXPR:
1253 case VA_ARG_EXPR:
1254 case PREDECREMENT_EXPR:
1255 case PREINCREMENT_EXPR:
1256 case POSTDECREMENT_EXPR:
1257 case POSTINCREMENT_EXPR:
1258 /* All of these have side-effects, no matter what their
1259 operands are. */
1260 TREE_SIDE_EFFECTS (t) = 1;
1261 break;
1263 default:
1264 break;
1266 break;
1268 case tcc_exceptional:
1269 switch (code)
1271 case TARGET_OPTION_NODE:
1272 TREE_TARGET_OPTION(t)
1273 = ggc_cleared_alloc<struct cl_target_option> ();
1274 break;
1276 case OPTIMIZATION_NODE:
1277 TREE_OPTIMIZATION (t)
1278 = ggc_cleared_alloc<struct cl_optimization> ();
1279 break;
1281 default:
1282 break;
1284 break;
1286 default:
1287 /* Other classes need no special treatment. */
1288 break;
1291 return t;
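
/* A hypothetical usage sketch of make_node, illustrative only and not part
   of tree.cc; it uses only macros that already exist in the tree API
   (RECORD_TYPE, TYPE_MAIN_VARIANT, TYPE_ALIGN, DECL_SOURCE_LOCATION).

     tree rec = make_node (RECORD_TYPE);
     // A fresh type is its own main variant and canonical type, aligned to
     // BITS_PER_UNIT, with no alias set computed yet.
     gcc_assert (TYPE_MAIN_VARIANT (rec) == rec
                 && TYPE_ALIGN (rec) == BITS_PER_UNIT);

     tree var = make_node (VAR_DECL);
     // A fresh decl gets a new DECL_UID and inherits input_location.
     gcc_assert (DECL_SOURCE_LOCATION (var) == input_location);  */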
1294 /* Free tree node. */
1296 void
1297 free_node (tree node)
1299 enum tree_code code = TREE_CODE (node);
1300 if (GATHER_STATISTICS)
1302 enum tree_node_kind kind = get_stats_node_kind (code);
1304 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1305 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1306 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1308 tree_code_counts[(int) TREE_CODE (node)]--;
1309 tree_node_counts[(int) kind]--;
1310 tree_node_sizes[(int) kind] -= tree_size (node);
1312 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1313 vec_free (CONSTRUCTOR_ELTS (node));
1314 else if (code == BLOCK)
1315 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1316 else if (code == TREE_BINFO)
1317 vec_free (BINFO_BASE_ACCESSES (node));
1318 else if (code == OPTIMIZATION_NODE)
1319 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1320 else if (code == TARGET_OPTION_NODE)
1321 cl_target_option_free (TREE_TARGET_OPTION (node));
1322 ggc_free (node);
1325 /* Return a new node with the same contents as NODE except that its
1326 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1328 tree
1329 copy_node (tree node MEM_STAT_DECL)
1331 tree t;
1332 enum tree_code code = TREE_CODE (node);
1333 size_t length;
1335 gcc_assert (code != STATEMENT_LIST);
1337 length = tree_size (node);
1338 record_node_allocation_statistics (code, length);
1339 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1340 memcpy (t, node, length);
1342 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1343 TREE_CHAIN (t) = 0;
1344 TREE_ASM_WRITTEN (t) = 0;
1345 TREE_VISITED (t) = 0;
1347 if (TREE_CODE_CLASS (code) == tcc_declaration)
1349 if (code == DEBUG_EXPR_DECL)
1350 DECL_UID (t) = --next_debug_decl_uid;
1351 else
1353 DECL_UID (t) = allocate_decl_uid ();
1354 if (DECL_PT_UID_SET_P (node))
1355 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1357 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1358 && DECL_HAS_VALUE_EXPR_P (node))
1360 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1361 DECL_HAS_VALUE_EXPR_P (t) = 1;
1363 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1364 if (VAR_P (node))
1366 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1367 t->decl_with_vis.symtab_node = NULL;
1369 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1371 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1372 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1374 if (TREE_CODE (node) == FUNCTION_DECL)
1376 DECL_STRUCT_FUNCTION (t) = NULL;
1377 t->decl_with_vis.symtab_node = NULL;
1380 else if (TREE_CODE_CLASS (code) == tcc_type)
1382 TYPE_UID (t) = next_type_uid++;
1383 /* The following is so that the debug code for
1384 the copy is different from the original type.
1385 The two statements usually duplicate each other
1386 (because they clear fields of the same union),
1387 but the optimizer should catch that. */
1388 TYPE_SYMTAB_ADDRESS (t) = 0;
1389 TYPE_SYMTAB_DIE (t) = 0;
1391 /* Do not copy the values cache. */
1392 if (TYPE_CACHED_VALUES_P (t))
1394 TYPE_CACHED_VALUES_P (t) = 0;
1395 TYPE_CACHED_VALUES (t) = NULL_TREE;
1398 else if (code == TARGET_OPTION_NODE)
1400 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1401 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1402 sizeof (struct cl_target_option));
1404 else if (code == OPTIMIZATION_NODE)
1406 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1407 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1408 sizeof (struct cl_optimization));
1411 return t;
1414 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1415 For example, this can copy a list made of TREE_LIST nodes. */
1417 tree
1418 copy_list (tree list)
1420 tree head;
1421 tree prev, next;
1423 if (list == 0)
1424 return 0;
1426 head = prev = copy_node (list);
1427 next = TREE_CHAIN (list);
1428 while (next)
1430 TREE_CHAIN (prev) = copy_node (next);
1431 prev = TREE_CHAIN (prev);
1432 next = TREE_CHAIN (next);
1434 return head;
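
/* A small illustrative sketch, not part of tree.cc, of copy_list on a
   TREE_LIST chain built with the standard tree_cons helper.

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
                            tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
     tree dup = copy_list (orig);
     // The chain is duplicated node by node; the TREE_VALUEs themselves
     // are shared, not copied.
     gcc_assert (dup != orig && TREE_VALUE (dup) == TREE_VALUE (orig));  */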
1438 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1439 INTEGER_CST with value CST and type TYPE. */
1441 static unsigned int
1442 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1444 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1445 /* We need extra HWIs if CST is an unsigned integer with its
1446 upper bit set. */
1447 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1448 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1449 return cst.get_len ();
1452 /* Return a new INTEGER_CST with value CST and type TYPE. */
1454 static tree
1455 build_new_int_cst (tree type, const wide_int &cst)
1457 unsigned int len = cst.get_len ();
1458 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1459 tree nt = make_int_cst (len, ext_len);
1461 if (len < ext_len)
1463 --ext_len;
1464 TREE_INT_CST_ELT (nt, ext_len)
1465 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1466 for (unsigned int i = len; i < ext_len; ++i)
1467 TREE_INT_CST_ELT (nt, i) = -1;
1469 else if (TYPE_UNSIGNED (type)
1470 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1472 len--;
1473 TREE_INT_CST_ELT (nt, len)
1474 = zext_hwi (cst.elt (len),
1475 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1478 for (unsigned int i = 0; i < len; i++)
1479 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1480 TREE_TYPE (nt) = type;
1481 return nt;
1484 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1486 static tree
1487 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1488 CXX_MEM_STAT_INFO)
1490 size_t length = sizeof (struct tree_poly_int_cst);
1491 record_node_allocation_statistics (POLY_INT_CST, length);
1493 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1495 TREE_SET_CODE (t, POLY_INT_CST);
1496 TREE_CONSTANT (t) = 1;
1497 TREE_TYPE (t) = type;
1498 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1499 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1500 return t;
1503 /* Create a constant tree that contains CST sign-extended to TYPE. */
1505 tree
1506 build_int_cst (tree type, poly_int64 cst)
1508 /* Support legacy code. */
1509 if (!type)
1510 type = integer_type_node;
1512 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
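
/* A hedged usage sketch for build_int_cst, illustrative only and not part
   of tree.cc; integer_type_node and tree_to_shwi are existing GCC entities.

     tree forty_two = build_int_cst (integer_type_node, 42);
     // Small values are shared per type (see wide_int_to_tree_1 below),
     // so asking again returns the identical node.
     gcc_assert (forty_two == build_int_cst (integer_type_node, 42)
                 && tree_to_shwi (forty_two) == 42);  */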
1515 /* Create a constant tree that contains CST zero-extended to TYPE. */
1517 tree
1518 build_int_cstu (tree type, poly_uint64 cst)
1520 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1523 /* Create a constant tree that contains CST sign-extended to TYPE. */
1525 tree
1526 build_int_cst_type (tree type, poly_int64 cst)
1528 gcc_assert (type);
1529 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1532 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1533 of CST is assumed to be the same as the signedness of TYPE. */
1535 tree
1536 double_int_to_tree (tree type, double_int cst)
1538 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1541 /* We force the wide_int CST to the range of the type TYPE by sign or
1542 zero extending it. OVERFLOWABLE indicates if we are interested in
1543 overflow of the value, when >0 we are only interested in signed
1544 overflow, for <0 we are interested in any overflow. OVERFLOWED
1545 indicates whether overflow has already occurred. CONST_OVERFLOWED
1546 indicates whether constant overflow has already occurred. We force
1547 T's value to be within range of T's type (by setting to 0 or 1 all
1548 the bits outside the type's range). We set TREE_OVERFLOW if
1549 OVERFLOWED is nonzero,
1550 or OVERFLOWABLE is >0 and signed overflow occurs,
1551 or OVERFLOWABLE is <0 and any overflow occurs.
1552 We return a new tree node for the extended wide_int. The node
1553 is shared if no overflow flags are set. */
1556 tree
1557 force_fit_type (tree type, const poly_wide_int_ref &cst,
1558 int overflowable, bool overflowed)
1560 signop sign = TYPE_SIGN (type);
1562 /* If we need to set overflow flags, return a new unshared node. */
1563 if (overflowed || !wi::fits_to_tree_p (cst, type))
1565 if (overflowed
1566 || overflowable < 0
1567 || (overflowable > 0 && sign == SIGNED))
1569 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1570 sign);
1571 tree t;
1572 if (tmp.is_constant ())
1573 t = build_new_int_cst (type, tmp.coeffs[0]);
1574 else
1576 tree coeffs[NUM_POLY_INT_COEFFS];
1577 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1579 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1580 TREE_OVERFLOW (coeffs[i]) = 1;
1582 t = build_new_poly_int_cst (type, coeffs);
1584 TREE_OVERFLOW (t) = 1;
1585 return t;
1589 /* Else build a shared node. */
1590 return wide_int_to_tree (type, cst);
1593 /* These are the hash table functions for the hash table of INTEGER_CST
1594 nodes of a sizetype. */
1596 /* Return the hash code for X, an INTEGER_CST. */
1598 hashval_t
1599 int_cst_hasher::hash (tree x)
1601 const_tree const t = x;
1602 hashval_t code = TYPE_UID (TREE_TYPE (t));
1603 int i;
1605 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1606 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1608 return code;
1611 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1612 is the same as that given by *Y, which is also an INTEGER_CST. */
1614 bool
1615 int_cst_hasher::equal (tree x, tree y)
1617 const_tree const xt = x;
1618 const_tree const yt = y;
1620 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1621 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1622 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1623 return false;
1625 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1626 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1627 return false;
1629 return true;
1632 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1633 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1634 number of slots that can be cached for the type. */
1636 static inline tree
1637 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1638 int slot, int max_slots)
1640 gcc_checking_assert (slot >= 0);
1641 /* Initialize cache. */
1642 if (!TYPE_CACHED_VALUES_P (type))
1644 TYPE_CACHED_VALUES_P (type) = 1;
1645 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1647 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1648 if (!t)
1650 /* Create a new shared int. */
1651 t = build_new_int_cst (type, cst);
1652 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1654 return t;
1657 /* Create an INT_CST node of TYPE and value CST.
1658 The returned node is always shared. For small integers we use a
1659 per-type vector cache, for larger ones we use a single hash table.
1660 The value is extended from its precision according to the sign of
1661 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1662 the upper bits and ensures that hashing and value equality based
1663 upon the underlying HOST_WIDE_INTs works without masking. */
1665 static tree
1666 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1668 tree t;
1669 int ix = -1;
1670 int limit = 0;
1672 gcc_assert (type);
1673 unsigned int prec = TYPE_PRECISION (type);
1674 signop sgn = TYPE_SIGN (type);
1676 /* Verify that everything is canonical. */
1677 int l = pcst.get_len ();
1678 if (l > 1)
1680 if (pcst.elt (l - 1) == 0)
1681 gcc_checking_assert (pcst.elt (l - 2) < 0);
1682 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1683 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1686 wide_int cst = wide_int::from (pcst, prec, sgn);
1687 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1689 enum tree_code code = TREE_CODE (type);
1690 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1692 /* Cache NULL pointer and zero bounds. */
1693 if (cst == 0)
1694 ix = 0;
1695 /* Cache upper bounds of pointers. */
1696 else if (cst == wi::max_value (prec, sgn))
1697 ix = 1;
1698 /* Cache 1 which is used for a non-zero range. */
1699 else if (cst == 1)
1700 ix = 2;
1702 if (ix >= 0)
1704 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1705 /* Make sure no one is clobbering the shared constant. */
1706 gcc_checking_assert (TREE_TYPE (t) == type
1707 && cst == wi::to_wide (t));
1708 return t;
1711 if (ext_len == 1)
1713 /* We just need to store a single HOST_WIDE_INT. */
1714 HOST_WIDE_INT hwi;
1715 if (TYPE_UNSIGNED (type))
1716 hwi = cst.to_uhwi ();
1717 else
1718 hwi = cst.to_shwi ();
1720 switch (code)
1722 case NULLPTR_TYPE:
1723 gcc_assert (hwi == 0);
1724 /* Fallthru. */
1726 case POINTER_TYPE:
1727 case REFERENCE_TYPE:
1728 /* Ignore pointers, as they were already handled above. */
1729 break;
1731 case BOOLEAN_TYPE:
1732 /* Cache false or true. */
1733 limit = 2;
1734 if (IN_RANGE (hwi, 0, 1))
1735 ix = hwi;
1736 break;
1738 case INTEGER_TYPE:
1739 case OFFSET_TYPE:
1740 case BITINT_TYPE:
1741 if (TYPE_SIGN (type) == UNSIGNED)
1743 /* Cache [0, N). */
1744 limit = param_integer_share_limit;
1745 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1746 ix = hwi;
1748 else
1750 /* Cache [-1, N). */
1751 limit = param_integer_share_limit + 1;
1752 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1753 ix = hwi + 1;
1755 break;
1757 case ENUMERAL_TYPE:
1758 break;
1760 default:
1761 gcc_unreachable ();
1764 if (ix >= 0)
1766 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1767 /* Make sure no one is clobbering the shared constant. */
1768 gcc_checking_assert (TREE_TYPE (t) == type
1769 && TREE_INT_CST_NUNITS (t) == 1
1770 && TREE_INT_CST_EXT_NUNITS (t) == 1
1771 && TREE_INT_CST_ELT (t, 0) == hwi);
1772 return t;
1774 else
1776 /* Use the cache of larger shared ints, using int_cst_node as
1777 a temporary. */
1779 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1780 TREE_TYPE (int_cst_node) = type;
1782 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1783 t = *slot;
1784 if (!t)
1786 /* Insert this one into the hash table. */
1787 t = int_cst_node;
1788 *slot = t;
1789 /* Make a new node for next time round. */
1790 int_cst_node = make_int_cst (1, 1);
1794 else
1796 /* The value either hashes properly or we drop it on the floor
1797 for the gc to take care of. There will not be enough of them
1798 to worry about. */
1800 tree nt = build_new_int_cst (type, cst);
1801 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1802 t = *slot;
1803 if (!t)
1805 /* Insert this one into the hash table. */
1806 t = nt;
1807 *slot = t;
1809 else
1810 ggc_free (nt);
1813 return t;
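
/* An illustrative sketch, not part of tree.cc, of the sharing behavior
   implemented above: small values come from the per-type
   TYPE_CACHED_VALUES vector, everything else from int_cst_hash_table,
   and either way the returned INTEGER_CST is shared.

     wide_int w = wi::shwi (123456789,
                            TYPE_PRECISION (integer_type_node));
     tree big = wide_int_to_tree (integer_type_node, w);
     gcc_assert (big == wide_int_to_tree (integer_type_node, w));  */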
1816 hashval_t
1817 poly_int_cst_hasher::hash (tree t)
1819 inchash::hash hstate;
1821 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1822 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1823 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1825 return hstate.end ();
1828 bool
1829 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1831 if (TREE_TYPE (x) != y.first)
1832 return false;
1833 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1834 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1835 return false;
1836 return true;
1839 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1840 The elements must also have type TYPE. */
1842 tree
1843 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1845 unsigned int prec = TYPE_PRECISION (type);
1846 gcc_assert (prec <= values.coeffs[0].get_precision ());
1847 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1849 inchash::hash h;
1850 h.add_int (TYPE_UID (type));
1851 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1852 h.add_wide_int (c.coeffs[i]);
1853 poly_int_cst_hasher::compare_type comp (type, &c);
1854 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1855 INSERT);
1856 if (*slot == NULL_TREE)
1858 tree coeffs[NUM_POLY_INT_COEFFS];
1859 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1860 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1861 *slot = build_new_poly_int_cst (type, coeffs);
1863 return *slot;
1866 /* Create a constant tree with value VALUE in type TYPE. */
1868 tree
1869 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1871 if (value.is_constant ())
1872 return wide_int_to_tree_1 (type, value.coeffs[0]);
1873 return build_poly_int_cst (type, value);
1876 /* Insert INTEGER_CST T into a cache of integer constants, and return
1877 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1878 is false, and T falls into the type's 'smaller values' range, there
1879 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1880 or the value is large, should an existing entry exist, it is
1881 returned (rather than inserting T). */
1883 tree
1884 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1886 tree type = TREE_TYPE (t);
1887 int ix = -1;
1888 int limit = 0;
1889 int prec = TYPE_PRECISION (type);
1891 gcc_assert (!TREE_OVERFLOW (t));
1893 /* The caching indices here must match those in
1894 wide_int_to_tree_1. */
1895 switch (TREE_CODE (type))
1897 case NULLPTR_TYPE:
1898 gcc_checking_assert (integer_zerop (t));
1899 /* Fallthru. */
1901 case POINTER_TYPE:
1902 case REFERENCE_TYPE:
1904 if (integer_zerop (t))
1905 ix = 0;
1906 else if (integer_onep (t))
1907 ix = 2;
1909 if (ix >= 0)
1910 limit = 3;
1912 break;
1914 case BOOLEAN_TYPE:
1915 /* Cache false or true. */
1916 limit = 2;
1917 if (wi::ltu_p (wi::to_wide (t), 2))
1918 ix = TREE_INT_CST_ELT (t, 0);
1919 break;
1921 case INTEGER_TYPE:
1922 case OFFSET_TYPE:
1923 case BITINT_TYPE:
1924 if (TYPE_UNSIGNED (type))
1926 /* Cache 0..N */
1927 limit = param_integer_share_limit;
1929 /* This is a little hokey, but if the prec is smaller than
1930 what is necessary to hold param_integer_share_limit, then the
1931 obvious test will not get the correct answer. */
1932 if (prec < HOST_BITS_PER_WIDE_INT)
1934 if (tree_to_uhwi (t)
1935 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1936 ix = tree_to_uhwi (t);
1938 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1939 ix = tree_to_uhwi (t);
1941 else
1943 /* Cache -1..N */
1944 limit = param_integer_share_limit + 1;
1946 if (integer_minus_onep (t))
1947 ix = 0;
1948 else if (!wi::neg_p (wi::to_wide (t)))
1950 if (prec < HOST_BITS_PER_WIDE_INT)
1952 if (tree_to_shwi (t) < param_integer_share_limit)
1953 ix = tree_to_shwi (t) + 1;
1955 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1956 ix = tree_to_shwi (t) + 1;
1959 break;
1961 case ENUMERAL_TYPE:
1962 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1963 members. */
1964 break;
1966 default:
1967 gcc_unreachable ();
1970 if (ix >= 0)
1972 /* Look for it in the type's vector of small shared ints. */
1973 if (!TYPE_CACHED_VALUES_P (type))
1975 TYPE_CACHED_VALUES_P (type) = 1;
1976 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1979 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1981 gcc_checking_assert (might_duplicate);
1982 t = r;
1984 else
1985 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1987 else
1989 /* Use the cache of larger shared ints. */
1990 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1991 if (tree r = *slot)
1993 /* If there is already an entry for the number verify it's the
1994 same value. */
1995 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1996 /* And return the cached value. */
1997 t = r;
1999 else
2000 /* Otherwise insert this one into the hash table. */
2001 *slot = t;
2004 return t;
2008 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
2009 and the rest are zeros. */
2011 tree
2012 build_low_bits_mask (tree type, unsigned bits)
2014 gcc_assert (bits <= TYPE_PRECISION (type));
2016 return wide_int_to_tree (type, wi::mask (bits, false,
2017 TYPE_PRECISION (type)));
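/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of build_low_bits_mask; the example_* name is hypothetical.]  */
static tree
example_byte_mask (void)
{
  /* An unsigned constant with only the low 8 bits set, i.e. 0xff.  */
  return build_low_bits_mask (unsigned_type_node, 8);
}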
2020 /* Checks that X is an integer constant that can be expressed in
2021 (unsigned) HOST_WIDE_INT without loss of precision. */
2023 bool
2024 cst_and_fits_in_hwi (const_tree x)
2026 return (TREE_CODE (x) == INTEGER_CST
2027 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2030 /* Build a newly constructed VECTOR_CST with the given values of
2031 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2033 tree
2034 make_vector (unsigned log2_npatterns,
2035 unsigned int nelts_per_pattern MEM_STAT_DECL)
2037 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2038 tree t;
2039 unsigned npatterns = 1 << log2_npatterns;
2040 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2041 unsigned length = (sizeof (struct tree_vector)
2042 + (encoded_nelts - 1) * sizeof (tree));
2044 record_node_allocation_statistics (VECTOR_CST, length);
2046 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2048 TREE_SET_CODE (t, VECTOR_CST);
2049 TREE_CONSTANT (t) = 1;
2050 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2051 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2053 return t;
2056 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2057 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2059 tree
2060 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2062 if (vec_safe_length (v) == 0)
2063 return build_zero_cst (type);
2065 unsigned HOST_WIDE_INT idx, nelts;
2066 tree value;
2068 /* We can't construct a VECTOR_CST for a variable number of elements. */
2069 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2070 tree_vector_builder vec (type, nelts, 1);
2071 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2073 if (TREE_CODE (value) == VECTOR_CST)
2075 /* If NELTS is constant then this must be too. */
2076 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2077 for (unsigned i = 0; i < sub_nelts; ++i)
2078 vec.quick_push (VECTOR_CST_ELT (value, i));
2080 else
2081 vec.quick_push (value);
2083 while (vec.length () < nelts)
2084 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2086 return vec.build ();
2089 /* Build a vector of type VECTYPE where all the elements are SCs. */
2090 tree
2091 build_vector_from_val (tree vectype, tree sc)
2093 unsigned HOST_WIDE_INT i, nunits;
2095 if (sc == error_mark_node)
2096 return sc;
2098 /* Verify that the vector type is suitable for SC. Note that there
2099 is some inconsistency in the type-system with respect to restrict
2100 qualifications of pointers. Vector types always have a main-variant
2101 element type and the qualification is applied to the vector-type.
2102 So TREE_TYPE (vector-type) does not return a properly qualified
2103 vector element-type. */
2104 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2105 TREE_TYPE (vectype)));
2107 if (CONSTANT_CLASS_P (sc))
2109 tree_vector_builder v (vectype, 1, 1);
2110 v.quick_push (sc);
2111 return v.build ();
2113 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2114 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2115 else
2117 vec<constructor_elt, va_gc> *v;
2118 vec_alloc (v, nunits);
2119 for (i = 0; i < nunits; ++i)
2120 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2121 return build_constructor (vectype, v);
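/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of splatting a scalar across a vector type; the example_* name is
   hypothetical and VEC_TYPE is assumed to be a vector of integers.]  */
static tree
example_splat_four (tree vec_type)
{
  tree four = build_int_cst (TREE_TYPE (vec_type), 4);
  /* A constant scalar yields a VECTOR_CST; otherwise a CONSTRUCTOR or
     VEC_DUPLICATE_EXPR is built, as implemented above.  */
  return build_vector_from_val (vec_type, four);
}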
2125 /* If TYPE is not a vector type, just return SC, otherwise return
2126 build_vector_from_val (TYPE, SC). */
2128 tree
2129 build_uniform_cst (tree type, tree sc)
2131 if (!VECTOR_TYPE_P (type))
2132 return sc;
2134 return build_vector_from_val (type, sc);
2137 /* Build a vector series of type TYPE in which element I has the value
2138 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2139 and a VEC_SERIES_EXPR otherwise. */
2141 tree
2142 build_vec_series (tree type, tree base, tree step)
2144 if (integer_zerop (step))
2145 return build_vector_from_val (type, base);
2146 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2148 tree_vector_builder builder (type, 1, 3);
2149 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2150 wi::to_wide (base) + wi::to_wide (step));
2151 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2152 wi::to_wide (elt1) + wi::to_wide (step));
2153 builder.quick_push (base);
2154 builder.quick_push (elt1);
2155 builder.quick_push (elt2);
2156 return builder.build ();
2158 return build2 (VEC_SERIES_EXPR, type, base, step);
2161 /* Return a vector with the same number of units and number of bits
2162 as VEC_TYPE, but in which the elements are a linear series of unsigned
2163 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2165 tree
2166 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2168 tree index_vec_type = vec_type;
2169 tree index_elt_type = TREE_TYPE (vec_type);
2170 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2171 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2173 index_elt_type = build_nonstandard_integer_type
2174 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2175 index_vec_type = build_vector_type (index_elt_type, nunits);
2178 tree_vector_builder v (index_vec_type, 1, 3);
2179 for (unsigned int i = 0; i < 3; ++i)
2180 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2181 return v.build ();
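/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of building the linear series { 0, 1, 2, ... }; the example_*
   name is hypothetical and VEC_TYPE is assumed to be an integer vector.]  */
static tree
example_iota (tree vec_type)
{
  tree elt_type = TREE_TYPE (vec_type);
  tree base = build_int_cst (elt_type, 0);
  tree step = build_int_cst (elt_type, 1);
  /* Constant BASE and STEP give a VECTOR_CST; otherwise build_vec_series
     falls back to a VEC_SERIES_EXPR.  */
  return build_vec_series (vec_type, base, step);
}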
2184 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2185 elements are A and the rest are B. */
2187 tree
2188 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2190 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2191 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2192 /* Optimize the constant case. */
2193 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2194 count /= 2;
2195 tree_vector_builder builder (vec_type, count, 2);
2196 for (unsigned int i = 0; i < count * 2; ++i)
2197 builder.quick_push (i < num_a ? a : b);
2198 return builder.build ();
2201 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2202 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2204 void
2205 recompute_constructor_flags (tree c)
2207 unsigned int i;
2208 tree val;
2209 bool constant_p = true;
2210 bool side_effects_p = false;
2211 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2213 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2215 /* Mostly ctors will have elts that don't have side-effects, so
2216 the usual case is to scan all the elements. Hence a single
2217 loop for both const and side effects, rather than one loop
2218 each (with early outs). */
2219 if (!TREE_CONSTANT (val))
2220 constant_p = false;
2221 if (TREE_SIDE_EFFECTS (val))
2222 side_effects_p = true;
2225 TREE_SIDE_EFFECTS (c) = side_effects_p;
2226 TREE_CONSTANT (c) = constant_p;
2229 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2230 CONSTRUCTOR C. */
2232 void
2233 verify_constructor_flags (tree c)
2235 unsigned int i;
2236 tree val;
2237 bool constant_p = TREE_CONSTANT (c);
2238 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2239 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2241 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2243 if (constant_p && !TREE_CONSTANT (val))
2244 internal_error ("non-constant element in constant CONSTRUCTOR");
2245 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2246 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2250 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2251 are in the vec pointed to by VALS. */
2252 tree
2253 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2255 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2257 TREE_TYPE (c) = type;
2258 CONSTRUCTOR_ELTS (c) = vals;
2260 recompute_constructor_flags (c);
2262 return c;
2265 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2266 INDEX and VALUE. */
2267 tree
2268 build_constructor_single (tree type, tree index, tree value)
2270 vec<constructor_elt, va_gc> *v;
2271 constructor_elt elt = {index, value};
2273 vec_alloc (v, 1);
2274 v->quick_push (elt);
2276 return build_constructor (type, v);
2280 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2281 are in a list pointed to by VALS. */
2282 tree
2283 build_constructor_from_list (tree type, tree vals)
2285 tree t;
2286 vec<constructor_elt, va_gc> *v = NULL;
2288 if (vals)
2290 vec_alloc (v, list_length (vals));
2291 for (t = vals; t; t = TREE_CHAIN (t))
2292 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2295 return build_constructor (type, v);
2298 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2299 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2300 fields in the constructor remain null. */
2302 tree
2303 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2305 vec<constructor_elt, va_gc> *v = NULL;
2307 for (tree t : vals)
2308 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2310 return build_constructor (type, v);
2313 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2314 of elements, provided as index/value pairs. */
2316 tree
2317 build_constructor_va (tree type, int nelts, ...)
2319 vec<constructor_elt, va_gc> *v = NULL;
2320 va_list p;
2322 va_start (p, nelts);
2323 vec_alloc (v, nelts);
2324 while (nelts--)
2326 tree index = va_arg (p, tree);
2327 tree value = va_arg (p, tree);
2328 CONSTRUCTOR_APPEND_ELT (v, index, value);
2330 va_end (p);
2331 return build_constructor (type, v);
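/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of building the initializer { [0] = 1, [1] = 2 }; the example_*
   name is hypothetical and ARRAY_TYPE is assumed to be a two-element
   integer array type.]  */
static tree
example_pair_ctor (tree array_type)
{
  tree elt_type = TREE_TYPE (array_type);
  return build_constructor_va (array_type, 2,
			       size_int (0), build_int_cst (elt_type, 1),
			       size_int (1), build_int_cst (elt_type, 2));
}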
2334 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2336 tree
2337 build_clobber (tree type, enum clobber_kind kind)
2339 tree clobber = build_constructor (type, NULL);
2340 TREE_THIS_VOLATILE (clobber) = true;
2341 CLOBBER_KIND (clobber) = kind;
2342 return clobber;
2345 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2347 tree
2348 build_fixed (tree type, FIXED_VALUE_TYPE f)
2350 tree v;
2351 FIXED_VALUE_TYPE *fp;
2353 v = make_node (FIXED_CST);
2354 fp = ggc_alloc<fixed_value> ();
2355 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2357 TREE_TYPE (v) = type;
2358 TREE_FIXED_CST_PTR (v) = fp;
2359 return v;
2362 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2364 tree
2365 build_real (tree type, REAL_VALUE_TYPE d)
2367 tree v;
2368 int overflow = 0;
2370 /* dconst{0,1,2,m1,half} are used in various places in
2371 the middle-end and optimizers; allow them here
2372 even for decimal floating point types, as an exception,
2373 by converting them to decimal. */
2374 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2375 && (d.cl == rvc_normal || d.cl == rvc_zero)
2376 && !d.decimal)
2378 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2379 decimal_real_from_string (&d, "1");
2380 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2381 decimal_real_from_string (&d, "2");
2382 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2383 decimal_real_from_string (&d, "-1");
2384 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2385 decimal_real_from_string (&d, "0.5");
2386 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2388 /* Make sure to give zero the minimum quantum exponent for
2389 the type (which corresponds to all bits zero). */
2390 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2391 char buf[16];
2392 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2393 decimal_real_from_string (&d, buf);
2395 else
2396 gcc_unreachable ();
2399 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2400 Consider doing it via real_convert now. */
2402 v = make_node (REAL_CST);
2403 TREE_TYPE (v) = type;
2404 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2405 TREE_OVERFLOW (v) = overflow;
2406 return v;
2409 /* Like build_real, but first truncate D to the type. */
2411 tree
2412 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2414 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2417 /* Return a new REAL_CST node whose type is TYPE
2418 and whose value is the integer value of the INTEGER_CST node I. */
2420 REAL_VALUE_TYPE
2421 real_value_from_int_cst (const_tree type, const_tree i)
2423 REAL_VALUE_TYPE d;
2425 /* Clear all bits of the real value type so that we can later do
2426 bitwise comparisons to see if two values are the same. */
2427 memset (&d, 0, sizeof d);
2429 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2430 TYPE_SIGN (TREE_TYPE (i)));
2431 return d;
2434 /* Given a tree representing an integer constant I, return a tree
2435 representing the same value as a floating-point constant of type TYPE. */
2437 tree
2438 build_real_from_int_cst (tree type, const_tree i)
2440 tree v;
2441 int overflow = TREE_OVERFLOW (i);
2443 v = build_real (type, real_value_from_int_cst (type, i));
2445 TREE_OVERFLOW (v) |= overflow;
2446 return v;
2449 /* Return a new REAL_CST node whose type is TYPE
2450 and whose value is the integer value I which has sign SGN. */
2452 tree
2453 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2455 REAL_VALUE_TYPE d;
2457 /* Clear all bits of the real value type so that we can later do
2458 bitwise comparisons to see if two values are the same. */
2459 memset (&d, 0, sizeof d);
2461 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2462 return build_real (type, d);
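/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of building REAL_CSTs; the example_* name is hypothetical and
   INT_CST is assumed to be an INTEGER_CST.]  */
static tree
example_real_constants (const_tree int_cst)
{
  /* 1.0 of type double, from the shared dconst1 value.  */
  tree one = build_real (double_type_node, dconst1);
  /* INT_CST converted to double; any TREE_OVERFLOW flag is carried over.  */
  tree conv = build_real_from_int_cst (double_type_node, int_cst);
  return fold_build2 (PLUS_EXPR, double_type_node, one, conv);
}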
2465 /* Return a newly constructed STRING_CST node whose value is the LEN
2466 characters at STR when STR is nonnull, or all zeros otherwise.
2467 Note that for a C string literal, LEN should include the trailing NUL.
2468 The TREE_TYPE is not initialized. */
2470 tree
2471 build_string (unsigned len, const char *str /*= NULL */)
2473 /* Do not waste bytes provided by padding of struct tree_string. */
2474 unsigned size = len + offsetof (struct tree_string, str) + 1;
2476 record_node_allocation_statistics (STRING_CST, size);
2478 tree s = (tree) ggc_internal_alloc (size);
2480 memset (s, 0, sizeof (struct tree_typed));
2481 TREE_SET_CODE (s, STRING_CST);
2482 TREE_CONSTANT (s) = 1;
2483 TREE_STRING_LENGTH (s) = len;
2484 if (str)
2485 memcpy (s->string.str, str, len);
2486 else
2487 memset (s->string.str, 0, len);
2488 s->string.str[len] = '\0';
2490 return s;
2493 /* Return a newly constructed COMPLEX_CST node whose value is
2494 specified by the real and imaginary parts REAL and IMAG.
2495 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2496 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2498 tree
2499 build_complex (tree type, tree real, tree imag)
2501 gcc_assert (CONSTANT_CLASS_P (real));
2502 gcc_assert (CONSTANT_CLASS_P (imag));
2504 tree t = make_node (COMPLEX_CST);
2506 TREE_REALPART (t) = real;
2507 TREE_IMAGPART (t) = imag;
2508 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2509 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2510 return t;
2513 /* Build a complex (inf +- 0i), such as for the result of cproj.
2514 TYPE is the complex tree type of the result. If NEG is true, the
2515 imaginary zero is negative. */
2517 tree
2518 build_complex_inf (tree type, bool neg)
2520 REAL_VALUE_TYPE rzero = dconst0;
2522 rzero.sign = neg;
2523 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2524 build_real (TREE_TYPE (type), rzero));
2527 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2528 element is set to 1. In particular, this is 1 + i for complex types. */
2530 tree
2531 build_each_one_cst (tree type)
2533 if (TREE_CODE (type) == COMPLEX_TYPE)
2535 tree scalar = build_one_cst (TREE_TYPE (type));
2536 return build_complex (type, scalar, scalar);
2538 else
2539 return build_one_cst (type);
2542 /* Return a constant of arithmetic type TYPE which is the
2543 multiplicative identity of the set TYPE. */
2545 tree
2546 build_one_cst (tree type)
2548 switch (TREE_CODE (type))
2550 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2551 case POINTER_TYPE: case REFERENCE_TYPE:
2552 case OFFSET_TYPE: case BITINT_TYPE:
2553 return build_int_cst (type, 1);
2555 case REAL_TYPE:
2556 return build_real (type, dconst1);
2558 case FIXED_POINT_TYPE:
2559 /* We can only generate 1 for accum types. */
2560 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2561 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2563 case VECTOR_TYPE:
2565 tree scalar = build_one_cst (TREE_TYPE (type));
2567 return build_vector_from_val (type, scalar);
2570 case COMPLEX_TYPE:
2571 return build_complex (type,
2572 build_one_cst (TREE_TYPE (type)),
2573 build_zero_cst (TREE_TYPE (type)));
2575 default:
2576 gcc_unreachable ();
2580 /* Return an integer of type TYPE containing all 1's in as much precision as
2581 it contains, or a complex or vector whose subparts are such integers. */
2583 tree
2584 build_all_ones_cst (tree type)
2586 if (TREE_CODE (type) == COMPLEX_TYPE)
2588 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2589 return build_complex (type, scalar, scalar);
2591 else
2592 return build_minus_one_cst (type);
2595 /* Return a constant of arithmetic type TYPE which is the
2596 opposite of the multiplicative identity of the set TYPE. */
2598 tree
2599 build_minus_one_cst (tree type)
2601 switch (TREE_CODE (type))
2603 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2604 case POINTER_TYPE: case REFERENCE_TYPE:
2605 case OFFSET_TYPE: case BITINT_TYPE:
2606 return build_int_cst (type, -1);
2608 case REAL_TYPE:
2609 return build_real (type, dconstm1);
2611 case FIXED_POINT_TYPE:
2612 /* As in build_one_cst, we only handle accum types here. */
2613 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2614 return build_fixed (type,
2615 fixed_from_double_int (double_int_minus_one,
2616 SCALAR_TYPE_MODE (type)));
2618 case VECTOR_TYPE:
2620 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2622 return build_vector_from_val (type, scalar);
2625 case COMPLEX_TYPE:
2626 return build_complex (type,
2627 build_minus_one_cst (TREE_TYPE (type)),
2628 build_zero_cst (TREE_TYPE (type)));
2630 default:
2631 gcc_unreachable ();
2635 /* Build 0 constant of type TYPE. This is used by constructor folding
2636 and thus the constant should be represented in memory by
2637 zero(es). */
2639 tree
2640 build_zero_cst (tree type)
2642 switch (TREE_CODE (type))
2644 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2645 case POINTER_TYPE: case REFERENCE_TYPE:
2646 case OFFSET_TYPE: case NULLPTR_TYPE: case BITINT_TYPE:
2647 return build_int_cst (type, 0);
2649 case REAL_TYPE:
2650 return build_real (type, dconst0);
2652 case FIXED_POINT_TYPE:
2653 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2655 case VECTOR_TYPE:
2657 tree scalar = build_zero_cst (TREE_TYPE (type));
2659 return build_vector_from_val (type, scalar);
2662 case COMPLEX_TYPE:
2664 tree zero = build_zero_cst (TREE_TYPE (type));
2666 return build_complex (type, zero, zero);
2669 default:
2670 if (!AGGREGATE_TYPE_P (type))
2671 return fold_convert (type, integer_zero_node);
2672 return build_constructor (type, NULL);
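/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of the identity-constant builders; the example_* name is
   hypothetical.]  */
static tree
example_identity_constants (tree type)
{
  /* The additive identity: 0, 0.0, a zero vector, or an empty
     CONSTRUCTOR for aggregate types.  */
  tree zero = build_zero_cst (type);
  /* The multiplicative identity: 1, 1.0, 1 + 0i, or a splat of 1.  */
  tree one = build_one_cst (type);
  /* Pair them in a TREE_LIST purely for the sake of the example.  */
  return tree_cons (zero, one, NULL_TREE);
}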
2676 /* Build a constant of integer type TYPE, made of VALUE's bits replicated
2677 every WIDTH bits to fit TYPE's precision. */
2679 tree
2680 build_replicated_int_cst (tree type, unsigned int width, HOST_WIDE_INT value)
2682 int n = ((TYPE_PRECISION (type) + HOST_BITS_PER_WIDE_INT - 1)
2683 / HOST_BITS_PER_WIDE_INT);
2684 unsigned HOST_WIDE_INT low, mask;
2685 HOST_WIDE_INT a[WIDE_INT_MAX_INL_ELTS];
2686 int i;
2688 gcc_assert (n && n <= WIDE_INT_MAX_INL_ELTS);
2690 if (width == HOST_BITS_PER_WIDE_INT)
2691 low = value;
2692 else
2694 mask = (HOST_WIDE_INT_1U << width) - 1;
2695 low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask);
2698 for (i = 0; i < n; i++)
2699 a[i] = low;
2701 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
2702 return wide_int_to_tree (type, wide_int::from_array (a, n,
2703 TYPE_PRECISION (type)));
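/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of build_replicated_int_cst; the example_* name is hypothetical.
   Assuming 8-bit bytes and a 32-bit unsigned int, the result is
   0x01010101.]  */
static tree
example_replicated_bytes (void)
{
  return build_replicated_int_cst (unsigned_type_node, BITS_PER_UNIT, 1);
}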
2706 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2707 unsigned constant in which only the sign bit is set. Return null
2708 otherwise. */
2710 tree
2711 sign_mask_for (tree type)
2713 /* Avoid having to choose between a real-only sign and a pair of signs.
2714 This could be relaxed if the choice becomes obvious later. */
2715 if (TREE_CODE (type) == COMPLEX_TYPE)
2716 return NULL_TREE;
2718 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2719 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2720 if (!bits || !pow2p_hwi (bits))
2721 return NULL_TREE;
2723 tree inttype = unsigned_type_for (type);
2724 if (!inttype)
2725 return NULL_TREE;
2727 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2728 if (VECTOR_TYPE_P (inttype))
2730 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2731 return build_vector_from_val (inttype, elt);
2733 return wide_int_to_tree (inttype, mask);
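/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of sign_mask_for; the example_* name is hypothetical.]  */
static tree
example_float_sign_mask (void)
{
  /* On targets where float is IEEE binary32 this is expected to be the
     unsigned constant 0x80000000; NULL_TREE is returned when no suitable
     integer mask exists for the format.  */
  return sign_mask_for (float_type_node);
}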
2736 /* Build a BINFO with space for BASE_BINFOS base binfos. */
2738 tree
2739 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2741 tree t;
2742 size_t length = (offsetof (struct tree_binfo, base_binfos)
2743 + vec<tree, va_gc>::embedded_size (base_binfos));
2745 record_node_allocation_statistics (TREE_BINFO, length);
2747 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2749 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2751 TREE_SET_CODE (t, TREE_BINFO);
2753 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2755 return t;
2758 /* Create a CASE_LABEL_EXPR tree node and return it. */
2760 tree
2761 build_case_label (tree low_value, tree high_value, tree label_decl)
2763 tree t = make_node (CASE_LABEL_EXPR);
2765 TREE_TYPE (t) = void_type_node;
2766 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2768 CASE_LOW (t) = low_value;
2769 CASE_HIGH (t) = high_value;
2770 CASE_LABEL (t) = label_decl;
2771 CASE_CHAIN (t) = NULL_TREE;
2773 return t;
2776 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2777 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2778 The latter determines the length of the HOST_WIDE_INT vector. */
2780 tree
2781 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2783 tree t;
2784 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2785 + sizeof (struct tree_int_cst));
2787 gcc_assert (len);
2788 record_node_allocation_statistics (INTEGER_CST, length);
2790 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2792 TREE_SET_CODE (t, INTEGER_CST);
2793 TREE_INT_CST_NUNITS (t) = len;
2794 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2795 TREE_CONSTANT (t) = 1;
2797 return t;
2800 /* Build a newly constructed TREE_VEC node of length LEN. */
2802 tree
2803 make_tree_vec (int len MEM_STAT_DECL)
2805 tree t;
2806 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2808 record_node_allocation_statistics (TREE_VEC, length);
2810 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2812 TREE_SET_CODE (t, TREE_VEC);
2813 TREE_VEC_LENGTH (t) = len;
2815 return t;
2818 /* Grow a TREE_VEC node to new length LEN. */
2820 tree
2821 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2823 gcc_assert (TREE_CODE (v) == TREE_VEC);
2825 int oldlen = TREE_VEC_LENGTH (v);
2826 gcc_assert (len > oldlen);
2828 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2829 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2831 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2833 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2835 TREE_VEC_LENGTH (v) = len;
2837 return v;
2840 /* Return true if EXPR is the constant zero, whether it is integral, float or
2841 fixed, and scalar, complex or vector. */
2843 bool
2844 zerop (const_tree expr)
2846 return (integer_zerop (expr)
2847 || real_zerop (expr)
2848 || fixed_zerop (expr));
2851 /* Return true if EXPR is the integer constant zero or a complex constant
2852 of zero, or a location wrapper for such a constant. */
2854 bool
2855 integer_zerop (const_tree expr)
2857 STRIP_ANY_LOCATION_WRAPPER (expr);
2859 switch (TREE_CODE (expr))
2861 case INTEGER_CST:
2862 return wi::to_wide (expr) == 0;
2863 case COMPLEX_CST:
2864 return (integer_zerop (TREE_REALPART (expr))
2865 && integer_zerop (TREE_IMAGPART (expr)));
2866 case VECTOR_CST:
2867 return (VECTOR_CST_NPATTERNS (expr) == 1
2868 && VECTOR_CST_DUPLICATE_P (expr)
2869 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2870 default:
2871 return false;
2875 /* Return true if EXPR is the integer constant one or the corresponding
2876 complex constant, or a location wrapper for such a constant. */
2878 bool
2879 integer_onep (const_tree expr)
2881 STRIP_ANY_LOCATION_WRAPPER (expr);
2883 switch (TREE_CODE (expr))
2885 case INTEGER_CST:
2886 return wi::eq_p (wi::to_widest (expr), 1);
2887 case COMPLEX_CST:
2888 return (integer_onep (TREE_REALPART (expr))
2889 && integer_zerop (TREE_IMAGPART (expr)));
2890 case VECTOR_CST:
2891 return (VECTOR_CST_NPATTERNS (expr) == 1
2892 && VECTOR_CST_DUPLICATE_P (expr)
2893 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2894 default:
2895 return false;
2899 /* Return true if EXPR is the integer constant one. For complex and vector,
2900 return true if every piece is the integer constant one.
2901 Also return true for location wrappers for such a constant. */
2903 bool
2904 integer_each_onep (const_tree expr)
2906 STRIP_ANY_LOCATION_WRAPPER (expr);
2908 if (TREE_CODE (expr) == COMPLEX_CST)
2909 return (integer_onep (TREE_REALPART (expr))
2910 && integer_onep (TREE_IMAGPART (expr)));
2911 else
2912 return integer_onep (expr);
2915 /* Return true if EXPR is an integer containing all 1's in as much precision
2916 as it contains, or a complex or vector whose subparts are such integers,
2917 or a location wrapper for such a constant. */
2919 bool
2920 integer_all_onesp (const_tree expr)
2922 STRIP_ANY_LOCATION_WRAPPER (expr);
2924 if (TREE_CODE (expr) == COMPLEX_CST
2925 && integer_all_onesp (TREE_REALPART (expr))
2926 && integer_all_onesp (TREE_IMAGPART (expr)))
2927 return true;
2929 else if (TREE_CODE (expr) == VECTOR_CST)
2930 return (VECTOR_CST_NPATTERNS (expr) == 1
2931 && VECTOR_CST_DUPLICATE_P (expr)
2932 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2934 else if (TREE_CODE (expr) != INTEGER_CST)
2935 return false;
2937 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2938 == wi::to_wide (expr));
2941 /* Return true if EXPR is the integer constant minus one, or a location
2942 wrapper for such a constant. */
2944 bool
2945 integer_minus_onep (const_tree expr)
2947 STRIP_ANY_LOCATION_WRAPPER (expr);
2949 if (TREE_CODE (expr) == COMPLEX_CST)
2950 return (integer_all_onesp (TREE_REALPART (expr))
2951 && integer_zerop (TREE_IMAGPART (expr)));
2952 else
2953 return integer_all_onesp (expr);
2956 /* Return true if EXPR is an integer constant that is a power of 2 (i.e., has
2957 only one bit on), or a location wrapper for such a constant. */
2959 bool
2960 integer_pow2p (const_tree expr)
2962 STRIP_ANY_LOCATION_WRAPPER (expr);
2964 if (TREE_CODE (expr) == COMPLEX_CST
2965 && integer_pow2p (TREE_REALPART (expr))
2966 && integer_zerop (TREE_IMAGPART (expr)))
2967 return true;
2969 if (TREE_CODE (expr) != INTEGER_CST)
2970 return false;
2972 return wi::popcount (wi::to_wide (expr)) == 1;
2975 /* Return true if EXPR is an integer constant other than zero or a
2976 complex constant other than zero, or a location wrapper for such a
2977 constant. */
2979 bool
2980 integer_nonzerop (const_tree expr)
2982 STRIP_ANY_LOCATION_WRAPPER (expr);
2984 return ((TREE_CODE (expr) == INTEGER_CST
2985 && wi::to_wide (expr) != 0)
2986 || (TREE_CODE (expr) == COMPLEX_CST
2987 && (integer_nonzerop (TREE_REALPART (expr))
2988 || integer_nonzerop (TREE_IMAGPART (expr)))));
2991 /* Return true if EXPR is the integer constant one. For vector,
2992 return true if every piece is the integer constant minus one
2993 (representing the value TRUE).
2994 Also return true for location wrappers for such a constant. */
2996 bool
2997 integer_truep (const_tree expr)
2999 STRIP_ANY_LOCATION_WRAPPER (expr);
3001 if (TREE_CODE (expr) == VECTOR_CST)
3002 return integer_all_onesp (expr);
3003 return integer_onep (expr);
3006 /* Return true if EXPR is the fixed-point constant zero, or a location wrapper
3007 for such a constant. */
3009 bool
3010 fixed_zerop (const_tree expr)
3012 STRIP_ANY_LOCATION_WRAPPER (expr);
3014 return (TREE_CODE (expr) == FIXED_CST
3015 && TREE_FIXED_CST (expr).data.is_zero ());
3018 /* Return the power of two represented by a tree node known to be a
3019 power of two. */
3021 int
3022 tree_log2 (const_tree expr)
3024 if (TREE_CODE (expr) == COMPLEX_CST)
3025 return tree_log2 (TREE_REALPART (expr));
3027 return wi::exact_log2 (wi::to_wide (expr));
3030 /* Similar, but return the largest integer Y such that 2 ** Y is less
3031 than or equal to EXPR. */
3033 int
3034 tree_floor_log2 (const_tree expr)
3036 if (TREE_CODE (expr) == COMPLEX_CST)
3037 return tree_log2 (TREE_REALPART (expr));
3039 return wi::floor_log2 (wi::to_wide (expr));
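/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of combining integer_pow2p and tree_log2; the example_* name is
   hypothetical.]  */
static int
example_log2_if_pow2 (tree expr)
{
  /* tree_log2 assumes its argument is a power of two, so guard the call
     with integer_pow2p on an INTEGER_CST.  */
  if (TREE_CODE (expr) == INTEGER_CST && integer_pow2p (expr))
    return tree_log2 (expr);
  return -1;
}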
3042 /* Return number of known trailing zero bits in EXPR, or, if the value of
3043 EXPR is known to be zero, the precision of its type. */
3045 unsigned int
3046 tree_ctz (const_tree expr)
3048 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3049 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3050 return 0;
3052 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3053 switch (TREE_CODE (expr))
3055 case INTEGER_CST:
3056 ret1 = wi::ctz (wi::to_wide (expr));
3057 return MIN (ret1, prec);
3058 case SSA_NAME:
3059 ret1 = wi::ctz (get_nonzero_bits (expr));
3060 return MIN (ret1, prec);
3061 case PLUS_EXPR:
3062 case MINUS_EXPR:
3063 case BIT_IOR_EXPR:
3064 case BIT_XOR_EXPR:
3065 case MIN_EXPR:
3066 case MAX_EXPR:
3067 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3068 if (ret1 == 0)
3069 return ret1;
3070 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3071 return MIN (ret1, ret2);
3072 case POINTER_PLUS_EXPR:
3073 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3074 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3075 /* Second operand is sizetype, which could be in theory
3076 wider than pointer's precision. Make sure we never
3077 return more than prec. */
3078 ret2 = MIN (ret2, prec);
3079 return MIN (ret1, ret2);
3080 case BIT_AND_EXPR:
3081 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3082 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3083 return MAX (ret1, ret2);
3084 case MULT_EXPR:
3085 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3086 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3087 return MIN (ret1 + ret2, prec);
3088 case LSHIFT_EXPR:
3089 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3090 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3091 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3093 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3094 return MIN (ret1 + ret2, prec);
3096 return ret1;
3097 case RSHIFT_EXPR:
3098 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3099 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3101 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3102 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3103 if (ret1 > ret2)
3104 return ret1 - ret2;
3106 return 0;
3107 case TRUNC_DIV_EXPR:
3108 case CEIL_DIV_EXPR:
3109 case FLOOR_DIV_EXPR:
3110 case ROUND_DIV_EXPR:
3111 case EXACT_DIV_EXPR:
3112 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3113 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3115 int l = tree_log2 (TREE_OPERAND (expr, 1));
3116 if (l >= 0)
3118 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3119 ret2 = l;
3120 if (ret1 > ret2)
3121 return ret1 - ret2;
3124 return 0;
3125 CASE_CONVERT:
3126 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3127 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3128 ret1 = prec;
3129 return MIN (ret1, prec);
3130 case SAVE_EXPR:
3131 return tree_ctz (TREE_OPERAND (expr, 0));
3132 case COND_EXPR:
3133 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3134 if (ret1 == 0)
3135 return 0;
3136 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3137 return MIN (ret1, ret2);
3138 case COMPOUND_EXPR:
3139 return tree_ctz (TREE_OPERAND (expr, 1));
3140 case ADDR_EXPR:
3141 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3142 if (ret1 > BITS_PER_UNIT)
3144 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3145 return MIN (ret1, prec);
3147 return 0;
3148 default:
3149 return 0;
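/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of using tree_ctz to derive a known alignment; the example_*
   name is hypothetical.]  */
static unsigned HOST_WIDE_INT
example_known_multiple (tree expr)
{
  /* tree_ctz composes through arithmetic: for (x * 4) + 8 it returns at
     least 2, since MULT_EXPR sums the counts and PLUS_EXPR takes their
     minimum.  The value is thus a multiple of 1 << tree_ctz (expr).  */
  unsigned int ctz = tree_ctz (expr);
  /* Clamp to avoid an oversized shift when EXPR is known to be zero.  */
  ctz = MIN (ctz, (unsigned int) HOST_BITS_PER_WIDE_INT - 1);
  return HOST_WIDE_INT_1U << ctz;
}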
3153 /* Return true if EXPR is the real constant zero. Trailing zeroes matter for
3154 decimal float constants, so don't return true for them.
3155 Also return true for location wrappers around such a constant. */
3157 bool
3158 real_zerop (const_tree expr)
3160 STRIP_ANY_LOCATION_WRAPPER (expr);
3162 switch (TREE_CODE (expr))
3164 case REAL_CST:
3165 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3166 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3167 case COMPLEX_CST:
3168 return real_zerop (TREE_REALPART (expr))
3169 && real_zerop (TREE_IMAGPART (expr));
3170 case VECTOR_CST:
3172 /* Don't simply check for a duplicate because the predicate
3173 accepts both +0.0 and -0.0. */
3174 unsigned count = vector_cst_encoded_nelts (expr);
3175 for (unsigned int i = 0; i < count; ++i)
3176 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3177 return false;
3178 return true;
3180 default:
3181 return false;
3185 /* Return true if EXPR is the real constant one in real or complex form.
3186 Trailing zeroes matter for decimal float constants, so don't return
3187 true for them.
3188 Also return true for location wrappers around such a constant. */
3190 bool
3191 real_onep (const_tree expr)
3193 STRIP_ANY_LOCATION_WRAPPER (expr);
3195 switch (TREE_CODE (expr))
3197 case REAL_CST:
3198 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3199 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3200 case COMPLEX_CST:
3201 return real_onep (TREE_REALPART (expr))
3202 && real_zerop (TREE_IMAGPART (expr));
3203 case VECTOR_CST:
3204 return (VECTOR_CST_NPATTERNS (expr) == 1
3205 && VECTOR_CST_DUPLICATE_P (expr)
3206 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3207 default:
3208 return false;
3212 /* Return true if EXPR is the real constant minus one. Trailing zeroes
3213 matter for decimal float constants, so don't return true for them.
3214 Also return true for location wrappers around such a constant. */
3216 bool
3217 real_minus_onep (const_tree expr)
3219 STRIP_ANY_LOCATION_WRAPPER (expr);
3221 switch (TREE_CODE (expr))
3223 case REAL_CST:
3224 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3225 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3226 case COMPLEX_CST:
3227 return real_minus_onep (TREE_REALPART (expr))
3228 && real_zerop (TREE_IMAGPART (expr));
3229 case VECTOR_CST:
3230 return (VECTOR_CST_NPATTERNS (expr) == 1
3231 && VECTOR_CST_DUPLICATE_P (expr)
3232 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3233 default:
3234 return false;
3238 /* Return true if T could be a floating point zero. */
3240 bool
3241 real_maybe_zerop (const_tree expr)
3243 switch (TREE_CODE (expr))
3245 case REAL_CST:
3246 /* Can't use real_zerop here, as it always returns false for decimal
3247 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3248 either, as decimal zeros are rvc_normal. */
3249 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3250 case COMPLEX_CST:
3251 return (real_maybe_zerop (TREE_REALPART (expr))
3252 || real_maybe_zerop (TREE_IMAGPART (expr)));
3253 case VECTOR_CST:
3255 unsigned count = vector_cst_encoded_nelts (expr);
3256 for (unsigned int i = 0; i < count; ++i)
3257 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3258 return true;
3259 return false;
3261 default:
3262 /* Perhaps for SSA_NAMEs we could query frange. */
3263 return true;
3267 /* True if EXP is a constant or a cast of a constant. */
3269 bool
3270 really_constant_p (const_tree exp)
3272 /* This is not quite the same as STRIP_NOPS. It does more. */
3273 while (CONVERT_EXPR_P (exp)
3274 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3275 exp = TREE_OPERAND (exp, 0);
3276 return TREE_CONSTANT (exp);
3279 /* Return true if T holds a polynomial pointer difference, storing it in
3280 *VALUE if so. A true return means that T's precision is no greater
3281 than 64 bits, which is the largest address space we support, so *VALUE
3282 never loses precision. However, the signedness of the result does
3283 not necessarily match the signedness of T: sometimes an unsigned type
3284 like sizetype is used to encode a value that is actually negative. */
3286 bool
3287 ptrdiff_tree_p (const_tree t, poly_int64 *value)
3289 if (!t)
3290 return false;
3291 if (TREE_CODE (t) == INTEGER_CST)
3293 if (!cst_and_fits_in_hwi (t))
3294 return false;
3295 *value = int_cst_value (t);
3296 return true;
3298 if (POLY_INT_CST_P (t))
3300 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3301 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3302 return false;
3303 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3304 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3305 return true;
3307 return false;
3310 poly_int64
3311 tree_to_poly_int64 (const_tree t)
3313 gcc_assert (tree_fits_poly_int64_p (t));
3314 if (POLY_INT_CST_P (t))
3315 return poly_int_cst_value (t).force_shwi ();
3316 return TREE_INT_CST_LOW (t);
3319 poly_uint64
3320 tree_to_poly_uint64 (const_tree t)
3322 gcc_assert (tree_fits_poly_uint64_p (t));
3323 if (POLY_INT_CST_P (t))
3324 return poly_int_cst_value (t).force_uhwi ();
3325 return TREE_INT_CST_LOW (t);
3328 /* Return first list element whose TREE_VALUE is ELEM.
3329 Return 0 if ELEM is not in LIST. */
3331 tree
3332 value_member (tree elem, tree list)
3334 while (list)
3336 if (elem == TREE_VALUE (list))
3337 return list;
3338 list = TREE_CHAIN (list);
3340 return NULL_TREE;
3343 /* Return first list element whose TREE_PURPOSE is ELEM.
3344 Return 0 if ELEM is not in LIST. */
3346 tree
3347 purpose_member (const_tree elem, tree list)
3349 while (list)
3351 if (elem == TREE_PURPOSE (list))
3352 return list;
3353 list = TREE_CHAIN (list);
3355 return NULL_TREE;
3358 /* Return true if ELEM is in V. */
3360 bool
3361 vec_member (const_tree elem, vec<tree, va_gc> *v)
3363 unsigned ix;
3364 tree t;
3365 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3366 if (elem == t)
3367 return true;
3368 return false;
3371 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3372 NULL_TREE. */
3374 tree
3375 chain_index (int idx, tree chain)
3377 for (; chain && idx > 0; --idx)
3378 chain = TREE_CHAIN (chain);
3379 return chain;
3382 /* Return true if ELEM is part of the chain CHAIN. */
3384 bool
3385 chain_member (const_tree elem, const_tree chain)
3387 while (chain)
3389 if (elem == chain)
3390 return true;
3391 chain = DECL_CHAIN (chain);
3394 return false;
3397 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3398 We expect a null pointer to mark the end of the chain.
3399 This is the Lisp primitive `length'. */
3401 int
3402 list_length (const_tree t)
3404 const_tree p = t;
3405 #ifdef ENABLE_TREE_CHECKING
3406 const_tree q = t;
3407 #endif
3408 int len = 0;
3410 while (p)
3412 p = TREE_CHAIN (p);
3413 #ifdef ENABLE_TREE_CHECKING
3414 if (len % 2)
3415 q = TREE_CHAIN (q);
3416 gcc_assert (p != q);
3417 #endif
3418 len++;
3421 return len;
3424 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3425 UNION_TYPE TYPE, or NULL_TREE if none. */
3427 tree
3428 first_field (const_tree type)
3430 tree t = TYPE_FIELDS (type);
3431 while (t && TREE_CODE (t) != FIELD_DECL)
3432 t = TREE_CHAIN (t);
3433 return t;
3436 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3437 UNION_TYPE TYPE, or NULL_TREE if none. */
3439 tree
3440 last_field (const_tree type)
3442 tree last = NULL_TREE;
3444 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3446 if (TREE_CODE (fld) != FIELD_DECL)
3447 continue;
3449 last = fld;
3452 return last;
3455 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3456 by modifying the last node in chain 1 to point to chain 2.
3457 This is the Lisp primitive `nconc'. */
3459 tree
3460 chainon (tree op1, tree op2)
3462 tree t1;
3464 if (!op1)
3465 return op2;
3466 if (!op2)
3467 return op1;
3469 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3470 continue;
3471 TREE_CHAIN (t1) = op2;
3473 #ifdef ENABLE_TREE_CHECKING
3475 tree t2;
3476 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3477 gcc_assert (t2 != t1);
3479 #endif
3481 return op1;
3484 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3486 tree
3487 tree_last (tree chain)
3489 tree next;
3490 if (chain)
3491 while ((next = TREE_CHAIN (chain)))
3492 chain = next;
3493 return chain;
3496 /* Reverse the order of elements in the chain T,
3497 and return the new head of the chain (old last element). */
3499 tree
3500 nreverse (tree t)
3502 tree prev = 0, decl, next;
3503 for (decl = t; decl; decl = next)
3505 /* We shouldn't be using this function to reverse BLOCK chains; we
3506 have blocks_nreverse for that. */
3507 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3508 next = TREE_CHAIN (decl);
3509 TREE_CHAIN (decl) = prev;
3510 prev = decl;
3512 return prev;
3515 /* Return a newly created TREE_LIST node whose
3516 purpose and value fields are PARM and VALUE. */
3518 tree
3519 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3521 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3522 TREE_PURPOSE (t) = parm;
3523 TREE_VALUE (t) = value;
3524 return t;
3527 /* Build a chain of TREE_LIST nodes from a vector. */
3529 tree
3530 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3532 tree ret = NULL_TREE;
3533 tree *pp = &ret;
3534 unsigned int i;
3535 tree t;
3536 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3538 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3539 pp = &TREE_CHAIN (*pp);
3541 return ret;
3544 /* Return a newly created TREE_LIST node whose
3545 purpose and value fields are PURPOSE and VALUE
3546 and whose TREE_CHAIN is CHAIN. */
3548 tree
3549 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3551 tree node;
3553 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3554 memset (node, 0, sizeof (struct tree_common));
3556 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3558 TREE_SET_CODE (node, TREE_LIST);
3559 TREE_CHAIN (node) = chain;
3560 TREE_PURPOSE (node) = purpose;
3561 TREE_VALUE (node) = value;
3562 return node;
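/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of the TREE_LIST chain primitives; the example_* name and its
   arguments are hypothetical.]  */
static tree
example_three_element_list (tree a, tree b, tree c)
{
  /* Build the chain (a b c) by consing in reverse order, Lisp-style.  */
  tree list = tree_cons (NULL_TREE, c, NULL_TREE);
  list = tree_cons (NULL_TREE, b, list);
  list = tree_cons (NULL_TREE, a, list);
  gcc_checking_assert (list_length (list) == 3);
  return list;
}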
3565 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3566 trees. */
3568 vec<tree, va_gc> *
3569 ctor_to_vec (tree ctor)
3571 vec<tree, va_gc> *vec;
3572 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3573 unsigned int ix;
3574 tree val;
3576 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3577 vec->quick_push (val);
3579 return vec;
3582 /* Return the size nominally occupied by an object of type TYPE
3583 when it resides in memory. The value is measured in units of bytes,
3584 and its data type is that normally used for type sizes
3585 (which is the first type created by make_signed_type or
3586 make_unsigned_type). */
3588 tree
3589 size_in_bytes_loc (location_t loc, const_tree type)
3591 tree t;
3593 if (type == error_mark_node)
3594 return integer_zero_node;
3596 type = TYPE_MAIN_VARIANT (type);
3597 t = TYPE_SIZE_UNIT (type);
3599 if (t == 0)
3601 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3602 return size_zero_node;
3605 return t;
3608 /* Return the size of TYPE (in bytes) as a wide integer
3609 or return -1 if the size can vary or is larger than an integer. */
3611 HOST_WIDE_INT
3612 int_size_in_bytes (const_tree type)
3614 tree t;
3616 if (type == error_mark_node)
3617 return 0;
3619 type = TYPE_MAIN_VARIANT (type);
3620 t = TYPE_SIZE_UNIT (type);
3622 if (t && tree_fits_uhwi_p (t))
3623 return TREE_INT_CST_LOW (t);
3624 else
3625 return -1;
3628 /* Return the maximum size of TYPE (in bytes) as a wide integer
3629 or return -1 if the size can vary or is larger than an integer. */
3631 HOST_WIDE_INT
3632 max_int_size_in_bytes (const_tree type)
3634 HOST_WIDE_INT size = -1;
3635 tree size_tree;
3637 /* If this is an array type, check for a possible MAX_SIZE attached. */
3639 if (TREE_CODE (type) == ARRAY_TYPE)
3641 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3643 if (size_tree && tree_fits_uhwi_p (size_tree))
3644 size = tree_to_uhwi (size_tree);
3647 /* If we still haven't been able to get a size, see if the language
3648 can compute a maximum size. */
3650 if (size == -1)
3652 size_tree = lang_hooks.types.max_size (type);
3654 if (size_tree && tree_fits_uhwi_p (size_tree))
3655 size = tree_to_uhwi (size_tree);
3658 return size;
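/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of using int_size_in_bytes; the example_* name is hypothetical.]  */
static bool
example_fits_in_buffer (const_tree type, HOST_WIDE_INT buffer_size)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  /* A negative result means the size is variable or not representable.  */
  return size >= 0 && size <= buffer_size;
}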
3661 /* Return the bit position of FIELD, in bits from the start of the record.
3662 This is a tree of type bitsizetype. */
3664 tree
3665 bit_position (const_tree field)
3667 return bit_from_pos (DECL_FIELD_OFFSET (field),
3668 DECL_FIELD_BIT_OFFSET (field));
3671 /* Return the byte position of FIELD, in bytes from the start of the record.
3672 This is a tree of type sizetype. */
3674 tree
3675 byte_position (const_tree field)
3677 return byte_from_pos (DECL_FIELD_OFFSET (field),
3678 DECL_FIELD_BIT_OFFSET (field));
3681 /* Likewise, but return as an integer. It must be representable in
3682 that way (since it could be a signed value, we don't have the
3683 option of returning -1 like int_size_in_bytes can). */
3685 HOST_WIDE_INT
3686 int_byte_position (const_tree field)
3688 return tree_to_shwi (byte_position (field));
3691 /* Return, as a tree node, the number of elements for TYPE (which is an
3692 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3694 tree
3695 array_type_nelts (const_tree type)
3697 tree index_type, min, max;
3699 /* If they did it with unspecified bounds, then we should have already
3700 given an error about it before we got here. */
3701 if (! TYPE_DOMAIN (type))
3702 return error_mark_node;
3704 index_type = TYPE_DOMAIN (type);
3705 min = TYPE_MIN_VALUE (index_type);
3706 max = TYPE_MAX_VALUE (index_type);
3708 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3709 if (!max)
3711 /* Zero-sized arrays are represented by the C FE as complete types with
3712 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3713 them as min 0, max -1. */
3714 if (COMPLETE_TYPE_P (type)
3715 && integer_zerop (TYPE_SIZE (type))
3716 && integer_zerop (min))
3717 return build_int_cst (TREE_TYPE (min), -1);
3719 return error_mark_node;
3722 return (integer_zerop (min)
3723 ? max
3724 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3727 /* If arg is static -- a reference to an object in static storage -- then
3728 return the object. This is not the same as the C meaning of `static'.
3729 If arg isn't static, return NULL. */
3731 tree
3732 staticp (tree arg)
3734 switch (TREE_CODE (arg))
3736 case FUNCTION_DECL:
3737 /* Nested functions are static, even though taking their address will
3738 involve a trampoline as we unnest the nested function and create
3739 the trampoline on the tree level. */
3740 return arg;
3742 case VAR_DECL:
3743 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3744 && ! DECL_THREAD_LOCAL_P (arg)
3745 && ! DECL_DLLIMPORT_P (arg)
3746 ? arg : NULL);
3748 case CONST_DECL:
3749 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3750 ? arg : NULL);
3752 case CONSTRUCTOR:
3753 return TREE_STATIC (arg) ? arg : NULL;
3755 case LABEL_DECL:
3756 case STRING_CST:
3757 return arg;
3759 case COMPONENT_REF:
3760 /* If the thing being referenced is not a field, then it is
3761 something language specific. */
3762 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3764 /* If we are referencing a bitfield, we can't evaluate an
3765 ADDR_EXPR at compile time and so it isn't a constant. */
3766 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3767 return NULL;
3769 return staticp (TREE_OPERAND (arg, 0));
3771 case BIT_FIELD_REF:
3772 return NULL;
3774 case INDIRECT_REF:
3775 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3777 case ARRAY_REF:
3778 case ARRAY_RANGE_REF:
3779 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3780 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3781 return staticp (TREE_OPERAND (arg, 0));
3782 else
3783 return NULL;
3785 case COMPOUND_LITERAL_EXPR:
3786 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3788 default:
3789 return NULL;
3796 /* Return whether OP is a DECL whose address is function-invariant. */
3798 bool
3799 decl_address_invariant_p (const_tree op)
3801 /* The conditions below are slightly less strict than the one in
3802 staticp. */
3804 switch (TREE_CODE (op))
3806 case PARM_DECL:
3807 case RESULT_DECL:
3808 case LABEL_DECL:
3809 case FUNCTION_DECL:
3810 return true;
3812 case VAR_DECL:
3813 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3814 || DECL_THREAD_LOCAL_P (op)
3815 || DECL_CONTEXT (op) == current_function_decl
3816 || decl_function_context (op) == current_function_decl)
3817 return true;
3818 break;
3820 case CONST_DECL:
3821 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3822 || decl_function_context (op) == current_function_decl)
3823 return true;
3824 break;
3826 default:
3827 break;
3830 return false;
3833 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3835 bool
3836 decl_address_ip_invariant_p (const_tree op)
3838 /* The conditions below are slightly less strict than the one in
3839 staticp. */
3841 switch (TREE_CODE (op))
3843 case LABEL_DECL:
3844 case FUNCTION_DECL:
3845 case STRING_CST:
3846 return true;
3848 case VAR_DECL:
3849 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3850 && !DECL_DLLIMPORT_P (op))
3851 || DECL_THREAD_LOCAL_P (op))
3852 return true;
3853 break;
3855 case CONST_DECL:
3856 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3857 return true;
3858 break;
3860 default:
3861 break;
3864 return false;
3868 /* Return true if T is function-invariant (internal function, does
3869 not handle arithmetic; that's handled in skip_simple_arithmetic and
3870 tree_invariant_p). */
3872 static bool
3873 tree_invariant_p_1 (tree t)
3875 tree op;
3877 if (TREE_CONSTANT (t)
3878 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3879 return true;
3881 switch (TREE_CODE (t))
3883 case SAVE_EXPR:
3884 return true;
3886 case ADDR_EXPR:
3887 op = TREE_OPERAND (t, 0);
3888 while (handled_component_p (op))
3890 switch (TREE_CODE (op))
3892 case ARRAY_REF:
3893 case ARRAY_RANGE_REF:
3894 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3895 || TREE_OPERAND (op, 2) != NULL_TREE
3896 || TREE_OPERAND (op, 3) != NULL_TREE)
3897 return false;
3898 break;
3900 case COMPONENT_REF:
3901 if (TREE_OPERAND (op, 2) != NULL_TREE)
3902 return false;
3903 break;
3905 default:;
3907 op = TREE_OPERAND (op, 0);
3910 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3912 default:
3913 break;
3916 return false;
3919 /* Return true if T is function-invariant. */
3921 bool
3922 tree_invariant_p (tree t)
3924 tree inner = skip_simple_arithmetic (t);
3925 return tree_invariant_p_1 (inner);
3928 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3929 Do this to any expression which may be used in more than one place,
3930 but must be evaluated only once.
3932 Normally, expand_expr would reevaluate the expression each time.
3933 Calling save_expr produces something that is evaluated and recorded
3934 the first time expand_expr is called on it. Subsequent calls to
3935 expand_expr just reuse the recorded value.
3937 The call to expand_expr that generates code that actually computes
3938 the value is the first call *at compile time*. Subsequent calls
3939 *at compile time* generate code to use the saved value.
3940 This produces correct results provided that *at run time* control
3941 always flows through the insns made by the first expand_expr
3942 before reaching the other places where the save_expr was evaluated.
3943 You, the caller of save_expr, must make sure this is so.
3945 Constants, and certain read-only nodes, are returned with no
3946 SAVE_EXPR because that is safe. Expressions containing placeholders
3947 are not touched; see tree.def for an explanation of what these
3948 are used for. */
3950 tree
3951 save_expr (tree expr)
3953 tree inner;
3955 /* If the tree evaluates to a constant, then we don't want to hide that
3956 fact (i.e. this allows further folding, and direct checks for constants).
3957 However, a read-only object that has side effects cannot be bypassed.
3958 Since it is no problem to reevaluate literals, we just return the
3959 literal node. */
3960 inner = skip_simple_arithmetic (expr);
3961 if (TREE_CODE (inner) == ERROR_MARK)
3962 return inner;
3964 if (tree_invariant_p_1 (inner))
3965 return expr;
3967 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3968 it means that the size or offset of some field of an object depends on
3969 the value within another field.
3971 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3972 and some variable since it would then need to be both evaluated once and
3973 evaluated more than once. Front-ends must assure this case cannot
3974 happen by surrounding any such subexpressions in their own SAVE_EXPR
3975 and forcing evaluation at the proper time. */
3976 if (contains_placeholder_p (inner))
3977 return expr;
3979 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3981 /* This expression might be placed ahead of a jump to ensure that the
3982 value was computed on both sides of the jump. So make sure it isn't
3983 eliminated as dead. */
3984 TREE_SIDE_EFFECTS (expr) = 1;
3985 return expr;
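/* [Editorial illustration -- not part of the original tree.cc.  A minimal
   sketch of reusing an expression via save_expr; the example_* name is
   hypothetical.]  */
static tree
example_square (tree expr)
{
  /* Both operands refer to the same SAVE_EXPR (or to EXPR itself when it
     is invariant), so any side effects in EXPR are evaluated only once.  */
  tree saved = save_expr (expr);
  return fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}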
3988 /* Look inside EXPR into any simple arithmetic operations. Return the
3989 outermost non-arithmetic or non-invariant node. */
3991 tree
3992 skip_simple_arithmetic (tree expr)
3994 /* We don't care about whether this can be used as an lvalue in this
3995 context. */
3996 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3997 expr = TREE_OPERAND (expr, 0);
3999 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
4000 a constant, it will be more efficient to not make another SAVE_EXPR since
4001 it will allow better simplification and GCSE will be able to merge the
4002 computations if they actually occur. */
4003 while (true)
4005 if (UNARY_CLASS_P (expr))
4006 expr = TREE_OPERAND (expr, 0);
4007 else if (BINARY_CLASS_P (expr))
4009 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
4010 expr = TREE_OPERAND (expr, 0);
4011 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
4012 expr = TREE_OPERAND (expr, 1);
4013 else
4014 break;
4016 else
4017 break;
4020 return expr;
4023 /* Look inside EXPR into simple arithmetic operations involving constants.
4024 Return the outermost non-arithmetic or non-constant node. */
4026 tree
4027 skip_simple_constant_arithmetic (tree expr)
4029 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4030 expr = TREE_OPERAND (expr, 0);
4032 while (true)
4034 if (UNARY_CLASS_P (expr))
4035 expr = TREE_OPERAND (expr, 0);
4036 else if (BINARY_CLASS_P (expr))
4038 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4039 expr = TREE_OPERAND (expr, 0);
4040 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4041 expr = TREE_OPERAND (expr, 1);
4042 else
4043 break;
4045 else
4046 break;
4049 return expr;
4052 /* Return which tree structure is used by T. */
4054 enum tree_node_structure_enum
4055 tree_node_structure (const_tree t)
4057 const enum tree_code code = TREE_CODE (t);
4058 return tree_node_structure_for_code (code);
4061 /* Set various status flags when building a CALL_EXPR object T. */
4063 static void
4064 process_call_operands (tree t)
4066 bool side_effects = TREE_SIDE_EFFECTS (t);
4067 bool read_only = false;
4068 int i = call_expr_flags (t);
4070 /* Calls have side-effects, except those to const or pure functions. */
4071 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4072 side_effects = true;
4073 /* Propagate TREE_READONLY of arguments for const functions. */
4074 if (i & ECF_CONST)
4075 read_only = true;
4077 if (!side_effects || read_only)
4078 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4080 tree op = TREE_OPERAND (t, i);
4081 if (op && TREE_SIDE_EFFECTS (op))
4082 side_effects = true;
4083 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4084 read_only = false;
4087 TREE_SIDE_EFFECTS (t) = side_effects;
4088 TREE_READONLY (t) = read_only;
4091 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4092 size or offset that depends on a field within a record. */
4094 bool
4095 contains_placeholder_p (const_tree exp)
4097 enum tree_code code;
4099 if (!exp)
4100 return false;
4102 code = TREE_CODE (exp);
4103 if (code == PLACEHOLDER_EXPR)
4104 return true;
4106 switch (TREE_CODE_CLASS (code))
4108 case tcc_reference:
4109 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4110 position computations since they will be converted into a
4111 WITH_RECORD_EXPR involving the reference, which we assume
4112 here will be valid. */
4113 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4115 case tcc_exceptional:
4116 if (code == TREE_LIST)
4117 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4118 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4119 break;
4121 case tcc_unary:
4122 case tcc_binary:
4123 case tcc_comparison:
4124 case tcc_expression:
4125 switch (code)
4127 case COMPOUND_EXPR:
4128 /* Ignoring the first operand isn't quite right, but works best. */
4129 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4131 case COND_EXPR:
4132 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4133 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4134 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4136 case SAVE_EXPR:
4137 /* The save_expr function never wraps anything containing
4138 a PLACEHOLDER_EXPR. */
4139 return false;
4141 default:
4142 break;
4145 switch (TREE_CODE_LENGTH (code))
4147 case 1:
4148 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4149 case 2:
4150 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4151 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4152 default:
4153 return false;
4156 case tcc_vl_exp:
4157 switch (code)
4159 case CALL_EXPR:
4161 const_tree arg;
4162 const_call_expr_arg_iterator iter;
4163 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4164 if (CONTAINS_PLACEHOLDER_P (arg))
4165 return true;
4166 return false;
4168 default:
4169 return false;
4172 default:
4173 return false;
4175 return false;
4178 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4179 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4180 field positions. */
4182 static bool
4183 type_contains_placeholder_1 (const_tree type)
4185 /* If the size contains a placeholder or the parent type (component type in
4186 the case of arrays) involves a placeholder, this type does. */
4187 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4188 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4189 || (!POINTER_TYPE_P (type)
4190 && TREE_TYPE (type)
4191 && type_contains_placeholder_p (TREE_TYPE (type))))
4192 return true;
4194 /* Now do type-specific checks. Note that the last part of the check above
4195 greatly limits what we have to do below. */
4196 switch (TREE_CODE (type))
4198 case VOID_TYPE:
4199 case OPAQUE_TYPE:
4200 case COMPLEX_TYPE:
4201 case ENUMERAL_TYPE:
4202 case BOOLEAN_TYPE:
4203 case POINTER_TYPE:
4204 case OFFSET_TYPE:
4205 case REFERENCE_TYPE:
4206 case METHOD_TYPE:
4207 case FUNCTION_TYPE:
4208 case VECTOR_TYPE:
4209 case NULLPTR_TYPE:
4210 return false;
4212 case INTEGER_TYPE:
4213 case BITINT_TYPE:
4214 case REAL_TYPE:
4215 case FIXED_POINT_TYPE:
4216 /* Here we just check the bounds. */
4217 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4218 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4220 case ARRAY_TYPE:
4221 /* We have already checked the component type above, so just check
4222 the domain type. Flexible array members have a null domain. */
4223 return TYPE_DOMAIN (type) ?
4224 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4226 case RECORD_TYPE:
4227 case UNION_TYPE:
4228 case QUAL_UNION_TYPE:
4230 tree field;
4232 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4233 if (TREE_CODE (field) == FIELD_DECL
4234 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4235 || (TREE_CODE (type) == QUAL_UNION_TYPE
4236 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4237 || type_contains_placeholder_p (TREE_TYPE (field))))
4238 return true;
4240 return false;
4243 default:
4244 gcc_unreachable ();
4248 /* Wrapper around above function used to cache its result. */
4250 bool
4251 type_contains_placeholder_p (tree type)
4253 bool result;
4255 /* If the contains_placeholder_bits field has been initialized,
4256 then we know the answer. */
4257 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4258 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4260 /* Indicate that we've seen this type node, and the answer is false.
4261 This is what we want to return if we run into recursion via fields. */
4262 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4264 /* Compute the real value. */
4265 result = type_contains_placeholder_1 (type);
4267 /* Store the real value. */
4268 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4270 return result;
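/* To spell out the caching convention used above: the two-bit
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL field holds
     0 - not computed yet
     1 - computed, the answer is false
     2 - computed, the answer is true
   hence the "- 1" on lookup and the "+ 1" on store.  Pre-seeding the
   field with 1 makes a recursive query for the same type (e.g. via a
   field whose type refers back to the record) terminate with a
   provisional "false" instead of recursing forever.  */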
4273 /* Push tree EXP onto vector QUEUE if it is not already present. */
4275 static void
4276 push_without_duplicates (tree exp, vec<tree> *queue)
4278 unsigned int i;
4279 tree iter;
4281 FOR_EACH_VEC_ELT (*queue, i, iter)
4282 if (simple_cst_equal (iter, exp) == 1)
4283 break;
4285 if (!iter)
4286 queue->safe_push (exp);
4289 /* Given a tree EXP, find all occurrences of references to fields
4290 in a PLACEHOLDER_EXPR and place them in vector REFS without
4291 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4292 we assume here that EXP contains only arithmetic expressions
4293 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4294 argument list. */
4296 void
4297 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4299 enum tree_code code = TREE_CODE (exp);
4300 tree inner;
4301 int i;
4303 /* We handle TREE_LIST and COMPONENT_REF separately. */
4304 if (code == TREE_LIST)
4306 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4307 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4309 else if (code == COMPONENT_REF)
4311 for (inner = TREE_OPERAND (exp, 0);
4312 REFERENCE_CLASS_P (inner);
4313 inner = TREE_OPERAND (inner, 0))
4316 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4317 push_without_duplicates (exp, refs);
4318 else
4319 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4321 else
4322 switch (TREE_CODE_CLASS (code))
4324 case tcc_constant:
4325 break;
4327 case tcc_declaration:
4328 /* Variables allocated to static storage can stay. */
4329 if (!TREE_STATIC (exp))
4330 push_without_duplicates (exp, refs);
4331 break;
4333 case tcc_expression:
4334 /* This is the pattern built in ada/make_aligning_type. */
4335 if (code == ADDR_EXPR
4336 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4338 push_without_duplicates (exp, refs);
4339 break;
4342 /* Fall through. */
4344 case tcc_exceptional:
4345 case tcc_unary:
4346 case tcc_binary:
4347 case tcc_comparison:
4348 case tcc_reference:
4349 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4350 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4351 break;
4353 case tcc_vl_exp:
4354 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4355 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4356 break;
4358 default:
4359 gcc_unreachable ();
4363 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4364 return a tree with all occurrences of references to F in a
4365 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4366 CONST_DECLs. Note that we assume here that EXP contains only
4367 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4368 occurring only in their argument list. */
4370 tree
4371 substitute_in_expr (tree exp, tree f, tree r)
4373 enum tree_code code = TREE_CODE (exp);
4374 tree op0, op1, op2, op3;
4375 tree new_tree;
4377 /* We handle TREE_LIST and COMPONENT_REF separately. */
4378 if (code == TREE_LIST)
4380 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4381 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4382 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4383 return exp;
4385 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4387 else if (code == COMPONENT_REF)
4389 tree inner;
4391 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4392 and it is the right field, replace it with R. */
4393 for (inner = TREE_OPERAND (exp, 0);
4394 REFERENCE_CLASS_P (inner);
4395 inner = TREE_OPERAND (inner, 0))
4398 /* The field. */
4399 op1 = TREE_OPERAND (exp, 1);
4401 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4402 return r;
4404 /* If this expression hasn't been completed yet, leave it alone. */
4405 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4406 return exp;
4408 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4409 if (op0 == TREE_OPERAND (exp, 0))
4410 return exp;
4412 new_tree
4413 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4415 else
4416 switch (TREE_CODE_CLASS (code))
4418 case tcc_constant:
4419 return exp;
4421 case tcc_declaration:
4422 if (exp == f)
4423 return r;
4424 else
4425 return exp;
4427 case tcc_expression:
4428 if (exp == f)
4429 return r;
4431 /* Fall through. */
4433 case tcc_exceptional:
4434 case tcc_unary:
4435 case tcc_binary:
4436 case tcc_comparison:
4437 case tcc_reference:
4438 switch (TREE_CODE_LENGTH (code))
4440 case 0:
4441 return exp;
4443 case 1:
4444 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4445 if (op0 == TREE_OPERAND (exp, 0))
4446 return exp;
4448 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4449 break;
4451 case 2:
4452 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4453 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4455 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4456 return exp;
4458 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4459 break;
4461 case 3:
4462 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4463 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4464 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4466 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4467 && op2 == TREE_OPERAND (exp, 2))
4468 return exp;
4470 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4471 break;
4473 case 4:
4474 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4475 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4476 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4477 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4479 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4480 && op2 == TREE_OPERAND (exp, 2)
4481 && op3 == TREE_OPERAND (exp, 3))
4482 return exp;
4484 new_tree
4485 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4486 break;
4488 default:
4489 gcc_unreachable ();
4491 break;
4493 case tcc_vl_exp:
4495 int i;
4497 new_tree = NULL_TREE;
4499 /* If we are trying to replace F with a constant or with another
4500 instance of one of the arguments of the call, inline back
4501 functions which do nothing else than computing a value from
4502 the arguments they are passed. This makes it possible to
4503 fold partially or entirely the replacement expression. */
4504 if (code == CALL_EXPR)
4506 bool maybe_inline = false;
4507 if (CONSTANT_CLASS_P (r))
4508 maybe_inline = true;
4509 else
4510 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4511 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4513 maybe_inline = true;
4514 break;
4516 if (maybe_inline)
4518 tree t = maybe_inline_call_in_expr (exp);
4519 if (t)
4520 return SUBSTITUTE_IN_EXPR (t, f, r);
4524 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4526 tree op = TREE_OPERAND (exp, i);
4527 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4528 if (new_op != op)
4530 if (!new_tree)
4531 new_tree = copy_node (exp);
4532 TREE_OPERAND (new_tree, i) = new_op;
4536 if (new_tree)
4538 new_tree = fold (new_tree);
4539 if (TREE_CODE (new_tree) == CALL_EXPR)
4540 process_call_operands (new_tree);
4542 else
4543 return exp;
4545 break;
4547 default:
4548 gcc_unreachable ();
4551 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4553 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4554 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4556 return new_tree;
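/* A rough usage sketch (the trees named here are hypothetical): for a
   self-referential size expression such as
     (PLACEHOLDER_EXPR <struct S>).n * 4
   a call like
     SUBSTITUTE_IN_EXPR (size_expr, n_field, build_int_cst (sizetype, 10))
   replaces the COMPONENT_REF of `n' by the constant, and the fold_build*
   calls above then reduce the whole expression to 40.  */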
4559 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4560 for it within OBJ, a tree that is an object or a chain of references. */
4562 tree
4563 substitute_placeholder_in_expr (tree exp, tree obj)
4565 enum tree_code code = TREE_CODE (exp);
4566 tree op0, op1, op2, op3;
4567 tree new_tree;
4569 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4570 in the chain of OBJ. */
4571 if (code == PLACEHOLDER_EXPR)
4573 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4574 tree elt;
4576 for (elt = obj; elt != 0;
4577 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4578 || TREE_CODE (elt) == COND_EXPR)
4579 ? TREE_OPERAND (elt, 1)
4580 : (REFERENCE_CLASS_P (elt)
4581 || UNARY_CLASS_P (elt)
4582 || BINARY_CLASS_P (elt)
4583 || VL_EXP_CLASS_P (elt)
4584 || EXPRESSION_CLASS_P (elt))
4585 ? TREE_OPERAND (elt, 0) : 0))
4586 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4587 return elt;
4589 for (elt = obj; elt != 0;
4590 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4591 || TREE_CODE (elt) == COND_EXPR)
4592 ? TREE_OPERAND (elt, 1)
4593 : (REFERENCE_CLASS_P (elt)
4594 || UNARY_CLASS_P (elt)
4595 || BINARY_CLASS_P (elt)
4596 || VL_EXP_CLASS_P (elt)
4597 || EXPRESSION_CLASS_P (elt))
4598 ? TREE_OPERAND (elt, 0) : 0))
4599 if (POINTER_TYPE_P (TREE_TYPE (elt))
4600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4601 == need_type))
4602 return fold_build1 (INDIRECT_REF, need_type, elt);
4604 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4605 survives until RTL generation, there will be an error. */
4606 return exp;
4609 /* TREE_LIST is special because we need to look at TREE_VALUE
4610 and TREE_CHAIN, not TREE_OPERANDS. */
4611 else if (code == TREE_LIST)
4613 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4614 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4615 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4616 return exp;
4618 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4620 else
4621 switch (TREE_CODE_CLASS (code))
4623 case tcc_constant:
4624 case tcc_declaration:
4625 return exp;
4627 case tcc_exceptional:
4628 case tcc_unary:
4629 case tcc_binary:
4630 case tcc_comparison:
4631 case tcc_expression:
4632 case tcc_reference:
4633 case tcc_statement:
4634 switch (TREE_CODE_LENGTH (code))
4636 case 0:
4637 return exp;
4639 case 1:
4640 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4641 if (op0 == TREE_OPERAND (exp, 0))
4642 return exp;
4644 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4645 break;
4647 case 2:
4648 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4649 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4651 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4652 return exp;
4654 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4655 break;
4657 case 3:
4658 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4659 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4660 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4662 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4663 && op2 == TREE_OPERAND (exp, 2))
4664 return exp;
4666 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4667 break;
4669 case 4:
4670 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4671 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4672 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4673 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4675 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4676 && op2 == TREE_OPERAND (exp, 2)
4677 && op3 == TREE_OPERAND (exp, 3))
4678 return exp;
4680 new_tree
4681 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4682 break;
4684 default:
4685 gcc_unreachable ();
4687 break;
4689 case tcc_vl_exp:
4691 int i;
4693 new_tree = NULL_TREE;
4695 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4697 tree op = TREE_OPERAND (exp, i);
4698 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4699 if (new_op != op)
4701 if (!new_tree)
4702 new_tree = copy_node (exp);
4703 TREE_OPERAND (new_tree, i) = new_op;
4707 if (new_tree)
4709 new_tree = fold (new_tree);
4710 if (TREE_CODE (new_tree) == CALL_EXPR)
4711 process_call_operands (new_tree);
4713 else
4714 return exp;
4716 break;
4718 default:
4719 gcc_unreachable ();
4722 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4724 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4725 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4727 return new_tree;
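/* A rough usage sketch (SIZE_EXPR and OBJ are hypothetical): given a
   variable-sized record whose TYPE_SIZE is
     (PLACEHOLDER_EXPR <struct S>).len * 8
   and a particular object OBJ of type struct S, a call like
     SUBSTITUTE_PLACEHOLDER_IN_EXPR (size_expr, obj)
   walks the reference chain of OBJ for a node whose main variant type
   matches that of the PLACEHOLDER_EXPR (or a pointer to it, in which
   case an INDIRECT_REF is built) and substitutes it, so that the size
   can then be evaluated for that particular object.  */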
4731 /* Subroutine of stabilize_reference; this is called for subtrees of
4732 references. Any expression with side-effects must be put in a SAVE_EXPR
4733 to ensure that it is only evaluated once.
4735 We don't put SAVE_EXPR nodes around everything, because assigning very
4736 simple expressions to temporaries causes us to miss good opportunities
4737 for optimizations. Among other things, the opportunity to fold in the
4738 addition of a constant into an addressing mode often gets lost, e.g.
4739 "y[i+1] += x;". In general, we take the approach that we should not make
4740 an assignment unless we are forced into it - i.e., that any non-side effect
4741 operator should be allowed, and that cse should take care of coalescing
4742 multiple utterances of the same expression should that prove fruitful. */
4744 static tree
4745 stabilize_reference_1 (tree e)
4747 tree result;
4748 enum tree_code code = TREE_CODE (e);
4750 /* We cannot ignore const expressions because it might be a reference
4751 to a const array but whose index contains side-effects. But we can
4752 ignore things that are actually constant or that have already been
4753 handled by this function. */
4755 if (tree_invariant_p (e))
4756 return e;
4758 switch (TREE_CODE_CLASS (code))
4760 case tcc_exceptional:
4761 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4762 have side-effects. */
4763 if (code == STATEMENT_LIST)
4764 return save_expr (e);
4765 /* FALLTHRU */
4766 case tcc_type:
4767 case tcc_declaration:
4768 case tcc_comparison:
4769 case tcc_statement:
4770 case tcc_expression:
4771 case tcc_reference:
4772 case tcc_vl_exp:
4773 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4774 so that it will only be evaluated once. */
4775 /* The reference (r) and comparison (<) classes could be handled as
4776 below, but it is generally faster to only evaluate them once. */
4777 if (TREE_SIDE_EFFECTS (e))
4778 return save_expr (e);
4779 return e;
4781 case tcc_constant:
4782 /* Constants need no processing. In fact, we should never reach
4783 here. */
4784 return e;
4786 case tcc_binary:
4787 /* Division is slow and tends to be compiled with jumps,
4788 especially the division by powers of 2 that is often
4789 found inside of an array reference. So do it just once. */
4790 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4791 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4792 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4793 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4794 return save_expr (e);
4795 /* Recursively stabilize each operand. */
4796 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4797 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4798 break;
4800 case tcc_unary:
4801 /* Recursively stabilize each operand. */
4802 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4803 break;
4805 default:
4806 gcc_unreachable ();
4809 TREE_TYPE (result) = TREE_TYPE (e);
4810 TREE_READONLY (result) = TREE_READONLY (e);
4811 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4812 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4814 return result;
4817 /* Stabilize a reference so that we can use it any number of times
4818 without causing its operands to be evaluated more than once.
4819 Returns the stabilized reference. This works by means of save_expr,
4820 so see the caveats in the comments about save_expr.
4822 Also allows conversion expressions whose operands are references.
4823 Any other kind of expression is returned unchanged. */
4825 tree
4826 stabilize_reference (tree ref)
4828 tree result;
4829 enum tree_code code = TREE_CODE (ref);
4831 switch (code)
4833 case VAR_DECL:
4834 case PARM_DECL:
4835 case RESULT_DECL:
4836 /* No action is needed in this case. */
4837 return ref;
4839 CASE_CONVERT:
4840 case FLOAT_EXPR:
4841 case FIX_TRUNC_EXPR:
4842 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4843 break;
4845 case INDIRECT_REF:
4846 result = build_nt (INDIRECT_REF,
4847 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4848 break;
4850 case COMPONENT_REF:
4851 result = build_nt (COMPONENT_REF,
4852 stabilize_reference (TREE_OPERAND (ref, 0)),
4853 TREE_OPERAND (ref, 1), NULL_TREE);
4854 break;
4856 case BIT_FIELD_REF:
4857 result = build_nt (BIT_FIELD_REF,
4858 stabilize_reference (TREE_OPERAND (ref, 0)),
4859 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4860 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4861 break;
4863 case ARRAY_REF:
4864 result = build_nt (ARRAY_REF,
4865 stabilize_reference (TREE_OPERAND (ref, 0)),
4866 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4867 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4868 break;
4870 case ARRAY_RANGE_REF:
4871 result = build_nt (ARRAY_RANGE_REF,
4872 stabilize_reference (TREE_OPERAND (ref, 0)),
4873 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4874 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4875 break;
4877 case COMPOUND_EXPR:
4878 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4879 it wouldn't be ignored. This matters when dealing with
4880 volatiles. */
4881 return stabilize_reference_1 (ref);
4883 /* If arg isn't a kind of lvalue we recognize, make no change.
4884 Caller should recognize the error for an invalid lvalue. */
4885 default:
4886 return ref;
4888 case ERROR_MARK:
4889 return error_mark_node;
4892 TREE_TYPE (result) = TREE_TYPE (ref);
4893 TREE_READONLY (result) = TREE_READONLY (ref);
4894 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4895 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4896 protected_set_expr_location (result, EXPR_LOCATION (ref));
4898 return result;
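/* A rough usage sketch (the source fragment is hypothetical): when a
   front end expands a compound assignment such as
     a[f ()] += 1;
   it must read and write the very same element, so it stabilizes the
   reference first; the ARRAY_REF case above wraps the index in a
   SAVE_EXPR via stabilize_reference_1, giving roughly
     a[SAVE_EXPR <f ()>] = a[SAVE_EXPR <f ()>] + 1
   in which f is called exactly once.  */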
4901 /* Low-level constructors for expressions. */
4903 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4904 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4906 void
4907 recompute_tree_invariant_for_addr_expr (tree t)
4909 tree node;
4910 bool tc = true, se = false;
4912 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4914 /* We started out assuming this address is both invariant and constant, but
4915 does not have side effects. Now go down any handled components and see if
4916 any of them involve offsets that are either non-constant or non-invariant.
4917 Also check for side-effects.
4919 ??? Note that this code makes no attempt to deal with the case where
4920 taking the address of something causes a copy due to misalignment. */
4922 #define UPDATE_FLAGS(NODE) \
4923 do { tree _node = (NODE); \
4924 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4925 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4927 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4928 node = TREE_OPERAND (node, 0))
4930 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4931 array reference (probably made temporarily by the G++ front end),
4932 so ignore all the operands. */
4933 if ((TREE_CODE (node) == ARRAY_REF
4934 || TREE_CODE (node) == ARRAY_RANGE_REF)
4935 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4937 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4938 if (TREE_OPERAND (node, 2))
4939 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4940 if (TREE_OPERAND (node, 3))
4941 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4943 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4944 FIELD_DECL, apparently. The G++ front end can put something else
4945 there, at least temporarily. */
4946 else if (TREE_CODE (node) == COMPONENT_REF
4947 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4949 if (TREE_OPERAND (node, 2))
4950 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4954 node = lang_hooks.expr_to_decl (node, &tc, &se);
4956 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4957 the address, since &(*a)->b is a form of addition. If it's a constant, the
4958 address is constant too. If it's a decl, its address is constant if the
4959 decl is static. Everything else is not constant and, furthermore,
4960 taking the address of a volatile variable is not volatile. */
4961 if (INDIRECT_REF_P (node)
4962 || TREE_CODE (node) == MEM_REF)
4963 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4964 else if (CONSTANT_CLASS_P (node))
4966 else if (DECL_P (node))
4967 tc &= (staticp (node) != NULL_TREE);
4968 else
4970 tc = false;
4971 se |= TREE_SIDE_EFFECTS (node);
4975 TREE_CONSTANT (t) = tc;
4976 TREE_SIDE_EFFECTS (t) = se;
4977 #undef UPDATE_FLAGS
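/* A few concrete cases (the declarations are hypothetical):
     &global_var    constant: the operand is a decl and staticp returns
                    non-null, so TREE_CONSTANT is set on the ADDR_EXPR.
     &local_buf[i]  not constant: UPDATE_FLAGS sees the non-constant
                    ARRAY_REF index (and the local base is not static
                    either).
     &ptr->field    inherits constancy and side effects from `ptr',
                    since the address is just ptr plus an offset.  */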
4980 /* Build an expression of code CODE, data type TYPE, and operands as
4981 specified. Expressions and reference nodes can be created this way.
4982 Constants, decls, types and misc nodes cannot be.
4984 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4985 enough for all extant tree codes. */
4987 tree
4988 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4990 tree t;
4992 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4994 t = make_node (code PASS_MEM_STAT);
4995 TREE_TYPE (t) = tt;
4997 return t;
5000 tree
5001 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
5003 int length = sizeof (struct tree_exp);
5004 tree t;
5006 record_node_allocation_statistics (code, length);
5008 gcc_assert (TREE_CODE_LENGTH (code) == 1);
5010 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
5012 memset (t, 0, sizeof (struct tree_common));
5014 TREE_SET_CODE (t, code);
5016 TREE_TYPE (t) = type;
5017 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
5018 TREE_OPERAND (t, 0) = node;
5019 if (node && !TYPE_P (node))
5021 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
5022 TREE_READONLY (t) = TREE_READONLY (node);
5025 if (TREE_CODE_CLASS (code) == tcc_statement)
5027 if (code != DEBUG_BEGIN_STMT)
5028 TREE_SIDE_EFFECTS (t) = 1;
5030 else switch (code)
5032 case VA_ARG_EXPR:
5033 /* All of these have side-effects, no matter what their
5034 operands are. */
5035 TREE_SIDE_EFFECTS (t) = 1;
5036 TREE_READONLY (t) = 0;
5037 break;
5039 case INDIRECT_REF:
5040 /* Whether a dereference is readonly has nothing to do with whether
5041 its operand is readonly. */
5042 TREE_READONLY (t) = 0;
5043 break;
5045 case ADDR_EXPR:
5046 if (node)
5047 recompute_tree_invariant_for_addr_expr (t);
5048 break;
5050 default:
5051 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5052 && node && !TYPE_P (node)
5053 && TREE_CONSTANT (node))
5054 TREE_CONSTANT (t) = 1;
5055 if (TREE_CODE_CLASS (code) == tcc_reference
5056 && node && TREE_THIS_VOLATILE (node))
5057 TREE_THIS_VOLATILE (t) = 1;
5058 break;
5061 return t;
5064 #define PROCESS_ARG(N) \
5065 do { \
5066 TREE_OPERAND (t, N) = arg##N; \
5067 if (arg##N && !TYPE_P (arg##N)) \
5069 if (TREE_SIDE_EFFECTS (arg##N)) \
5070 side_effects = 1; \
5071 if (!TREE_READONLY (arg##N) \
5072 && !CONSTANT_CLASS_P (arg##N)) \
5073 (void) (read_only = 0); \
5074 if (!TREE_CONSTANT (arg##N)) \
5075 (void) (constant = 0); \
5077 } while (0)
5079 tree
5080 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5082 bool constant, read_only, side_effects, div_by_zero;
5083 tree t;
5085 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5087 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5088 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5089 /* When sizetype precision doesn't match that of pointers
5090 we need to be able to build explicit extensions or truncations
5091 of the offset argument. */
5092 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5093 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5094 && TREE_CODE (arg1) == INTEGER_CST);
5096 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5097 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5098 && ptrofftype_p (TREE_TYPE (arg1)));
5100 t = make_node (code PASS_MEM_STAT);
5101 TREE_TYPE (t) = tt;
5103 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5104 result based on those same flags for the arguments. But if the
5105 arguments aren't really even `tree' expressions, we shouldn't be trying
5106 to do this. */
5108 /* Expressions without side effects may be constant if their
5109 arguments are as well. */
5110 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5111 || TREE_CODE_CLASS (code) == tcc_binary);
5112 read_only = 1;
5113 side_effects = TREE_SIDE_EFFECTS (t);
5115 switch (code)
5117 case TRUNC_DIV_EXPR:
5118 case CEIL_DIV_EXPR:
5119 case FLOOR_DIV_EXPR:
5120 case ROUND_DIV_EXPR:
5121 case EXACT_DIV_EXPR:
5122 case CEIL_MOD_EXPR:
5123 case FLOOR_MOD_EXPR:
5124 case ROUND_MOD_EXPR:
5125 case TRUNC_MOD_EXPR:
5126 div_by_zero = integer_zerop (arg1);
5127 break;
5128 default:
5129 div_by_zero = false;
5132 PROCESS_ARG (0);
5133 PROCESS_ARG (1);
5135 TREE_SIDE_EFFECTS (t) = side_effects;
5136 if (code == MEM_REF)
5138 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5140 tree o = TREE_OPERAND (arg0, 0);
5141 TREE_READONLY (t) = TREE_READONLY (o);
5142 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5145 else
5147 TREE_READONLY (t) = read_only;
5148 /* Don't mark X / 0 as constant. */
5149 TREE_CONSTANT (t) = constant && !div_by_zero;
5150 TREE_THIS_VOLATILE (t)
5151 = (TREE_CODE_CLASS (code) == tcc_reference
5152 && arg0 && TREE_THIS_VOLATILE (arg0));
5155 return t;
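/* A rough sketch of the flag propagation (operands are hypothetical):
   building a PLUS_EXPR with build2 from two INTEGER_CST operands leaves
   TREE_SIDE_EFFECTS clear and sets TREE_CONSTANT, because PROCESS_ARG
   found every operand constant and free of side effects.  A
   TRUNC_DIV_EXPR whose second operand is a literal zero is deliberately
   not marked TREE_CONSTANT (see div_by_zero above), so x / 0 is never
   treated as a foldable constant later on.  */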
5159 tree
5160 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5161 tree arg2 MEM_STAT_DECL)
5163 bool constant, read_only, side_effects;
5164 tree t;
5166 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5167 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5169 t = make_node (code PASS_MEM_STAT);
5170 TREE_TYPE (t) = tt;
5172 read_only = 1;
5174 /* As a special exception, if COND_EXPR has NULL branches, we
5175 assume that it is a gimple statement and always consider
5176 it to have side effects. */
5177 if (code == COND_EXPR
5178 && tt == void_type_node
5179 && arg1 == NULL_TREE
5180 && arg2 == NULL_TREE)
5181 side_effects = true;
5182 else
5183 side_effects = TREE_SIDE_EFFECTS (t);
5185 PROCESS_ARG (0);
5186 PROCESS_ARG (1);
5187 PROCESS_ARG (2);
5189 if (code == COND_EXPR)
5190 TREE_READONLY (t) = read_only;
5192 TREE_SIDE_EFFECTS (t) = side_effects;
5193 TREE_THIS_VOLATILE (t)
5194 = (TREE_CODE_CLASS (code) == tcc_reference
5195 && arg0 && TREE_THIS_VOLATILE (arg0));
5197 return t;
5200 tree
5201 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5202 tree arg2, tree arg3 MEM_STAT_DECL)
5204 bool constant, read_only, side_effects;
5205 tree t;
5207 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5209 t = make_node (code PASS_MEM_STAT);
5210 TREE_TYPE (t) = tt;
5212 side_effects = TREE_SIDE_EFFECTS (t);
5214 PROCESS_ARG (0);
5215 PROCESS_ARG (1);
5216 PROCESS_ARG (2);
5217 PROCESS_ARG (3);
5219 TREE_SIDE_EFFECTS (t) = side_effects;
5220 TREE_THIS_VOLATILE (t)
5221 = (TREE_CODE_CLASS (code) == tcc_reference
5222 && arg0 && TREE_THIS_VOLATILE (arg0));
5224 return t;
5227 tree
5228 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5229 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5231 bool constant, read_only, side_effects;
5232 tree t;
5234 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5236 t = make_node (code PASS_MEM_STAT);
5237 TREE_TYPE (t) = tt;
5239 side_effects = TREE_SIDE_EFFECTS (t);
5241 PROCESS_ARG (0);
5242 PROCESS_ARG (1);
5243 PROCESS_ARG (2);
5244 PROCESS_ARG (3);
5245 PROCESS_ARG (4);
5247 TREE_SIDE_EFFECTS (t) = side_effects;
5248 if (code == TARGET_MEM_REF)
5250 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5252 tree o = TREE_OPERAND (arg0, 0);
5253 TREE_READONLY (t) = TREE_READONLY (o);
5254 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5257 else
5258 TREE_THIS_VOLATILE (t)
5259 = (TREE_CODE_CLASS (code) == tcc_reference
5260 && arg0 && TREE_THIS_VOLATILE (arg0));
5262 return t;
5265 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5266 on the pointer PTR. */
5268 tree
5269 build_simple_mem_ref_loc (location_t loc, tree ptr)
5271 poly_int64 offset = 0;
5272 tree ptype = TREE_TYPE (ptr);
5273 tree tem;
5274 /* For convenience allow addresses that collapse to a simple base
5275 and offset. */
5276 if (TREE_CODE (ptr) == ADDR_EXPR
5277 && (handled_component_p (TREE_OPERAND (ptr, 0))
5278 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5280 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5281 gcc_assert (ptr);
5282 if (TREE_CODE (ptr) == MEM_REF)
5284 offset += mem_ref_offset (ptr).force_shwi ();
5285 ptr = TREE_OPERAND (ptr, 0);
5287 else
5288 ptr = build_fold_addr_expr (ptr);
5289 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5291 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5292 ptr, build_int_cst (ptype, offset));
5293 SET_EXPR_LOCATION (tem, loc);
5294 return tem;
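/* A rough sketch (the trees are hypothetical): if PTR is &s.f, i.e. the
   ADDR_EXPR of a COMPONENT_REF, the address is collapsed to its base
   and unit offset, so the result is roughly
     MEM_REF [&s, byte offset of f]
   For a plain SSA pointer p, build_simple_mem_ref (p) just yields
     MEM_REF [p, 0]
   the equivalent of *p with the pointed-to type.  */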
5297 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5299 poly_offset_int
5300 mem_ref_offset (const_tree t)
5302 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5303 SIGNED);
5306 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5307 offsetted by OFFSET units. */
5309 tree
5310 build_invariant_address (tree type, tree base, poly_int64 offset)
5312 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5313 build_fold_addr_expr (base),
5314 build_int_cst (ptr_type_node, offset));
5315 tree addr = build1 (ADDR_EXPR, type, ref);
5316 recompute_tree_invariant_for_addr_expr (addr);
5317 return addr;
5320 /* Similar except don't specify the TREE_TYPE
5321 and leave the TREE_SIDE_EFFECTS as 0.
5322 It is permissible for arguments to be null,
5323 or even garbage if their values do not matter. */
5325 tree
5326 build_nt (enum tree_code code, ...)
5328 tree t;
5329 int length;
5330 int i;
5331 va_list p;
5333 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5335 va_start (p, code);
5337 t = make_node (code);
5338 length = TREE_CODE_LENGTH (code);
5340 for (i = 0; i < length; i++)
5341 TREE_OPERAND (t, i) = va_arg (p, tree);
5343 va_end (p);
5344 return t;
5347 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5348 tree vec. */
5350 tree
5351 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5353 tree ret, t;
5354 unsigned int ix;
5356 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5357 CALL_EXPR_FN (ret) = fn;
5358 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5359 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5360 CALL_EXPR_ARG (ret, ix) = t;
5361 return ret;
5364 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5365 and data type TYPE.
5366 We do NOT enter this node in any sort of symbol table.
5368 LOC is the location of the decl.
5370 layout_decl is used to set up the decl's storage layout.
5371 Other slots are initialized to 0 or null pointers. */
5373 tree
5374 build_decl (location_t loc, enum tree_code code, tree name,
5375 tree type MEM_STAT_DECL)
5377 tree t;
5379 t = make_node (code PASS_MEM_STAT);
5380 DECL_SOURCE_LOCATION (t) = loc;
5382 /* if (type == error_mark_node)
5383 type = integer_type_node; */
5384 /* That is not done, deliberately, so that having error_mark_node
5385 as the type can suppress useless errors in the use of this variable. */
5387 DECL_NAME (t) = name;
5388 TREE_TYPE (t) = type;
5390 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5391 layout_decl (t, 0);
5393 return t;
5396 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5398 tree
5399 build_debug_expr_decl (tree type)
5401 tree vexpr = make_node (DEBUG_EXPR_DECL);
5402 DECL_ARTIFICIAL (vexpr) = 1;
5403 TREE_TYPE (vexpr) = type;
5404 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5405 return vexpr;
5408 /* Builds and returns function declaration with NAME and TYPE. */
5410 tree
5411 build_fn_decl (const char *name, tree type)
5413 tree id = get_identifier (name);
5414 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5416 DECL_EXTERNAL (decl) = 1;
5417 TREE_PUBLIC (decl) = 1;
5418 DECL_ARTIFICIAL (decl) = 1;
5419 TREE_NOTHROW (decl) = 1;
5421 return decl;
5424 vec<tree, va_gc> *all_translation_units;
5426 /* Builds a new translation-unit decl with name NAME, queues it in the
5427 global list of translation-unit decls and returns it. */
5429 tree
5430 build_translation_unit_decl (tree name)
5432 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5433 name, NULL_TREE);
5434 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5435 vec_safe_push (all_translation_units, tu);
5436 return tu;
5440 /* BLOCK nodes are used to represent the structure of binding contours
5441 and declarations, once those contours have been exited and their contents
5442 compiled. This information is used for outputting debugging info. */
5444 tree
5445 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5447 tree block = make_node (BLOCK);
5449 BLOCK_VARS (block) = vars;
5450 BLOCK_SUBBLOCKS (block) = subblocks;
5451 BLOCK_SUPERCONTEXT (block) = supercontext;
5452 BLOCK_CHAIN (block) = chain;
5453 return block;
5457 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5459 LOC is the location to use in tree T. */
5461 void
5462 protected_set_expr_location (tree t, location_t loc)
5464 if (CAN_HAVE_LOCATION_P (t))
5465 SET_EXPR_LOCATION (t, loc);
5466 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5468 t = expr_single (t);
5469 if (t && CAN_HAVE_LOCATION_P (t))
5470 SET_EXPR_LOCATION (t, loc);
5474 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5475 UNKNOWN_LOCATION. */
5477 void
5478 protected_set_expr_location_if_unset (tree t, location_t loc)
5480 t = expr_single (t);
5481 if (t && !EXPR_HAS_LOCATION (t))
5482 protected_set_expr_location (t, loc);
5485 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5486 of the various TYPE_QUAL values. */
5488 static void
5489 set_type_quals (tree type, int type_quals)
5491 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5492 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5493 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5494 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5495 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5498 /* Returns true iff CAND and BASE have equivalent language-specific
5499 qualifiers. */
5501 bool
5502 check_lang_type (const_tree cand, const_tree base)
5504 if (lang_hooks.types.type_hash_eq == NULL)
5505 return true;
5506 /* type_hash_eq currently only applies to these types. */
5507 if (TREE_CODE (cand) != FUNCTION_TYPE
5508 && TREE_CODE (cand) != METHOD_TYPE)
5509 return true;
5510 return lang_hooks.types.type_hash_eq (cand, base);
5513 /* This function checks to see if TYPE matches the size of one of the built-in
5514 atomic types, and returns that core atomic type. */
5516 static tree
5517 find_atomic_core_type (const_tree type)
5519 tree base_atomic_type;
5521 /* Only handle complete types. */
5522 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5523 return NULL_TREE;
5525 switch (tree_to_uhwi (TYPE_SIZE (type)))
5527 case 8:
5528 base_atomic_type = atomicQI_type_node;
5529 break;
5531 case 16:
5532 base_atomic_type = atomicHI_type_node;
5533 break;
5535 case 32:
5536 base_atomic_type = atomicSI_type_node;
5537 break;
5539 case 64:
5540 base_atomic_type = atomicDI_type_node;
5541 break;
5543 case 128:
5544 base_atomic_type = atomicTI_type_node;
5545 break;
5547 default:
5548 base_atomic_type = NULL_TREE;
5551 return base_atomic_type;
5554 /* Returns true iff unqualified CAND and BASE are equivalent. */
5556 bool
5557 check_base_type (const_tree cand, const_tree base)
5559 if (TYPE_NAME (cand) != TYPE_NAME (base)
5560 /* Apparently this is needed for Objective-C. */
5561 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5562 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5563 TYPE_ATTRIBUTES (base)))
5564 return false;
5565 /* Check alignment. */
5566 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5567 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5568 return true;
5569 /* Atomic types increase minimal alignment. We must do so as well
5570 or we get duplicated canonical types. See PR88686. */
5571 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5573 /* See if this object can map to a basic atomic type. */
5574 tree atomic_type = find_atomic_core_type (cand);
5575 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5576 return true;
5578 return false;
5581 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5583 bool
5584 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5586 return (TYPE_QUALS (cand) == type_quals
5587 && check_base_type (cand, base)
5588 && check_lang_type (cand, base));
5591 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5593 static bool
5594 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5596 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5597 && TYPE_NAME (cand) == TYPE_NAME (base)
5598 /* Apparently this is needed for Objective-C. */
5599 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5600 /* Check alignment. */
5601 && TYPE_ALIGN (cand) == align
5602 /* Check this is a user-aligned type as build_aligned_type
5603 would create. */
5604 && TYPE_USER_ALIGN (cand)
5605 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5606 TYPE_ATTRIBUTES (base))
5607 && check_lang_type (cand, base));
5610 /* Return a version of the TYPE, qualified as indicated by the
5611 TYPE_QUALS, if one exists. If no qualified version exists yet,
5612 return NULL_TREE. */
5614 tree
5615 get_qualified_type (tree type, int type_quals)
5617 if (TYPE_QUALS (type) == type_quals)
5618 return type;
5620 tree mv = TYPE_MAIN_VARIANT (type);
5621 if (check_qualified_type (mv, type, type_quals))
5622 return mv;
5624 /* Search the chain of variants to see if there is already one there just
5625 like the one we need to have. If so, use that existing one. We must
5626 preserve the TYPE_NAME, since there is code that depends on this. */
5627 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5628 if (check_qualified_type (*tp, type, type_quals))
5630 /* Put the found variant at the head of the variant list so
5631 frequently searched variants get found faster. The C++ FE
5632 benefits greatly from this. */
5633 tree t = *tp;
5634 *tp = TYPE_NEXT_VARIANT (t);
5635 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5636 TYPE_NEXT_VARIANT (mv) = t;
5637 return t;
5640 return NULL_TREE;
5643 /* Like get_qualified_type, but creates the type if it does not
5644 exist. This function never returns NULL_TREE. */
5646 tree
5647 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5649 tree t;
5651 /* See if we already have the appropriate qualified variant. */
5652 t = get_qualified_type (type, type_quals);
5654 /* If not, build it. */
5655 if (!t)
5657 t = build_variant_type_copy (type PASS_MEM_STAT);
5658 set_type_quals (t, type_quals);
5660 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5662 /* See if this object can map to a basic atomic type. */
5663 tree atomic_type = find_atomic_core_type (type);
5664 if (atomic_type)
5666 /* Ensure the alignment of this type is compatible with
5667 the required alignment of the atomic type. */
5668 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5669 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5673 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5674 /* Propagate structural equality. */
5675 SET_TYPE_STRUCTURAL_EQUALITY (t);
5676 else if (TYPE_CANONICAL (type) != type)
5677 /* Build the underlying canonical type, since it is different
5678 from TYPE. */
5680 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5681 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5683 else
5684 /* T is its own canonical type. */
5685 TYPE_CANONICAL (t) = t;
5689 return t;
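/* A rough usage sketch: a front end asking for
     build_qualified_type (integer_type_node, TYPE_QUAL_CONST)
   gets the shared `const int' variant: the first call creates it with
   build_variant_type_copy and links it into the variant chain of
   integer_type_node; later calls find it again via get_qualified_type.
   Adding TYPE_QUAL_ATOMIC additionally raises the alignment to that of
   the matching atomicQI/HI/SI/DI/TI node, when one exists.  */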
5692 /* Create a variant of type T with alignment ALIGN which
5693 is measured in bits. */
5695 tree
5696 build_aligned_type (tree type, unsigned int align)
5698 tree t;
5700 if (TYPE_PACKED (type)
5701 || TYPE_ALIGN (type) == align)
5702 return type;
5704 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5705 if (check_aligned_type (t, type, align))
5706 return t;
5708 t = build_variant_type_copy (type);
5709 SET_TYPE_ALIGN (t, align);
5710 TYPE_USER_ALIGN (t) = 1;
5712 return t;
5715 /* Create a new distinct copy of TYPE. The new type is made its own
5716 MAIN_VARIANT. If TYPE requires structural equality checks, the
5717 resulting type requires structural equality checks; otherwise, its
5718 TYPE_CANONICAL points to itself. */
5720 tree
5721 build_distinct_type_copy (tree type MEM_STAT_DECL)
5723 tree t = copy_node (type PASS_MEM_STAT);
5725 TYPE_POINTER_TO (t) = 0;
5726 TYPE_REFERENCE_TO (t) = 0;
5728 /* Set the canonical type either to a new equivalence class, or
5729 propagate the need for structural equality checks. */
5730 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5731 SET_TYPE_STRUCTURAL_EQUALITY (t);
5732 else
5733 TYPE_CANONICAL (t) = t;
5735 /* Make it its own variant. */
5736 TYPE_MAIN_VARIANT (t) = t;
5737 TYPE_NEXT_VARIANT (t) = 0;
5739 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5740 whose TREE_TYPE is not t. This can also happen in the Ada
5741 frontend when using subtypes. */
5743 return t;
5746 /* Create a new variant of TYPE, equivalent but distinct. This is so
5747 the caller can modify it. TYPE_CANONICAL for the return type will
5748 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5749 are considered equal by the language itself (or that both types
5750 require structural equality checks). */
5752 tree
5753 build_variant_type_copy (tree type MEM_STAT_DECL)
5755 tree t, m = TYPE_MAIN_VARIANT (type);
5757 t = build_distinct_type_copy (type PASS_MEM_STAT);
5759 /* Since we're building a variant, assume that it is a non-semantic
5760 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5761 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5762 /* Type variants have no alias set defined. */
5763 TYPE_ALIAS_SET (t) = -1;
5765 /* Add the new type to the chain of variants of TYPE. */
5766 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5767 TYPE_NEXT_VARIANT (m) = t;
5768 TYPE_MAIN_VARIANT (t) = m;
5770 return t;
5773 /* Return true if the from trees in both tree maps are equal. */
5776 tree_map_base_eq (const void *va, const void *vb)
5778 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5779 *const b = (const struct tree_map_base *) vb;
5780 return (a->from == b->from);
5783 /* Hash a from tree in a tree_base_map. */
5785 unsigned int
5786 tree_map_base_hash (const void *item)
5788 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5791 /* Return true if this tree map structure is marked for garbage collection
5792 purposes. We simply return true if the from tree is marked, so that this
5793 structure goes away when the from tree goes away. */
5795 bool
5796 tree_map_base_marked_p (const void *p)
5798 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5801 /* Hash a from tree in a tree_map. */
5803 unsigned int
5804 tree_map_hash (const void *item)
5806 return (((const struct tree_map *) item)->hash);
5809 /* Hash a from tree in a tree_decl_map. */
5811 unsigned int
5812 tree_decl_map_hash (const void *item)
5814 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5817 /* Return the initialization priority for DECL. */
5819 priority_type
5820 decl_init_priority_lookup (tree decl)
5822 symtab_node *snode = symtab_node::get (decl);
5824 if (!snode)
5825 return DEFAULT_INIT_PRIORITY;
5826 return
5827 snode->get_init_priority ();
5830 /* Return the finalization priority for DECL. */
5832 priority_type
5833 decl_fini_priority_lookup (tree decl)
5835 cgraph_node *node = cgraph_node::get (decl);
5837 if (!node)
5838 return DEFAULT_INIT_PRIORITY;
5839 return
5840 node->get_fini_priority ();
5843 /* Set the initialization priority for DECL to PRIORITY. */
5845 void
5846 decl_init_priority_insert (tree decl, priority_type priority)
5848 struct symtab_node *snode;
5850 if (priority == DEFAULT_INIT_PRIORITY)
5852 snode = symtab_node::get (decl);
5853 if (!snode)
5854 return;
5856 else if (VAR_P (decl))
5857 snode = varpool_node::get_create (decl);
5858 else
5859 snode = cgraph_node::get_create (decl);
5860 snode->set_init_priority (priority);
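/* A rough sketch (the declaration is hypothetical): a priority written
   in the source, e.g.
     __attribute__ ((constructor (150))) static void setup (void);
   eventually reaches this function with PRIORITY == 150.  The value is
   stored on the cgraph/varpool node rather than on the decl itself, and
   a DEFAULT_INIT_PRIORITY never forces a symtab node to be created
   (hence the early return above).  */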
5863 /* Set the finalization priority for DECL to PRIORITY. */
5865 void
5866 decl_fini_priority_insert (tree decl, priority_type priority)
5868 struct cgraph_node *node;
5870 if (priority == DEFAULT_INIT_PRIORITY)
5872 node = cgraph_node::get (decl);
5873 if (!node)
5874 return;
5876 else
5877 node = cgraph_node::get_create (decl);
5878 node->set_fini_priority (priority);
5881 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5883 static void
5884 print_debug_expr_statistics (void)
5886 fprintf (stderr, "DECL_DEBUG_EXPR hash: size " HOST_SIZE_T_PRINT_DEC ", "
5887 HOST_SIZE_T_PRINT_DEC " elements, %f collisions\n",
5888 (fmt_size_t) debug_expr_for_decl->size (),
5889 (fmt_size_t) debug_expr_for_decl->elements (),
5890 debug_expr_for_decl->collisions ());
5893 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5895 static void
5896 print_value_expr_statistics (void)
5898 fprintf (stderr, "DECL_VALUE_EXPR hash: size " HOST_SIZE_T_PRINT_DEC ", "
5899 HOST_SIZE_T_PRINT_DEC " elements, %f collisions\n",
5900 (fmt_size_t) value_expr_for_decl->size (),
5901 (fmt_size_t) value_expr_for_decl->elements (),
5902 value_expr_for_decl->collisions ());
5905 /* Lookup a debug expression for FROM, and return it if we find one. */
5907 tree
5908 decl_debug_expr_lookup (tree from)
5910 struct tree_decl_map *h, in;
5911 in.base.from = from;
5913 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5914 if (h)
5915 return h->to;
5916 return NULL_TREE;
5919 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5921 void
5922 decl_debug_expr_insert (tree from, tree to)
5924 struct tree_decl_map *h;
5926 h = ggc_alloc<tree_decl_map> ();
5927 h->base.from = from;
5928 h->to = to;
5929 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5932 /* Lookup a value expression for FROM, and return it if we find one. */
5934 tree
5935 decl_value_expr_lookup (tree from)
5937 struct tree_decl_map *h, in;
5938 in.base.from = from;
5940 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5941 if (h)
5942 return h->to;
5943 return NULL_TREE;
5946 /* Insert a mapping FROM->TO in the value expression hashtable. */
5948 void
5949 decl_value_expr_insert (tree from, tree to)
5951 struct tree_decl_map *h;
5953 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5954 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5956 h = ggc_alloc<tree_decl_map> ();
5957 h->base.from = from;
5958 h->to = to;
5959 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5962 /* Lookup a vector of debug arguments for FROM, and return it if we
5963 find one. */
5965 vec<tree, va_gc> **
5966 decl_debug_args_lookup (tree from)
5968 struct tree_vec_map *h, in;
5970 if (!DECL_HAS_DEBUG_ARGS_P (from))
5971 return NULL;
5972 gcc_checking_assert (debug_args_for_decl != NULL);
5973 in.base.from = from;
5974 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5975 if (h)
5976 return &h->to;
5977 return NULL;
5980 /* Insert a mapping FROM->empty vector of debug arguments in the debug
5981 arguments hashtable. */
5983 vec<tree, va_gc> **
5984 decl_debug_args_insert (tree from)
5986 struct tree_vec_map *h;
5987 tree_vec_map **loc;
5989 if (DECL_HAS_DEBUG_ARGS_P (from))
5990 return decl_debug_args_lookup (from);
5991 if (debug_args_for_decl == NULL)
5992 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5993 h = ggc_alloc<tree_vec_map> ();
5994 h->base.from = from;
5995 h->to = NULL;
5996 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5997 *loc = h;
5998 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5999 return &h->to;
6002 /* Hashing of types so that we don't make duplicates.
6003 The entry point is `type_hash_canon'. */
6005 /* Generate the default hash code for TYPE. This is designed for
6006 speed, rather than maximum entropy. */
6008 hashval_t
6009 type_hash_canon_hash (tree type)
6011 inchash::hash hstate;
6013 hstate.add_int (TREE_CODE (type));
6015 if (TREE_TYPE (type))
6016 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6018 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6019 /* Just the identifier is adequate to distinguish. */
6020 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6022 switch (TREE_CODE (type))
6024 case METHOD_TYPE:
6025 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6026 /* FALLTHROUGH. */
6027 case FUNCTION_TYPE:
6028 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6029 if (TREE_VALUE (t) != error_mark_node)
6030 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6031 break;
6033 case OFFSET_TYPE:
6034 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6035 break;
6037 case ARRAY_TYPE:
6039 if (TYPE_DOMAIN (type))
6040 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6041 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6043 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6044 hstate.add_object (typeless);
6047 break;
6049 case INTEGER_TYPE:
6051 tree t = TYPE_MAX_VALUE (type);
6052 if (!t)
6053 t = TYPE_MIN_VALUE (type);
6054 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6055 hstate.add_object (TREE_INT_CST_ELT (t, i));
6056 break;
6059 case BITINT_TYPE:
6061 unsigned prec = TYPE_PRECISION (type);
6062 unsigned uns = TYPE_UNSIGNED (type);
6063 hstate.add_object (prec);
6064 hstate.add_int (uns);
6065 break;
6068 case REAL_TYPE:
6069 case FIXED_POINT_TYPE:
6071 unsigned prec = TYPE_PRECISION (type);
6072 hstate.add_object (prec);
6073 break;
6076 case VECTOR_TYPE:
6077 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6078 break;
6080 default:
6081 break;
6084 return hstate.end ();
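/* A rough sketch of the usual calling pattern (RETTYPE and ARGLIST are
   hypothetical):
     tree t = make_node (FUNCTION_TYPE);
     TREE_TYPE (t) = rettype;
     TYPE_ARG_TYPES (t) = arglist;
     hashval_t h = type_hash_canon_hash (t);
     t = type_hash_canon (h, t);
   Repeated requests for a structurally identical type then hash to the
   same bucket and are collapsed onto the single node already recorded
   in the table.  */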
6087 /* These are the Hashtable callback functions. */
6089 /* Returns true iff the types are equivalent. */
6091 bool
6092 type_cache_hasher::equal (type_hash *a, type_hash *b)
6094 /* First test the things that are the same for all types. */
6095 if (a->hash != b->hash
6096 || TREE_CODE (a->type) != TREE_CODE (b->type)
6097 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6098 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6099 TYPE_ATTRIBUTES (b->type))
6100 || (TREE_CODE (a->type) != COMPLEX_TYPE
6101 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6102 return false;
6104 /* Be careful about comparing arrays before and after the element type
6105 has been completed; don't compare TYPE_ALIGN unless both types are
6106 complete. */
6107 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6108 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6109 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6110 return false;
6112 switch (TREE_CODE (a->type))
6114 case VOID_TYPE:
6115 case OPAQUE_TYPE:
6116 case COMPLEX_TYPE:
6117 case POINTER_TYPE:
6118 case REFERENCE_TYPE:
6119 case NULLPTR_TYPE:
6120 return true;
6122 case VECTOR_TYPE:
6123 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6124 TYPE_VECTOR_SUBPARTS (b->type));
6126 case ENUMERAL_TYPE:
6127 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6128 && !(TYPE_VALUES (a->type)
6129 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6130 && TYPE_VALUES (b->type)
6131 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6132 && type_list_equal (TYPE_VALUES (a->type),
6133 TYPE_VALUES (b->type))))
6134 return false;
6136 /* fall through */
6138 case INTEGER_TYPE:
6139 case REAL_TYPE:
6140 case BOOLEAN_TYPE:
6141 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6142 return false;
6143 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6144 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6145 TYPE_MAX_VALUE (b->type)))
6146 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6147 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6148 TYPE_MIN_VALUE (b->type))));
6150 case BITINT_TYPE:
6151 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6152 return false;
6153 return TYPE_UNSIGNED (a->type) == TYPE_UNSIGNED (b->type);
6155 case FIXED_POINT_TYPE:
6156 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6158 case OFFSET_TYPE:
6159 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6161 case METHOD_TYPE:
6162 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6163 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6164 || (TYPE_ARG_TYPES (a->type)
6165 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6166 && TYPE_ARG_TYPES (b->type)
6167 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6168 && type_list_equal (TYPE_ARG_TYPES (a->type),
6169 TYPE_ARG_TYPES (b->type)))))
6170 break;
6171 return false;
6172 case ARRAY_TYPE:
6173 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6174 where the flag should be inherited from the element type
6175 and can change after ARRAY_TYPEs are created; on non-aggregates
6176 compare and hash it, since scalars will never have that flag set
6177 and we need to differentiate between arrays created by different
6178 front ends and middle-end-created arrays. */
6179 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6180 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6181 || (TYPE_TYPELESS_STORAGE (a->type)
6182 == TYPE_TYPELESS_STORAGE (b->type))));
6184 case RECORD_TYPE:
6185 case UNION_TYPE:
6186 case QUAL_UNION_TYPE:
6187 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6188 || (TYPE_FIELDS (a->type)
6189 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6190 && TYPE_FIELDS (b->type)
6191 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6192 && type_list_equal (TYPE_FIELDS (a->type),
6193 TYPE_FIELDS (b->type))));
6195 case FUNCTION_TYPE:
6196 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6197 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6198 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6199 || (TYPE_ARG_TYPES (a->type)
6200 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6201 && TYPE_ARG_TYPES (b->type)
6202 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6203 && type_list_equal (TYPE_ARG_TYPES (a->type),
6204 TYPE_ARG_TYPES (b->type))))
6205 break;
6206 return false;
6208 default:
6209 return false;
6212 if (lang_hooks.types.type_hash_eq != NULL)
6213 return lang_hooks.types.type_hash_eq (a->type, b->type);
6215 return true;
6218 /* Given TYPE, and HASHCODE its hash code, return the canonical
6219 object for an identical type if one already exists.
6220 Otherwise, return TYPE, and record it as the canonical object.
6222 To use this function, first create a type of the sort you want.
6223 Then compute its hash code from the fields of the type that
6224 make it different from other similar types.
6225 Then call this function and use the value. */
6227 tree
6228 type_hash_canon (unsigned int hashcode, tree type)
6230 type_hash in;
6231 type_hash **loc;
6233 /* The hash table only contains main variants, so ensure that's what we're
6234 being passed. */
6235 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6237 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6238 must call that routine before comparing TYPE_ALIGNs. */
6239 layout_type (type);
6241 in.hash = hashcode;
6242 in.type = type;
6244 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6245 if (*loc)
6247 tree t1 = ((type_hash *) *loc)->type;
6248 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6249 && t1 != type);
6250 if (TYPE_UID (type) + 1 == next_type_uid)
6251 --next_type_uid;
6252 /* Also free the min/max values and the cache for integer
6253 types. This can't be done in free_node, as LTO frees
6254 those on its own. */
6255 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == BITINT_TYPE)
6257 if (TYPE_MIN_VALUE (type)
6258 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6260 /* Zero is always in TYPE_CACHED_VALUES. */
6261 if (! TYPE_UNSIGNED (type))
6262 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6263 ggc_free (TYPE_MIN_VALUE (type));
6265 if (TYPE_MAX_VALUE (type)
6266 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6268 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6269 ggc_free (TYPE_MAX_VALUE (type));
6271 if (TYPE_CACHED_VALUES_P (type))
6272 ggc_free (TYPE_CACHED_VALUES (type));
6274 free_node (type);
6275 return t1;
6277 else
6279 struct type_hash *h;
6281 h = ggc_alloc<type_hash> ();
6282 h->hash = hashcode;
6283 h->type = type;
6284 *loc = h;
6286 return type;
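/* Illustrative sketch of the usual calling pattern (it mirrors what the
   build_*_type constructors later in this file do; all identifiers are
   ones already used here):

     tree t = make_node (OFFSET_TYPE);
     TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
     TREE_TYPE (t) = type;
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an equivalent type was already registered, the freshly built node
   is freed and the existing canonical node is returned instead.  */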
6290 static void
6291 print_type_hash_statistics (void)
6293 fprintf (stderr, "Type hash: size " HOST_SIZE_T_PRINT_DEC ", "
6294 HOST_SIZE_T_PRINT_DEC " elements, %f collisions\n",
6295 (fmt_size_t) type_hash_table->size (),
6296 (fmt_size_t) type_hash_table->elements (),
6297 type_hash_table->collisions ());
6300 /* Given two lists of types
6301 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6302 return 1 if the lists contain the same types in the same order.
6303 Also, the TREE_PURPOSEs must match. */
6305 bool
6306 type_list_equal (const_tree l1, const_tree l2)
6308 const_tree t1, t2;
6310 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6311 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6312 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6313 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6314 && (TREE_TYPE (TREE_PURPOSE (t1))
6315 == TREE_TYPE (TREE_PURPOSE (t2))))))
6316 return false;
6318 return t1 == t2;
6321 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6322 given by TYPE. If the argument list accepts variable arguments,
6323 then this function counts only the ordinary arguments. */
6325 int
6326 type_num_arguments (const_tree fntype)
6328 int i = 0;
6330 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6331 /* If the function does not take a variable number of arguments,
6332 the last element in the list will have type `void'. */
6333 if (VOID_TYPE_P (TREE_VALUE (t)))
6334 break;
6335 else
6336 ++i;
6338 return i;
6341 /* Return the type of the function FNTYPE's argument ARGNO if known.
6342 For vararg functions, when ARGNO refers to one of the variadic
6343 arguments, return null. Otherwise, return void_type_node for an
6344 out-of-bounds ARGNO. */
6346 tree
6347 type_argument_type (const_tree fntype, unsigned argno)
6349 /* Treat zero the same as an out-of-bounds argument number. */
6350 if (!argno)
6351 return void_type_node;
6353 function_args_iterator iter;
6355 tree argtype;
6356 unsigned i = 1;
6357 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6359 /* A vararg function's argument list ends in a null. Otherwise,
6360 an ordinary function's argument list ends with void. Return
6361 null if ARGNO refers to a vararg argument, void_type_node if
6362 it's out of bounds, and the formal argument type otherwise. */
6363 if (!argtype)
6364 break;
6366 if (i == argno || VOID_TYPE_P (argtype))
6367 return argtype;
6369 ++i;
6372 return NULL_TREE;
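/* Illustrative examples, assuming a hypothetical FNTYPE describing
   "int f (double, char *)":

     type_argument_type (fntype, 1) yields double_type_node,
     type_argument_type (fntype, 2) yields the "char *" pointer type,
     type_argument_type (fntype, 3) yields void_type_node (out of bounds).

   For a varargs FNTYPE such as "int g (int, ...)", an ARGNO referring to
   one of the variadic arguments yields NULL_TREE.  */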
6375 /* True if integer constants T1 and T2
6376 represent the same constant value. */
6378 bool
6379 tree_int_cst_equal (const_tree t1, const_tree t2)
6381 if (t1 == t2)
6382 return true;
6384 if (t1 == 0 || t2 == 0)
6385 return false;
6387 STRIP_ANY_LOCATION_WRAPPER (t1);
6388 STRIP_ANY_LOCATION_WRAPPER (t2);
6390 if (TREE_CODE (t1) == INTEGER_CST
6391 && TREE_CODE (t2) == INTEGER_CST
6392 && wi::to_widest (t1) == wi::to_widest (t2))
6393 return true;
6395 return false;
6398 /* Return true if T is an INTEGER_CST whose numerical value (extended
6399 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6401 bool
6402 tree_fits_shwi_p (const_tree t)
6404 return (t != NULL_TREE
6405 && TREE_CODE (t) == INTEGER_CST
6406 && wi::fits_shwi_p (wi::to_widest (t)));
6409 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6410 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6412 bool
6413 tree_fits_poly_int64_p (const_tree t)
6415 if (t == NULL_TREE)
6416 return false;
6417 if (POLY_INT_CST_P (t))
6419 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6420 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6421 return false;
6422 return true;
6424 return (TREE_CODE (t) == INTEGER_CST
6425 && wi::fits_shwi_p (wi::to_widest (t)));
6428 /* Return true if T is an INTEGER_CST whose numerical value (extended
6429 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6431 bool
6432 tree_fits_uhwi_p (const_tree t)
6434 return (t != NULL_TREE
6435 && TREE_CODE (t) == INTEGER_CST
6436 && wi::fits_uhwi_p (wi::to_widest (t)));
6439 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6440 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6442 bool
6443 tree_fits_poly_uint64_p (const_tree t)
6445 if (t == NULL_TREE)
6446 return false;
6447 if (POLY_INT_CST_P (t))
6449 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6450 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6451 return false;
6452 return true;
6454 return (TREE_CODE (t) == INTEGER_CST
6455 && wi::fits_uhwi_p (wi::to_widest (t)));
6458 /* T is an INTEGER_CST whose numerical value (extended according to
6459 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6460 HOST_WIDE_INT. */
6462 HOST_WIDE_INT
6463 tree_to_shwi (const_tree t)
6465 gcc_assert (tree_fits_shwi_p (t));
6466 return TREE_INT_CST_LOW (t);
6469 /* T is an INTEGER_CST whose numerical value (extended according to
6470 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6471 HOST_WIDE_INT. */
6473 unsigned HOST_WIDE_INT
6474 tree_to_uhwi (const_tree t)
6476 gcc_assert (tree_fits_uhwi_p (t));
6477 return TREE_INT_CST_LOW (t);
6480 /* Return the most significant (sign) bit of T. */
6482 int
6483 tree_int_cst_sign_bit (const_tree t)
6485 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6487 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6490 /* Return an indication of the sign of the integer constant T.
6491 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6492 Note that -1 will never be returned if T's type is unsigned. */
6494 int
6495 tree_int_cst_sgn (const_tree t)
6497 if (wi::to_wide (t) == 0)
6498 return 0;
6499 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6500 return 1;
6501 else if (wi::neg_p (wi::to_wide (t)))
6502 return -1;
6503 else
6504 return 1;
6507 /* Return the minimum number of bits needed to represent VALUE in a
6508 signed or unsigned type; SGN says which. */
6510 unsigned int
6511 tree_int_cst_min_precision (tree value, signop sgn)
6513 /* If the value is negative, compute its negative minus 1. The latter
6514 adjustment is because the absolute value of the largest negative value
6515 is one larger than the largest positive value. This is equivalent to
6516 a bit-wise negation, so use that operation instead. */
6518 if (tree_int_cst_sgn (value) < 0)
6519 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6521 /* Return the number of bits needed, taking into account the fact
6522 that we need one more bit for a signed than unsigned type.
6523 If value is 0 or -1, the minimum precision is 1 no matter
6524 what SGN is. */
6526 if (integer_zerop (value))
6527 return 1;
6528 else
6529 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
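/* Worked examples (illustrative): for VALUE 5, tree_floor_log2 is 2, so
   the result is 3 bits for UNSIGNED and 4 for SIGNED; for VALUE -3 the
   value is first bit-not'ed to 2, giving 2 + 1 = 3 bits for SIGNED
   (-3 fits in the signed 3-bit range -4 .. 3).  */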
6532 /* Return truthvalue of whether T1 is the same tree structure as T2.
6533 Return 1 if they are the same.
6534 Return 0 if they are known to be different.
6535 Return -1 if either contains tree structure not understood by
6536 this function. */
6538 int
6539 simple_cst_equal (const_tree t1, const_tree t2)
6541 enum tree_code code1, code2;
6542 int cmp;
6543 int i;
6545 if (t1 == t2)
6546 return 1;
6547 if (t1 == 0 || t2 == 0)
6548 return 0;
6550 /* For location wrappers to be the same, they must be at the same
6551 source location (and wrap the same thing). */
6552 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6554 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6555 return 0;
6556 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6559 code1 = TREE_CODE (t1);
6560 code2 = TREE_CODE (t2);
6562 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6564 if (CONVERT_EXPR_CODE_P (code2)
6565 || code2 == NON_LVALUE_EXPR)
6566 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6567 else
6568 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6571 else if (CONVERT_EXPR_CODE_P (code2)
6572 || code2 == NON_LVALUE_EXPR)
6573 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6575 if (code1 != code2)
6576 return 0;
6578 switch (code1)
6580 case INTEGER_CST:
6581 return wi::to_widest (t1) == wi::to_widest (t2);
6583 case REAL_CST:
6584 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6586 case FIXED_CST:
6587 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6589 case STRING_CST:
6590 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6591 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6592 TREE_STRING_LENGTH (t1)));
6594 case CONSTRUCTOR:
6596 unsigned HOST_WIDE_INT idx;
6597 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6598 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6600 if (vec_safe_length (v1) != vec_safe_length (v2))
6601 return false;
6603 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6604 /* ??? Should we handle also fields here? */
6605 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6606 return false;
6607 return true;
6610 case SAVE_EXPR:
6611 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6613 case CALL_EXPR:
6614 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6615 if (cmp <= 0)
6616 return cmp;
6617 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6618 return 0;
6620 const_tree arg1, arg2;
6621 const_call_expr_arg_iterator iter1, iter2;
6622 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6623 arg2 = first_const_call_expr_arg (t2, &iter2);
6624 arg1 && arg2;
6625 arg1 = next_const_call_expr_arg (&iter1),
6626 arg2 = next_const_call_expr_arg (&iter2))
6628 cmp = simple_cst_equal (arg1, arg2);
6629 if (cmp <= 0)
6630 return cmp;
6632 return arg1 == arg2;
6635 case TARGET_EXPR:
6636 /* Special case: if either target is an unallocated VAR_DECL,
6637 it means that it's going to be unified with whatever the
6638 TARGET_EXPR is really supposed to initialize, so treat it
6639 as being equivalent to anything. */
6640 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6641 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6642 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6643 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6644 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6645 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6646 cmp = 1;
6647 else
6648 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6650 if (cmp <= 0)
6651 return cmp;
6653 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6655 case WITH_CLEANUP_EXPR:
6656 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6657 if (cmp <= 0)
6658 return cmp;
6660 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6662 case COMPONENT_REF:
6663 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6664 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6666 return 0;
6668 case VAR_DECL:
6669 case PARM_DECL:
6670 case CONST_DECL:
6671 case FUNCTION_DECL:
6672 return 0;
6674 default:
6675 if (POLY_INT_CST_P (t1))
6676 /* A false return means maybe_ne rather than known_ne. */
6677 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6678 TYPE_SIGN (TREE_TYPE (t1))),
6679 poly_widest_int::from (poly_int_cst_value (t2),
6680 TYPE_SIGN (TREE_TYPE (t2))));
6681 break;
6684 /* This general rule works for most tree codes. All exceptions should be
6685 handled above. If this is a language-specific tree code, we can't
6686 trust what might be in the operand, so say we don't know
6687 the situation. */
6688 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6689 return -1;
6691 switch (TREE_CODE_CLASS (code1))
6693 case tcc_unary:
6694 case tcc_binary:
6695 case tcc_comparison:
6696 case tcc_expression:
6697 case tcc_reference:
6698 case tcc_statement:
6699 cmp = 1;
6700 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6702 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6703 if (cmp <= 0)
6704 return cmp;
6707 return cmp;
6709 default:
6710 return -1;
6714 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6715 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6716 than U, respectively. */
6718 int
6719 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6721 if (tree_int_cst_sgn (t) < 0)
6722 return -1;
6723 else if (!tree_fits_uhwi_p (t))
6724 return 1;
6725 else if (TREE_INT_CST_LOW (t) == u)
6726 return 0;
6727 else if (TREE_INT_CST_LOW (t) < u)
6728 return -1;
6729 else
6730 return 1;
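/* Illustrative use, comparing an INTEGER_CST against a host integer
   without first building a tree constant (the operands here are just
   examples):

     if (compare_tree_int (TYPE_SIZE (type), BITS_PER_WORD) > 0)
       ... the type is wider than a word ...  */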
6733 /* Return true if SIZE represents a constant size that is in bounds of
6734 what the middle-end and the backend accepts (covering not more than
6735 half of the address-space).
6736 When PERR is non-null, set *PERR on failure to the description of
6737 why SIZE is not valid. */
6739 bool
6740 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6742 if (POLY_INT_CST_P (size))
6744 if (TREE_OVERFLOW (size))
6745 return false;
6746 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6747 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6748 return false;
6749 return true;
6752 cst_size_error error;
6753 if (!perr)
6754 perr = &error;
6756 if (TREE_CODE (size) != INTEGER_CST)
6758 *perr = cst_size_not_constant;
6759 return false;
6762 if (TREE_OVERFLOW_P (size))
6764 *perr = cst_size_overflow;
6765 return false;
6768 if (tree_int_cst_sgn (size) < 0)
6770 *perr = cst_size_negative;
6771 return false;
6773 if (!tree_fits_uhwi_p (size)
6774 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6775 < wi::to_widest (size) * 2))
6777 *perr = cst_size_too_big;
6778 return false;
6781 return true;
6784 /* Return the precision of the type, or for a complex or vector type the
6785 precision of the type of its elements. */
6787 unsigned int
6788 element_precision (const_tree type)
6790 if (!TYPE_P (type))
6791 type = TREE_TYPE (type);
6792 enum tree_code code = TREE_CODE (type);
6793 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6794 type = TREE_TYPE (type);
6796 return TYPE_PRECISION (type);
6799 /* Return true if CODE represents an associative tree code. Otherwise
6800 return false. */
6801 bool
6802 associative_tree_code (enum tree_code code)
6804 switch (code)
6806 case BIT_IOR_EXPR:
6807 case BIT_AND_EXPR:
6808 case BIT_XOR_EXPR:
6809 case PLUS_EXPR:
6810 case MULT_EXPR:
6811 case MIN_EXPR:
6812 case MAX_EXPR:
6813 return true;
6815 default:
6816 break;
6818 return false;
6821 /* Return true if CODE represents a commutative tree code. Otherwise
6822 return false. */
6823 bool
6824 commutative_tree_code (enum tree_code code)
6826 switch (code)
6828 case PLUS_EXPR:
6829 case MULT_EXPR:
6830 case MULT_HIGHPART_EXPR:
6831 case MIN_EXPR:
6832 case MAX_EXPR:
6833 case BIT_IOR_EXPR:
6834 case BIT_XOR_EXPR:
6835 case BIT_AND_EXPR:
6836 case NE_EXPR:
6837 case EQ_EXPR:
6838 case UNORDERED_EXPR:
6839 case ORDERED_EXPR:
6840 case UNEQ_EXPR:
6841 case LTGT_EXPR:
6842 case TRUTH_AND_EXPR:
6843 case TRUTH_XOR_EXPR:
6844 case TRUTH_OR_EXPR:
6845 case WIDEN_MULT_EXPR:
6846 case VEC_WIDEN_MULT_HI_EXPR:
6847 case VEC_WIDEN_MULT_LO_EXPR:
6848 case VEC_WIDEN_MULT_EVEN_EXPR:
6849 case VEC_WIDEN_MULT_ODD_EXPR:
6850 return true;
6852 default:
6853 break;
6855 return false;
6858 /* Return true if CODE represents a ternary tree code for which the
6859 first two operands are commutative. Otherwise return false. */
6860 bool
6861 commutative_ternary_tree_code (enum tree_code code)
6863 switch (code)
6865 case WIDEN_MULT_PLUS_EXPR:
6866 case WIDEN_MULT_MINUS_EXPR:
6867 case DOT_PROD_EXPR:
6868 return true;
6870 default:
6871 break;
6873 return false;
6876 /* Returns true if CODE can overflow. */
6878 bool
6879 operation_can_overflow (enum tree_code code)
6881 switch (code)
6883 case PLUS_EXPR:
6884 case MINUS_EXPR:
6885 case MULT_EXPR:
6886 case LSHIFT_EXPR:
6887 /* Can overflow in various ways. */
6888 return true;
6889 case TRUNC_DIV_EXPR:
6890 case EXACT_DIV_EXPR:
6891 case FLOOR_DIV_EXPR:
6892 case CEIL_DIV_EXPR:
6893 /* For INT_MIN / -1. */
6894 return true;
6895 case NEGATE_EXPR:
6896 case ABS_EXPR:
6897 /* For -INT_MIN. */
6898 return true;
6899 default:
6900 /* These operators cannot overflow. */
6901 return false;
6905 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6906 -ftrapv doesn't generate trapping insns for CODE. */
6908 bool
6909 operation_no_trapping_overflow (tree type, enum tree_code code)
6911 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6913 /* We don't generate instructions that trap on overflow for complex or vector
6914 types. */
6915 if (!INTEGRAL_TYPE_P (type))
6916 return true;
6918 if (!TYPE_OVERFLOW_TRAPS (type))
6919 return true;
6921 switch (code)
6923 case PLUS_EXPR:
6924 case MINUS_EXPR:
6925 case MULT_EXPR:
6926 case NEGATE_EXPR:
6927 case ABS_EXPR:
6928 /* These operators can overflow, and -ftrapv generates trapping code for
6929 these. */
6930 return false;
6931 case TRUNC_DIV_EXPR:
6932 case EXACT_DIV_EXPR:
6933 case FLOOR_DIV_EXPR:
6934 case CEIL_DIV_EXPR:
6935 case LSHIFT_EXPR:
6936 /* These operators can overflow, but -ftrapv does not generate trapping
6937 code for these. */
6938 return true;
6939 default:
6940 /* These operators cannot overflow. */
6941 return true;
6945 /* Constructors for pointer, array and function types.
6946 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6947 constructed by language-dependent code, not here.) */
6949 /* Construct, lay out and return the type of pointers to TO_TYPE with
6950 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6951 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6952 indicate this type can reference all of memory. If such a type has
6953 already been constructed, reuse it. */
6955 tree
6956 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6957 bool can_alias_all)
6959 tree t;
6960 bool could_alias = can_alias_all;
6962 if (to_type == error_mark_node)
6963 return error_mark_node;
6965 if (mode == VOIDmode)
6967 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6968 mode = targetm.addr_space.pointer_mode (as);
6971 /* If the pointed-to type has the may_alias attribute set, force
6972 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6973 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6974 can_alias_all = true;
6976 /* In some cases, languages will have things that aren't a POINTER_TYPE
6977 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6978 In that case, return that type without regard to the rest of our
6979 operands.
6981 ??? This is a kludge, but consistent with the way this function has
6982 always operated and there doesn't seem to be a good way to avoid this
6983 at the moment. */
6984 if (TYPE_POINTER_TO (to_type) != 0
6985 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6986 return TYPE_POINTER_TO (to_type);
6988 /* First, if we already have a type for pointers to TO_TYPE and it's
6989 the proper mode, use it. */
6990 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6991 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6992 return t;
6994 t = make_node (POINTER_TYPE);
6996 TREE_TYPE (t) = to_type;
6997 SET_TYPE_MODE (t, mode);
6998 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6999 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7000 TYPE_POINTER_TO (to_type) = t;
7002 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7003 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7004 SET_TYPE_STRUCTURAL_EQUALITY (t);
7005 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7006 TYPE_CANONICAL (t)
7007 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7008 mode, false);
7010 /* Lay out the type. This function has many callers that are concerned
7011 with expression-construction, and this simplifies them all. */
7012 layout_type (t);
7014 return t;
7017 /* By default build pointers in ptr_mode. */
7019 tree
7020 build_pointer_type (tree to_type)
7022 return build_pointer_type_for_mode (to_type, VOIDmode, false);
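/* Illustrative use:

     tree charp = build_pointer_type (char_type_node);

   yields the POINTER_TYPE for "char *" in ptr_mode, reusing a node hung
   off TYPE_POINTER_TO (char_type_node) if one with the right mode and
   alias setting already exists.  */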
7025 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7027 tree
7028 build_reference_type_for_mode (tree to_type, machine_mode mode,
7029 bool can_alias_all)
7031 tree t;
7032 bool could_alias = can_alias_all;
7034 if (to_type == error_mark_node)
7035 return error_mark_node;
7037 if (mode == VOIDmode)
7039 addr_space_t as = TYPE_ADDR_SPACE (to_type);
7040 mode = targetm.addr_space.pointer_mode (as);
7043 /* If the pointed-to type has the may_alias attribute set, force
7044 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7045 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7046 can_alias_all = true;
7048 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7049 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7050 In that case, return that type without regard to the rest of our
7051 operands.
7053 ??? This is a kludge, but consistent with the way this function has
7054 always operated and there doesn't seem to be a good way to avoid this
7055 at the moment. */
7056 if (TYPE_REFERENCE_TO (to_type) != 0
7057 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7058 return TYPE_REFERENCE_TO (to_type);
7060 /* First, if we already have a type for pointers to TO_TYPE and it's
7061 the proper mode, use it. */
7062 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7063 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7064 return t;
7066 t = make_node (REFERENCE_TYPE);
7068 TREE_TYPE (t) = to_type;
7069 SET_TYPE_MODE (t, mode);
7070 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7071 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7072 TYPE_REFERENCE_TO (to_type) = t;
7074 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7075 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7076 SET_TYPE_STRUCTURAL_EQUALITY (t);
7077 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7078 TYPE_CANONICAL (t)
7079 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7080 mode, false);
7082 layout_type (t);
7084 return t;
7088 /* Build the node for the type of references-to-TO_TYPE by default
7089 in ptr_mode. */
7091 tree
7092 build_reference_type (tree to_type)
7094 return build_reference_type_for_mode (to_type, VOIDmode, false);
7097 #define MAX_INT_CACHED_PREC \
7098 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7099 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7101 static void
7102 clear_nonstandard_integer_type_cache (void)
7104 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7106 nonstandard_integer_type_cache[i] = NULL;
7110 /* Builds a signed or unsigned integer type of precision PRECISION.
7111 Used for C bitfields whose precision does not match that of
7112 built-in target types. */
7113 tree
7114 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7115 int unsignedp)
7117 tree itype, ret;
7119 if (unsignedp)
7120 unsignedp = MAX_INT_CACHED_PREC + 1;
7122 if (precision <= MAX_INT_CACHED_PREC)
7124 itype = nonstandard_integer_type_cache[precision + unsignedp];
7125 if (itype)
7126 return itype;
7129 itype = make_node (INTEGER_TYPE);
7130 TYPE_PRECISION (itype) = precision;
7132 if (unsignedp)
7133 fixup_unsigned_type (itype);
7134 else
7135 fixup_signed_type (itype);
7137 inchash::hash hstate;
7138 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7139 ret = type_hash_canon (hstate.end (), itype);
7140 if (precision <= MAX_INT_CACHED_PREC)
7141 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7143 return ret;
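/* Illustrative use: a 24-bit unsigned type, e.g. for a bit-field, can be
   obtained with

     tree u24 = build_nonstandard_integer_type (24, 1);

   Results for precisions up to MAX_INT_CACHED_PREC are cached, so
   repeated calls return the same node.  */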
7146 #define MAX_BOOL_CACHED_PREC \
7147 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7148 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7150 /* Builds a boolean type of precision PRECISION.
7151 Used for boolean vectors to choose proper vector element size. */
7152 tree
7153 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7155 tree type;
7157 if (precision <= MAX_BOOL_CACHED_PREC)
7159 type = nonstandard_boolean_type_cache[precision];
7160 if (type)
7161 return type;
7164 type = make_node (BOOLEAN_TYPE);
7165 TYPE_PRECISION (type) = precision;
7166 fixup_signed_type (type);
7168 if (precision <= MAX_INT_CACHED_PREC)
7169 nonstandard_boolean_type_cache[precision] = type;
7171 return type;
7174 static GTY(()) vec<tree, va_gc> *bitint_type_cache;
7176 /* Builds a signed or unsigned _BitInt(PRECISION) type. */
7177 tree
7178 build_bitint_type (unsigned HOST_WIDE_INT precision, int unsignedp)
7180 tree itype, ret;
7182 gcc_checking_assert (precision >= 1 + !unsignedp);
7184 if (unsignedp)
7185 unsignedp = MAX_INT_CACHED_PREC + 1;
7187 if (bitint_type_cache == NULL)
7188 vec_safe_grow_cleared (bitint_type_cache, 2 * MAX_INT_CACHED_PREC + 2);
7190 if (precision <= MAX_INT_CACHED_PREC)
7192 itype = (*bitint_type_cache)[precision + unsignedp];
7193 if (itype)
7194 return itype;
7197 itype = make_node (BITINT_TYPE);
7198 TYPE_PRECISION (itype) = precision;
7200 if (unsignedp)
7201 fixup_unsigned_type (itype);
7202 else
7203 fixup_signed_type (itype);
7205 inchash::hash hstate;
7206 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7207 ret = type_hash_canon (hstate.end (), itype);
7208 if (precision <= MAX_INT_CACHED_PREC)
7209 (*bitint_type_cache)[precision + unsignedp] = ret;
7211 return ret;
7214 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7215 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7216 is true, reuse such a type that has already been constructed. */
7218 static tree
7219 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7221 tree itype = make_node (INTEGER_TYPE);
7223 TREE_TYPE (itype) = type;
7225 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7226 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7228 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7229 SET_TYPE_MODE (itype, TYPE_MODE (type));
7230 TYPE_SIZE (itype) = TYPE_SIZE (type);
7231 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7232 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7233 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7234 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7236 if (!shared)
7237 return itype;
7239 if ((TYPE_MIN_VALUE (itype)
7240 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7241 || (TYPE_MAX_VALUE (itype)
7242 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7244 /* Since we cannot reliably merge this type, we need to compare it using
7245 structural equality checks. */
7246 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7247 return itype;
7250 hashval_t hash = type_hash_canon_hash (itype);
7251 itype = type_hash_canon (hash, itype);
7253 return itype;
7256 /* Wrapper around build_range_type_1 with SHARED set to true. */
7258 tree
7259 build_range_type (tree type, tree lowval, tree highval)
7261 return build_range_type_1 (type, lowval, highval, true);
7264 /* Wrapper around build_range_type_1 with SHARED set to false. */
7266 tree
7267 build_nonshared_range_type (tree type, tree lowval, tree highval)
7269 return build_range_type_1 (type, lowval, highval, false);
7272 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7273 MAXVAL should be the maximum value in the domain
7274 (one less than the length of the array).
7276 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7277 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7278 The limit exists because the result is a signed type and we don't handle
7279 sizes that use more than one HOST_WIDE_INT. */
7281 tree
7282 build_index_type (tree maxval)
7284 return build_range_type (sizetype, size_zero_node, maxval);
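/* Illustrative use: the TYPE_DOMAIN for a 10-element array is

     tree domain = build_index_type (size_int (9));

   i.e. a range type over sizetype with bounds 0 .. 9.  */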
7287 /* Return true if the debug information for TYPE, a subtype, should be emitted
7288 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7289 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7290 debug info and doesn't reflect the source code. */
7292 bool
7293 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7295 tree base_type = TREE_TYPE (type), low, high;
7297 /* Subrange types have a base type which is an integral type. */
7298 if (!INTEGRAL_TYPE_P (base_type))
7299 return false;
7301 /* Get the real bounds of the subtype. */
7302 if (lang_hooks.types.get_subrange_bounds)
7303 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7304 else
7306 low = TYPE_MIN_VALUE (type);
7307 high = TYPE_MAX_VALUE (type);
7310 /* If the type and its base type have the same representation and the same
7311 name, then the type is not a subrange but a copy of the base type. */
7312 if ((TREE_CODE (base_type) == INTEGER_TYPE
7313 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7314 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7315 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7316 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7317 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7318 return false;
7320 if (lowval)
7321 *lowval = low;
7322 if (highval)
7323 *highval = high;
7324 return true;
7327 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7328 and number of elements specified by the range of values of INDEX_TYPE.
7329 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7330 If SHARED is true, reuse such a type that has already been constructed.
7331 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7333 tree
7334 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7335 bool shared, bool set_canonical)
7337 tree t;
7339 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7341 error ("arrays of functions are not meaningful");
7342 elt_type = integer_type_node;
7345 t = make_node (ARRAY_TYPE);
7346 TREE_TYPE (t) = elt_type;
7347 TYPE_DOMAIN (t) = index_type;
7348 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7349 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7350 layout_type (t);
7352 if (shared)
7354 hashval_t hash = type_hash_canon_hash (t);
7355 tree probe_type = t;
7356 t = type_hash_canon (hash, t);
7357 if (t != probe_type)
7358 return t;
7361 if (TYPE_CANONICAL (t) == t && set_canonical)
7363 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7364 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7365 || in_lto_p)
7366 SET_TYPE_STRUCTURAL_EQUALITY (t);
7367 else if (TYPE_CANONICAL (elt_type) != elt_type
7368 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7369 TYPE_CANONICAL (t)
7370 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7371 index_type
7372 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7373 typeless_storage, shared, set_canonical);
7376 return t;
7379 /* Wrapper around build_array_type_1 with SHARED set to true. */
7381 tree
7382 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7384 return
7385 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7388 /* Wrapper around build_array_type_1 with SHARED set to false. */
7390 tree
7391 build_nonshared_array_type (tree elt_type, tree index_type)
7393 return build_array_type_1 (elt_type, index_type, false, false, true);
7396 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7397 sizetype. */
7399 tree
7400 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7402 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
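/* Illustrative use: the type of "char[16]" can be built as

     tree a16 = build_array_type_nelts (char_type_node, 16);

   which is equivalent to
   build_array_type (char_type_node, build_index_type (size_int (15))).  */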
7405 /* Computes the canonical argument types from the argument type list
7406 ARGTYPES.
7408 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7409 on entry to this function, or if any of the ARGTYPES are
7410 structural.
7412 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7413 true on entry to this function, or if any of the ARGTYPES are
7414 non-canonical.
7416 Returns a canonical argument list, which may be ARGTYPES when the
7417 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7418 true) or would not differ from ARGTYPES. */
7420 static tree
7421 maybe_canonicalize_argtypes (tree argtypes,
7422 bool *any_structural_p,
7423 bool *any_noncanonical_p)
7425 tree arg;
7426 bool any_noncanonical_argtypes_p = false;
7428 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7430 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7431 /* Fail gracefully by stating that the type is structural. */
7432 *any_structural_p = true;
7433 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7434 *any_structural_p = true;
7435 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7436 || TREE_PURPOSE (arg))
7437 /* If the argument has a default argument, we consider it
7438 non-canonical even though the type itself is canonical.
7439 That way, different variants of function and method types
7440 with default arguments will all point to the variant with
7441 no defaults as their canonical type. */
7442 any_noncanonical_argtypes_p = true;
7445 if (*any_structural_p)
7446 return argtypes;
7448 if (any_noncanonical_argtypes_p)
7450 /* Build the canonical list of argument types. */
7451 tree canon_argtypes = NULL_TREE;
7452 bool is_void = false;
7454 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7456 if (arg == void_list_node)
7457 is_void = true;
7458 else
7459 canon_argtypes = tree_cons (NULL_TREE,
7460 TYPE_CANONICAL (TREE_VALUE (arg)),
7461 canon_argtypes);
7464 canon_argtypes = nreverse (canon_argtypes);
7465 if (is_void)
7466 canon_argtypes = chainon (canon_argtypes, void_list_node);
7468 /* There is a non-canonical type. */
7469 *any_noncanonical_p = true;
7470 return canon_argtypes;
7473 /* The canonical argument types are the same as ARGTYPES. */
7474 return argtypes;
7477 /* Construct, lay out and return
7478 the type of functions returning type VALUE_TYPE
7479 given arguments of types ARG_TYPES.
7480 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7481 are data type nodes for the arguments of the function.
7482 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7483 variable-arguments function with (...) prototype (no named arguments).
7484 If such a type has already been constructed, reuse it. */
7486 tree
7487 build_function_type (tree value_type, tree arg_types,
7488 bool no_named_args_stdarg_p)
7490 tree t;
7491 inchash::hash hstate;
7492 bool any_structural_p, any_noncanonical_p;
7493 tree canon_argtypes;
7495 gcc_assert (arg_types != error_mark_node);
7497 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7499 error ("function return type cannot be function");
7500 value_type = integer_type_node;
7503 /* Make a node of the sort we want. */
7504 t = make_node (FUNCTION_TYPE);
7505 TREE_TYPE (t) = value_type;
7506 TYPE_ARG_TYPES (t) = arg_types;
7507 if (no_named_args_stdarg_p)
7509 gcc_assert (arg_types == NULL_TREE);
7510 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7513 /* If we already have such a type, use the old one. */
7514 hashval_t hash = type_hash_canon_hash (t);
7515 tree probe_type = t;
7516 t = type_hash_canon (hash, t);
7517 if (t != probe_type)
7518 return t;
7520 /* Set up the canonical type. */
7521 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7522 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7523 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7524 &any_structural_p,
7525 &any_noncanonical_p);
7526 if (any_structural_p)
7527 SET_TYPE_STRUCTURAL_EQUALITY (t);
7528 else if (any_noncanonical_p)
7529 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7530 canon_argtypes);
7532 if (!COMPLETE_TYPE_P (t))
7533 layout_type (t);
7534 return t;
7537 /* Build a function type. The RETURN_TYPE is the type returned by the
7538 function. If VAARGS is set, no void_type_node is appended to the
7539 list. ARGP must always be terminated by a NULL_TREE. */
7541 static tree
7542 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7544 tree t, args, last;
7546 t = va_arg (argp, tree);
7547 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7548 args = tree_cons (NULL_TREE, t, args);
7550 if (vaargs)
7552 last = args;
7553 if (args != NULL_TREE)
7554 args = nreverse (args);
7555 gcc_assert (last != void_list_node);
7557 else if (args == NULL_TREE)
7558 args = void_list_node;
7559 else
7561 last = args;
7562 args = nreverse (args);
7563 TREE_CHAIN (last) = void_list_node;
7565 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7567 return args;
7570 /* Build a function type. The RETURN_TYPE is the type returned by the
7571 function. If additional arguments are provided, they are
7572 additional argument types. The list of argument types must always
7573 be terminated by NULL_TREE. */
7575 tree
7576 build_function_type_list (tree return_type, ...)
7578 tree args;
7579 va_list p;
7581 va_start (p, return_type);
7582 args = build_function_type_list_1 (false, return_type, p);
7583 va_end (p);
7584 return args;
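/* Illustrative use: the type of "int f (double, char *)" is

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The varargs variant below omits the trailing void_list_node, so the
   resulting type describes a prototype ending in "...".  */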
7587 /* Build a variable argument function type. The RETURN_TYPE is the
7588 type returned by the function. If additional arguments are provided,
7589 they are additional argument types. The list of argument types must
7590 always be terminated by NULL_TREE. */
7592 tree
7593 build_varargs_function_type_list (tree return_type, ...)
7595 tree args;
7596 va_list p;
7598 va_start (p, return_type);
7599 args = build_function_type_list_1 (true, return_type, p);
7600 va_end (p);
7602 return args;
7605 /* Build a function type. RETURN_TYPE is the type returned by the
7606 function; VAARGS indicates whether the function takes varargs. The
7607 function takes N named arguments, the types of which are provided in
7608 ARG_TYPES. */
7610 static tree
7611 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7612 tree *arg_types)
7614 int i;
7615 tree t = vaargs ? NULL_TREE : void_list_node;
7617 for (i = n - 1; i >= 0; i--)
7618 t = tree_cons (NULL_TREE, arg_types[i], t);
7620 return build_function_type (return_type, t, vaargs && n == 0);
7623 /* Build a function type. RETURN_TYPE is the type returned by the
7624 function. The function takes N named arguments, the types of which
7625 are provided in ARG_TYPES. */
7627 tree
7628 build_function_type_array (tree return_type, int n, tree *arg_types)
7630 return build_function_type_array_1 (false, return_type, n, arg_types);
7633 /* Build a variable argument function type. RETURN_TYPE is the type
7634 returned by the function. The function takes N named arguments, the
7635 types of which are provided in ARG_TYPES. */
7637 tree
7638 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7640 return build_function_type_array_1 (true, return_type, n, arg_types);
7643 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7644 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7645 for the method. An implicit additional parameter (of type
7646 pointer-to-BASETYPE) is added to the ARGTYPES. */
7648 tree
7649 build_method_type_directly (tree basetype,
7650 tree rettype,
7651 tree argtypes)
7653 tree t;
7654 tree ptype;
7655 bool any_structural_p, any_noncanonical_p;
7656 tree canon_argtypes;
7658 /* Make a node of the sort we want. */
7659 t = make_node (METHOD_TYPE);
7661 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7662 TREE_TYPE (t) = rettype;
7663 ptype = build_pointer_type (basetype);
7665 /* The actual arglist for this function includes a "hidden" argument
7666 which is "this". Put it into the list of argument types. */
7667 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7668 TYPE_ARG_TYPES (t) = argtypes;
7670 /* If we already have such a type, use the old one. */
7671 hashval_t hash = type_hash_canon_hash (t);
7672 tree probe_type = t;
7673 t = type_hash_canon (hash, t);
7674 if (t != probe_type)
7675 return t;
7677 /* Set up the canonical type. */
7678 any_structural_p
7679 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7680 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7681 any_noncanonical_p
7682 = (TYPE_CANONICAL (basetype) != basetype
7683 || TYPE_CANONICAL (rettype) != rettype);
7684 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7685 &any_structural_p,
7686 &any_noncanonical_p);
7687 if (any_structural_p)
7688 SET_TYPE_STRUCTURAL_EQUALITY (t);
7689 else if (any_noncanonical_p)
7690 TYPE_CANONICAL (t)
7691 = build_method_type_directly (TYPE_CANONICAL (basetype),
7692 TYPE_CANONICAL (rettype),
7693 canon_argtypes);
7694 if (!COMPLETE_TYPE_P (t))
7695 layout_type (t);
7697 return t;
7700 /* Construct, lay out and return the type of methods belonging to class
7701 BASETYPE and whose arguments and values are described by TYPE.
7702 If that type exists already, reuse it.
7703 TYPE must be a FUNCTION_TYPE node. */
7705 tree
7706 build_method_type (tree basetype, tree type)
7708 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7710 return build_method_type_directly (basetype,
7711 TREE_TYPE (type),
7712 TYPE_ARG_TYPES (type));
7715 /* Construct, lay out and return the type of offsets to a value
7716 of type TYPE, within an object of type BASETYPE.
7717 If a suitable offset type exists already, reuse it. */
7719 tree
7720 build_offset_type (tree basetype, tree type)
7722 tree t;
7724 /* Make a node of the sort we want. */
7725 t = make_node (OFFSET_TYPE);
7727 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7728 TREE_TYPE (t) = type;
7730 /* If we already have such a type, use the old one. */
7731 hashval_t hash = type_hash_canon_hash (t);
7732 tree probe_type = t;
7733 t = type_hash_canon (hash, t);
7734 if (t != probe_type)
7735 return t;
7737 if (!COMPLETE_TYPE_P (t))
7738 layout_type (t);
7740 if (TYPE_CANONICAL (t) == t)
7742 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7743 || TYPE_STRUCTURAL_EQUALITY_P (type))
7744 SET_TYPE_STRUCTURAL_EQUALITY (t);
7745 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7746 || TYPE_CANONICAL (type) != type)
7747 TYPE_CANONICAL (t)
7748 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7749 TYPE_CANONICAL (type));
7752 return t;
7755 /* Create a complex type whose components are COMPONENT_TYPE.
7757 If NAMED is true, the type is given a TYPE_NAME. We do not always
7758 do so because this creates a DECL node and thus makes the DECL_UIDs
7759 dependent on the type canonicalization hashtable, which is GC-ed,
7760 so the DECL_UIDs would not be stable wrt garbage collection. */
7762 tree
7763 build_complex_type (tree component_type, bool named)
7765 gcc_assert (INTEGRAL_TYPE_P (component_type)
7766 || SCALAR_FLOAT_TYPE_P (component_type)
7767 || FIXED_POINT_TYPE_P (component_type));
7769 /* Make a node of the sort we want. */
7770 tree probe = make_node (COMPLEX_TYPE);
7772 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7774 /* If we already have such a type, use the old one. */
7775 hashval_t hash = type_hash_canon_hash (probe);
7776 tree t = type_hash_canon (hash, probe);
7778 if (t == probe)
7780 /* We created a new type. The hash insertion will have laid
7781 out the type. We need to check the canonicalization and
7782 maybe set the name. */
7783 gcc_checking_assert (COMPLETE_TYPE_P (t)
7784 && !TYPE_NAME (t)
7785 && TYPE_CANONICAL (t) == t);
7787 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7788 SET_TYPE_STRUCTURAL_EQUALITY (t);
7789 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7790 TYPE_CANONICAL (t)
7791 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7793 /* We need to create a name, since complex is a fundamental type. */
7794 if (named)
7796 const char *name = NULL;
7798 if (TREE_TYPE (t) == char_type_node)
7799 name = "complex char";
7800 else if (TREE_TYPE (t) == signed_char_type_node)
7801 name = "complex signed char";
7802 else if (TREE_TYPE (t) == unsigned_char_type_node)
7803 name = "complex unsigned char";
7804 else if (TREE_TYPE (t) == short_integer_type_node)
7805 name = "complex short int";
7806 else if (TREE_TYPE (t) == short_unsigned_type_node)
7807 name = "complex short unsigned int";
7808 else if (TREE_TYPE (t) == integer_type_node)
7809 name = "complex int";
7810 else if (TREE_TYPE (t) == unsigned_type_node)
7811 name = "complex unsigned int";
7812 else if (TREE_TYPE (t) == long_integer_type_node)
7813 name = "complex long int";
7814 else if (TREE_TYPE (t) == long_unsigned_type_node)
7815 name = "complex long unsigned int";
7816 else if (TREE_TYPE (t) == long_long_integer_type_node)
7817 name = "complex long long int";
7818 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7819 name = "complex long long unsigned int";
7821 if (name != NULL)
7822 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7823 get_identifier (name), t);
7827 return build_qualified_type (t, TYPE_QUALS (component_type));
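/* Illustrative use:

     tree c = build_complex_type (float_type_node, false);

   returns a COMPLEX_TYPE whose TREE_TYPE is float_type_node; thanks to
   the type_hash_canon sharing above, repeated calls yield the same
   node.  */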
7830 /* If TYPE is a real or complex floating-point type and the target
7831 does not directly support arithmetic on TYPE then return the wider
7832 type to be used for arithmetic on TYPE. Otherwise, return
7833 NULL_TREE. */
7835 tree
7836 excess_precision_type (tree type)
7838 /* The target can give two different responses to the question of
7839 which excess precision mode it would like depending on whether we
7840 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7842 enum excess_precision_type requested_type
7843 = (flag_excess_precision == EXCESS_PRECISION_FAST
7844 ? EXCESS_PRECISION_TYPE_FAST
7845 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7846 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7848 enum flt_eval_method target_flt_eval_method
7849 = targetm.c.excess_precision (requested_type);
7851 /* The target should not ask for unpredictable float evaluation (though
7852 it might advertise that the evaluation is implicitly unpredictable,
7853 but we don't care about that here; it will have been reported
7854 elsewhere). If it does ask for unpredictable evaluation, we have
7855 nothing to do here. */
7856 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7858 /* Nothing to do. The target has asked for all types we know about
7859 to be computed with their native precision and range. */
7860 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7861 return NULL_TREE;
7863 /* The target will promote this type in a target-dependent way, so excess
7864 precision ought to leave it alone. */
7865 if (targetm.promoted_type (type) != NULL_TREE)
7866 return NULL_TREE;
7868 machine_mode float16_type_mode = (float16_type_node
7869 ? TYPE_MODE (float16_type_node)
7870 : VOIDmode);
7871 machine_mode bfloat16_type_mode = (bfloat16_type_node
7872 ? TYPE_MODE (bfloat16_type_node)
7873 : VOIDmode);
7874 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7875 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7877 switch (TREE_CODE (type))
7879 case REAL_TYPE:
7881 machine_mode type_mode = TYPE_MODE (type);
7882 switch (target_flt_eval_method)
7884 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7885 if (type_mode == float16_type_mode
7886 || type_mode == bfloat16_type_mode)
7887 return float_type_node;
7888 break;
7889 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7890 if (type_mode == float16_type_mode
7891 || type_mode == bfloat16_type_mode
7892 || type_mode == float_type_mode)
7893 return double_type_node;
7894 break;
7895 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7896 if (type_mode == float16_type_mode
7897 || type_mode == bfloat16_type_mode
7898 || type_mode == float_type_mode
7899 || type_mode == double_type_mode)
7900 return long_double_type_node;
7901 break;
7902 default:
7903 gcc_unreachable ();
7905 break;
7907 case COMPLEX_TYPE:
7909 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7910 return NULL_TREE;
7911 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7912 switch (target_flt_eval_method)
7914 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7915 if (type_mode == float16_type_mode
7916 || type_mode == bfloat16_type_mode)
7917 return complex_float_type_node;
7918 break;
7919 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7920 if (type_mode == float16_type_mode
7921 || type_mode == bfloat16_type_mode
7922 || type_mode == float_type_mode)
7923 return complex_double_type_node;
7924 break;
7925 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7926 if (type_mode == float16_type_mode
7927 || type_mode == bfloat16_type_mode
7928 || type_mode == float_type_mode
7929 || type_mode == double_type_mode)
7930 return complex_long_double_type_node;
7931 break;
7932 default:
7933 gcc_unreachable ();
7935 break;
7937 default:
7938 break;
7941 return NULL_TREE;
7944 /* Return OP, stripped of any conversions to wider types as much as is safe.
7945 Converting the value back to OP's type makes a value equivalent to OP.
7947 If FOR_TYPE is nonzero, we return a value which, if converted to
7948 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7950 OP must have integer, real or enumeral type. Pointers are not allowed!
7952 There are some cases where the obvious value we could return
7953 would regenerate to OP if converted to OP's type,
7954 but would not extend like OP to wider types.
7955 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7956 For example, if OP is (unsigned short)(signed char)-1,
7957 we avoid returning (signed char)-1 if FOR_TYPE is int,
7958 even though extending that to an unsigned short would regenerate OP,
7959 since the result of extending (signed char)-1 to (int)
7960 is different from (int) OP. */
7962 tree
7963 get_unwidened (tree op, tree for_type)
7965 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7966 tree type = TREE_TYPE (op);
7967 unsigned final_prec
7968 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7969 int uns
7970 = (for_type != 0 && for_type != type
7971 && final_prec > TYPE_PRECISION (type)
7972 && TYPE_UNSIGNED (type));
7973 tree win = op;
7975 while (CONVERT_EXPR_P (op))
7977 int bitschange;
7979 /* TYPE_PRECISION on vector types has different meaning
7980 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7981 so avoid them here. */
7982 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7983 break;
7985 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7986 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7988 /* Truncations are many-one so cannot be removed.
7989 Unless we are later going to truncate down even farther. */
7990 if (bitschange < 0
7991 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7992 break;
7994 /* See what's inside this conversion. If we decide to strip it,
7995 we will set WIN. */
7996 op = TREE_OPERAND (op, 0);
7998 /* If we have not stripped any zero-extensions (uns is 0),
7999 we can strip any kind of extension.
8000 If we have previously stripped a zero-extension,
8001 only zero-extensions can safely be stripped.
8002 Any extension can be stripped if the bits it would produce
8003 are all going to be discarded later by truncating to FOR_TYPE. */
8005 if (bitschange > 0)
8007 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8008 win = op;
8009 /* TYPE_UNSIGNED says whether this is a zero-extension.
8010 Let's avoid computing it if it does not affect WIN
8011 and if UNS will not be needed again. */
8012 if ((uns
8013 || CONVERT_EXPR_P (op))
8014 && TYPE_UNSIGNED (TREE_TYPE (op)))
8016 uns = 1;
8017 win = op;
8022 /* If we finally reach a constant see if it fits in sth smaller and
8023 in that case convert it. */
8024 if (TREE_CODE (win) == INTEGER_CST)
8026 tree wtype = TREE_TYPE (win);
8027 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8028 if (for_type)
8029 prec = MAX (prec, final_prec);
8030 if (prec < TYPE_PRECISION (wtype))
8032 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8033 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8034 win = fold_convert (t, win);
8038 return win;
8041 /* Return OP or a simpler expression for a narrower value
8042 which can be sign-extended or zero-extended to give back OP.
8043 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8044 or 0 if the value should be sign-extended. */
8046 tree
8047 get_narrower (tree op, int *unsignedp_ptr)
8049 int uns = 0;
8050 bool first = true;
8051 tree win = op;
8052 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8054 if (TREE_CODE (op) == COMPOUND_EXPR)
8057 op = TREE_OPERAND (op, 1);
8058 while (TREE_CODE (op) == COMPOUND_EXPR);
8059 tree ret = get_narrower (op, unsignedp_ptr);
8060 if (ret == op)
8061 return win;
8062 auto_vec <tree, 16> v;
8063 unsigned int i;
8064 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8065 op = TREE_OPERAND (op, 1))
8066 v.safe_push (op);
8067 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8068 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8069 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8070 ret);
8071 return ret;
8073 while (TREE_CODE (op) == NOP_EXPR)
8075 int bitschange
8076 = (TYPE_PRECISION (TREE_TYPE (op))
8077 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8079 /* Truncations are many-one so cannot be removed. */
8080 if (bitschange < 0)
8081 break;
8083 /* See what's inside this conversion. If we decide to strip it,
8084 we will set WIN. */
8086 if (bitschange > 0)
8088 op = TREE_OPERAND (op, 0);
8089 /* An extension: the outermost one can be stripped,
8090 but remember whether it is zero or sign extension. */
8091 if (first)
8092 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8093 /* Otherwise, if a sign extension has been stripped,
8094 only sign extensions can now be stripped;
8095 if a zero extension has been stripped, only zero-extensions. */
8096 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8097 break;
8098 first = false;
8100 else /* bitschange == 0 */
8102 /* A change in nominal type can always be stripped, but we must
8103 preserve the unsignedness. */
8104 if (first)
8105 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8106 first = false;
8107 op = TREE_OPERAND (op, 0);
8108 /* Keep trying to narrow, but don't assign op to win if it
8109 would turn an integral type into something else. */
8110 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8111 continue;
8114 win = op;
8117 if (TREE_CODE (op) == COMPONENT_REF
8118 /* Since type_for_size always gives an integer type. */
8119 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8120 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8121 /* Ensure field is laid out already. */
8122 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8123 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8125 unsigned HOST_WIDE_INT innerprec
8126 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8127 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8128 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8129 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8131 /* We can get this structure field in a narrower type that fits it,
8132 but the resulting extension to its nominal type (a fullword type)
8133 must satisfy the same conditions as for other extensions.
8135 Do this only for fields that are aligned (not bit-fields),
8136 because when bit-field insns will be used there is no
8137 advantage in doing this. */
8139 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8140 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8141 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8142 && type != 0)
8144 if (first)
8145 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8146 win = fold_convert (type, op);
8150 *unsignedp_ptr = uns;
8151 return win;
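/* Illustrative sketch, not part of tree.cc (hypothetical helper name): why
   get_narrower refuses to strip a zero extension after a sign extension or
   vice versa, assuming 8-bit char.  Re-extending the innermost operand with
   the wrong kind of extension does not regenerate the original value.  */

static bool
example_get_narrower_caveat (void)
{
  signed char c = -1;
  unsigned int zext_of_sext = (unsigned int) (int) c;            /* 0xffffffff */
  unsigned int zext_of_schar = (unsigned int) (unsigned char) c; /* 0xff */
  return zext_of_sext != zext_of_schar;  /* true: the extension kind matters */
}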
8154 /* Return true if integer constant C has a value that is permissible
8155 for TYPE, an integral type. */
8157 bool
8158 int_fits_type_p (const_tree c, const_tree type)
8160 tree type_low_bound, type_high_bound;
8161 bool ok_for_low_bound, ok_for_high_bound;
8162 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8164 /* Non-standard boolean types can have arbitrary precision but various
8165 transformations assume that they can only take values 0 and +/-1. */
8166 if (TREE_CODE (type) == BOOLEAN_TYPE)
8167 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8169 retry:
8170 type_low_bound = TYPE_MIN_VALUE (type);
8171 type_high_bound = TYPE_MAX_VALUE (type);
8173 /* If at least one bound of the type is a constant integer, we can check
8174 ourselves and maybe make a decision. If no such decision is possible, but
8175 this type is a subtype, try checking against that. Otherwise, use
8176 fits_to_tree_p, which checks against the precision.
8178    Compute the status for each possibly constant bound, and return false as
8179    soon as we see that C does not satisfy one of them.  ok_for_xxx_bound
8180    records, for each bound, whether it is a known constant that C is known
8181    to satisfy.  */
8183 /* Check if c >= type_low_bound. */
8184 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8186 if (tree_int_cst_lt (c, type_low_bound))
8187 return false;
8188 ok_for_low_bound = true;
8190 else
8191 ok_for_low_bound = false;
8193 /* Check if c <= type_high_bound. */
8194 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8196 if (tree_int_cst_lt (type_high_bound, c))
8197 return false;
8198 ok_for_high_bound = true;
8200 else
8201 ok_for_high_bound = false;
8203 /* If the constant fits both bounds, the result is known. */
8204 if (ok_for_low_bound && ok_for_high_bound)
8205 return true;
8207 /* Perform some generic filtering which may allow making a decision
8208 even if the bounds are not constant. First, negative integers
8209     never fit in unsigned types.  */
8210 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8211 return false;
8213 /* Second, narrower types always fit in wider ones. */
8214 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8215 return true;
8217 /* Third, unsigned integers with top bit set never fit signed types. */
8218 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8220 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8221 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8223 /* When a tree_cst is converted to a wide-int, the precision
8224 is taken from the type. However, if the precision of the
8225 mode underneath the type is smaller than that, it is
8226 possible that the value will not fit. The test below
8227 fails if any bit is set between the sign bit of the
8228 underlying mode and the top bit of the type. */
8229 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8230 return false;
8232 else if (wi::neg_p (wi::to_wide (c)))
8233 return false;
8236  /* If we haven't been able to decide at this point, there is nothing more we
8237 can check ourselves here. Look at the base type if we have one and it
8238 has the same precision. */
8239 if (TREE_CODE (type) == INTEGER_TYPE
8240 && TREE_TYPE (type) != 0
8241 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8243 type = TREE_TYPE (type);
8244 goto retry;
8247 /* Or to fits_to_tree_p, if nothing else. */
8248 return wi::fits_to_tree_p (wi::to_wide (c), type);
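/* Illustrative sketch, not part of tree.cc (hypothetical helper name): the
   precision-based shortcuts above, restated for 8-bit host values.  Any
   uint8_t value fits the wider int16_t, while a uint8_t value with its top
   bit set is >= 128 > INT8_MAX and therefore never fits the signed type of
   the same precision.  */

static bool
example_fits_int8_p (uint8_t c)
{
  /* Mirrors the "unsigned integers with top bit set never fit signed
     types" test for equal precision.  */
  return (c & 0x80) == 0;
}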
8251 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8252 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8253 represented (assuming two's-complement arithmetic) within the bit
8254 precision of the type are returned instead. */
8256 void
8257 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8259 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8260 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8261 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8262 else
8264 if (TYPE_UNSIGNED (type))
8265 mpz_set_ui (min, 0);
8266 else
8268 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8269 wi::to_mpz (mn, min, SIGNED);
8273 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8274 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8275 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8276 else
8278 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8279 wi::to_mpz (mn, max, TYPE_SIGN (type));
8283 /* Return true if VAR is an automatic variable. */
8285 bool
8286 auto_var_p (const_tree var)
8288 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8289 || TREE_CODE (var) == PARM_DECL)
8290 && ! TREE_STATIC (var))
8291 || TREE_CODE (var) == RESULT_DECL);
8294 /* Return true if VAR is an automatic variable defined in function FN. */
8296 bool
8297 auto_var_in_fn_p (const_tree var, const_tree fn)
8299 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8300 && (auto_var_p (var)
8301 || TREE_CODE (var) == LABEL_DECL));
8304 /* Subprogram of following function. Called by walk_tree.
8306 Return *TP if it is an automatic variable or parameter of the
8307 function passed in as DATA. */
8309 static tree
8310 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8312 tree fn = (tree) data;
8314 if (TYPE_P (*tp))
8315 *walk_subtrees = 0;
8317 else if (DECL_P (*tp)
8318 && auto_var_in_fn_p (*tp, fn))
8319 return *tp;
8321 return NULL_TREE;
8324 /* Returns true if T is, contains, or refers to a type with variable
8325 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8326 arguments, but not the return type. If FN is nonzero, only return
8327 true if a modifier of the type or position of FN is a variable or
8328 parameter inside FN.
8330 This concept is more general than that of C99 'variably modified types':
8331 in C99, a struct type is never variably modified because a VLA may not
8332 appear as a structure member. However, in GNU C code like:
8334 struct S { int i[f()]; };
8336 is valid, and other languages may define similar constructs. */
8338 bool
8339 variably_modified_type_p (tree type, tree fn)
8341 tree t;
8343 /* Test if T is either variable (if FN is zero) or an expression containing
8344 a variable in FN. If TYPE isn't gimplified, return true also if
8345 gimplify_one_sizepos would gimplify the expression into a local
8346 variable. */
8347 #define RETURN_TRUE_IF_VAR(T) \
8348 do { tree _t = (T); \
8349 if (_t != NULL_TREE \
8350 && _t != error_mark_node \
8351 && !CONSTANT_CLASS_P (_t) \
8352 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8353 && (!fn \
8354 || (!TYPE_SIZES_GIMPLIFIED (type) \
8355 && (TREE_CODE (_t) != VAR_DECL \
8356 && !CONTAINS_PLACEHOLDER_P (_t))) \
8357 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8358 return true; } while (0)
8360 if (type == error_mark_node)
8361 return false;
8363 /* If TYPE itself has variable size, it is variably modified. */
8364 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8365 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8367 switch (TREE_CODE (type))
8369 case POINTER_TYPE:
8370 case REFERENCE_TYPE:
8371 case VECTOR_TYPE:
8372      /* Ada can have pointer types referring to themselves indirectly.  */
8373 if (TREE_VISITED (type))
8374 return false;
8375 TREE_VISITED (type) = true;
8376 if (variably_modified_type_p (TREE_TYPE (type), fn))
8378 TREE_VISITED (type) = false;
8379 return true;
8381 TREE_VISITED (type) = false;
8382 break;
8384 case FUNCTION_TYPE:
8385 case METHOD_TYPE:
8386 /* If TYPE is a function type, it is variably modified if the
8387 return type is variably modified. */
8388 if (variably_modified_type_p (TREE_TYPE (type), fn))
8389 return true;
8390 break;
8392 case INTEGER_TYPE:
8393 case REAL_TYPE:
8394 case FIXED_POINT_TYPE:
8395 case ENUMERAL_TYPE:
8396 case BOOLEAN_TYPE:
8397 /* Scalar types are variably modified if their end points
8398 aren't constant. */
8399 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8400 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8401 break;
8403 case RECORD_TYPE:
8404 case UNION_TYPE:
8405 case QUAL_UNION_TYPE:
8406 /* We can't see if any of the fields are variably-modified by the
8407 definition we normally use, since that would produce infinite
8408 recursion via pointers. */
8409 /* This is variably modified if some field's type is. */
8410 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8411 if (TREE_CODE (t) == FIELD_DECL)
8413 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8414 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8415 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8417 /* If the type is a qualified union, then the DECL_QUALIFIER
8418 of fields can also be an expression containing a variable. */
8419 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8420 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8422 /* If the field is a qualified union, then it's only a container
8423 for what's inside so we look into it. That's necessary in LTO
8424 mode because the sizes of the field tested above have been set
8425 to PLACEHOLDER_EXPRs by free_lang_data. */
8426 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8427 && variably_modified_type_p (TREE_TYPE (t), fn))
8428 return true;
8430 break;
8432 case ARRAY_TYPE:
8433 /* Do not call ourselves to avoid infinite recursion. This is
8434 variably modified if the element type is. */
8435 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8436 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8437 break;
8439 default:
8440 break;
8443 /* The current language may have other cases to check, but in general,
8444 all other types are not variably modified. */
8445 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8447 #undef RETURN_TRUE_IF_VAR
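/* Illustrative sketch, not part of tree.cc: GNU C source (hypothetical
   function name) whose types the predicate above reports as variably
   modified; the struct case from the comment above behaves the same way.  */

static void
example_variably_modified (int n)
{
  int vla[n];          /* ARRAY_TYPE whose TYPE_SIZE depends on N */
  int (*p)[n] = &vla;  /* POINTER_TYPE to a variably modified type */
  (void) p;
}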
8450 /* Given a DECL or TYPE, return the scope in which it was declared, or
8451 NULL_TREE if there is no containing scope. */
8453 tree
8454 get_containing_scope (const_tree t)
8456 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8459 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8461 const_tree
8462 get_ultimate_context (const_tree decl)
8464 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8466 if (TREE_CODE (decl) == BLOCK)
8467 decl = BLOCK_SUPERCONTEXT (decl);
8468 else
8469 decl = get_containing_scope (decl);
8471 return decl;
8474 /* Return the innermost context enclosing DECL that is
8475 a FUNCTION_DECL, or zero if none. */
8477 tree
8478 decl_function_context (const_tree decl)
8480 tree context;
8482 if (TREE_CODE (decl) == ERROR_MARK)
8483 return 0;
8485 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8486 where we look up the function at runtime. Such functions always take
8487 a first argument of type 'pointer to real context'.
8489 C++ should really be fixed to use DECL_CONTEXT for the real context,
8490 and use something else for the "virtual context". */
8491 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8492 context
8493 = TYPE_MAIN_VARIANT
8494 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8495 else
8496 context = DECL_CONTEXT (decl);
8498 while (context && TREE_CODE (context) != FUNCTION_DECL)
8500 if (TREE_CODE (context) == BLOCK)
8501 context = BLOCK_SUPERCONTEXT (context);
8502 else
8503 context = get_containing_scope (context);
8506 return context;
8509 /* Return the innermost context enclosing DECL that is
8510 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8511 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8513 tree
8514 decl_type_context (const_tree decl)
8516 tree context = DECL_CONTEXT (decl);
8518 while (context)
8519 switch (TREE_CODE (context))
8521 case NAMESPACE_DECL:
8522 case TRANSLATION_UNIT_DECL:
8523 return NULL_TREE;
8525 case RECORD_TYPE:
8526 case UNION_TYPE:
8527 case QUAL_UNION_TYPE:
8528 return context;
8530 case TYPE_DECL:
8531 case FUNCTION_DECL:
8532 context = DECL_CONTEXT (context);
8533 break;
8535 case BLOCK:
8536 context = BLOCK_SUPERCONTEXT (context);
8537 break;
8539 default:
8540 gcc_unreachable ();
8543 return NULL_TREE;
8546 /* CALL is a CALL_EXPR. Return the declaration for the function
8547 called, or NULL_TREE if the called function cannot be
8548 determined. */
8550 tree
8551 get_callee_fndecl (const_tree call)
8553 tree addr;
8555 if (call == error_mark_node)
8556 return error_mark_node;
8558 /* It's invalid to call this function with anything but a
8559 CALL_EXPR. */
8560 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8562 /* The first operand to the CALL is the address of the function
8563 called. */
8564 addr = CALL_EXPR_FN (call);
8566 /* If there is no function, return early. */
8567 if (addr == NULL_TREE)
8568 return NULL_TREE;
8570 STRIP_NOPS (addr);
8572 /* If this is a readonly function pointer, extract its initial value. */
8573 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8574 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8575 && DECL_INITIAL (addr))
8576 addr = DECL_INITIAL (addr);
8578 /* If the address is just `&f' for some function `f', then we know
8579 that `f' is being called. */
8580 if (TREE_CODE (addr) == ADDR_EXPR
8581 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8582 return TREE_OPERAND (addr, 0);
8584 /* We couldn't figure out what was being called. */
8585 return NULL_TREE;
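/* Illustrative sketch, not part of tree.cc (all names hypothetical): call
   forms the function above can and cannot resolve under the usual C/C++
   front-end representation.  A direct call is resolved from the ADDR_EXPR;
   a call through a readonly, initialized function pointer may be resolved
   via DECL_INITIAL; a call through an arbitrary pointer is not.  */

extern int example_target (void);
static int (*const example_fnptr) (void) = example_target;

static int
example_callee_forms (int (*unknown_fn) (void))
{
  int a = example_target ();  /* CALL_EXPR_FN is &example_target: resolved */
  int b = example_fnptr ();   /* readonly pointer, initializer known: may resolve */
  int c = unknown_fn ();      /* callee unknown: NULL_TREE */
  return a + b + c;
}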
8588 /* Return true when CALL's arguments and return value match those of FNDECL,
8589 a decl of a builtin function. */
8591 static bool
8592 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8594 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8596 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8597 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8598 fndecl = decl;
8600 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8601 if (gimple_form
8602 ? !useless_type_conversion_p (TREE_TYPE (call),
8603 TREE_TYPE (TREE_TYPE (fndecl)))
8604 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8605 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8606 return false;
8608 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8609 unsigned nargs = call_expr_nargs (call);
8610 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8612 /* Variadic args follow. */
8613 if (!targs)
8614 return true;
8615 tree arg = CALL_EXPR_ARG (call, i);
8616 tree type = TREE_VALUE (targs);
8617 if (gimple_form
8618 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8619 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8621 /* For pointer arguments be more forgiving, e.g. due to
8622 FILE * vs. fileptr_type_node, or say char * vs. const char *
8623 differences etc. */
8624 if (!gimple_form
8625 && POINTER_TYPE_P (type)
8626 && POINTER_TYPE_P (TREE_TYPE (arg))
8627 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8628 continue;
8629 /* char/short integral arguments are promoted to int
8630 by several frontends if targetm.calls.promote_prototypes
8631 is true. Allow such promotion too. */
8632 if (INTEGRAL_TYPE_P (type)
8633 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8634 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8635 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8636 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8637 && (gimple_form
8638 ? useless_type_conversion_p (integer_type_node,
8639 TREE_TYPE (arg))
8640 : tree_nop_conversion_p (integer_type_node,
8641 TREE_TYPE (arg))))
8642 continue;
8643 return false;
8646 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8647 return false;
8648 return true;
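/* Illustrative sketch, not part of tree.cc (hypothetical, non-builtin names):
   the shape of the char/short promotion tolerated by the loop above.  On
   targets where targetm.calls.promote_prototypes is true, the argument below
   may be recorded with type int even though the prototype says short, and
   such a call is still considered compatible.  */

extern int example_short_callee (short);

static int
example_promoted_call (short s)
{
  return example_short_callee (s);  /* argument may appear as (int) s */
}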
8651 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8652 return the associated function code, otherwise return CFN_LAST. */
8654 combined_fn
8655 get_call_combined_fn (const_tree call)
8657 /* It's invalid to call this function with anything but a CALL_EXPR. */
8658 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8660 if (!CALL_EXPR_FN (call))
8661 return as_combined_fn (CALL_EXPR_IFN (call));
8663 tree fndecl = get_callee_fndecl (call);
8664 if (fndecl
8665 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8666 && tree_builtin_call_types_compatible_p (call, fndecl))
8667 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8669 return CFN_LAST;
8672 /* Comparator of indices based on tree_node_counts. */
8674 static int
8675 tree_nodes_cmp (const void *p1, const void *p2)
8677 const unsigned *n1 = (const unsigned *)p1;
8678 const unsigned *n2 = (const unsigned *)p2;
8680 return tree_node_counts[*n1] - tree_node_counts[*n2];
8683 /* Comparator of indices based on tree_code_counts. */
8685 static int
8686 tree_codes_cmp (const void *p1, const void *p2)
8688 const unsigned *n1 = (const unsigned *)p1;
8689 const unsigned *n2 = (const unsigned *)p2;
8691 return tree_code_counts[*n1] - tree_code_counts[*n2];
8694 #define TREE_MEM_USAGE_SPACES 40
8696 /* Print debugging information about tree nodes generated during the compile,
8697 and any language-specific information. */
8699 void
8700 dump_tree_statistics (void)
8702 if (GATHER_STATISTICS)
8704 uint64_t total_nodes, total_bytes;
8705 fprintf (stderr, "\nKind Nodes Bytes\n");
8706 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8707 total_nodes = total_bytes = 0;
8710 auto_vec<unsigned> indices (all_kinds);
8711 for (unsigned i = 0; i < all_kinds; i++)
8712 indices.quick_push (i);
8713 indices.qsort (tree_nodes_cmp);
8715 for (unsigned i = 0; i < (int) all_kinds; i++)
8717 unsigned j = indices[i];
8718 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8719 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8720 SIZE_AMOUNT (tree_node_sizes[j]));
8721 total_nodes += tree_node_counts[j];
8722 total_bytes += tree_node_sizes[j];
8724 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8725 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8726 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8727 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8731 fprintf (stderr, "Code Nodes\n");
8732 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8734 auto_vec<unsigned> indices (MAX_TREE_CODES);
8735 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8736 indices.quick_push (i);
8737 indices.qsort (tree_codes_cmp);
8739 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8741 unsigned j = indices[i];
8742 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8743 get_tree_code_name ((enum tree_code) j),
8744 SIZE_AMOUNT (tree_code_counts[j]));
8746 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8747 fprintf (stderr, "\n");
8748 ssanames_print_statistics ();
8749 fprintf (stderr, "\n");
8750 phinodes_print_statistics ();
8751 fprintf (stderr, "\n");
8754 else
8755 fprintf (stderr, "(No per-node statistics)\n");
8757 print_type_hash_statistics ();
8758 print_debug_expr_statistics ();
8759 print_value_expr_statistics ();
8760 lang_hooks.print_statistics ();
8763 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8765 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8767 unsigned
8768 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8770 /* This relies on the raw feedback's top 4 bits being zero. */
8771 #define FEEDBACK(X) ((X) * 0x04c11db7)
8772 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8773 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8774 static const unsigned syndromes[16] =
8776 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8777 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8778 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8779 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8781 #undef FEEDBACK
8782 #undef SYNDROME
8784 value <<= (32 - bytes * 8);
8785 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8787 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8789 chksum = (chksum << 4) ^ feedback;
8792 return chksum;
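/* Illustrative sketch, not part of tree.cc (hypothetical helper name): a
   bit-at-a-time version of the same CRC-32 (polynomial 0x04c11db7, MSB
   first, no reflection, no final xor).  The loop above is the usual
   four-bits-per-step refinement of this form, with the SYNDROMES array
   playing the role of the 16-entry lookup table.  */

static unsigned
example_crc32_bitwise (unsigned chksum, unsigned value, unsigned bytes)
{
  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 8; ix--; value <<= 1)
    {
      unsigned use_poly = (chksum ^ value) >> 31;
      chksum <<= 1;
      if (use_poly)
	chksum ^= 0x04c11db7;
    }
  return chksum;
}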
8795 /* Generate a crc32 of a string. */
8797 unsigned
8798 crc32_string (unsigned chksum, const char *string)
8801 chksum = crc32_byte (chksum, *string);
8802 while (*string++);
8803 return chksum;
8806 /* P is a string that will be used in a symbol. Mask out any characters
8807 that are not valid in that context. */
8809 void
8810 clean_symbol_name (char *p)
8812 for (; *p; p++)
8813 if (! (ISALNUM (*p)
8814 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8815 || *p == '$'
8816 #endif
8817 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8818 || *p == '.'
8819 #endif
8821 *p = '_';
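/* Illustrative sketch, not part of tree.cc (hypothetical helper name):
   clean_symbol_name keeps alphanumerics, turns everything else into '_',
   and keeps '$' and '.' only on targets whose labels allow them.  */

static void
example_clean_symbol_name (void)
{
  char buf[] = "foo-bar.c";
  clean_symbol_name (buf);
  /* buf is now "foo_bar_c", or "foo_bar.c" where '.' is valid in labels.  */
}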
8824 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8826 /* Create a unique anonymous identifier. The identifier is still a
8827 valid assembly label. */
8829 tree
8830 make_anon_name ()
8832 const char *fmt =
8833 #if !defined (NO_DOT_IN_LABEL)
8835 #elif !defined (NO_DOLLAR_IN_LABEL)
8837 #else
8839 #endif
8840 "_anon_%d";
8842 char buf[24];
8843 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8844 gcc_checking_assert (len < int (sizeof (buf)));
8846 tree id = get_identifier_with_length (buf, len);
8847 IDENTIFIER_ANON_P (id) = true;
8849 return id;
8852 /* Generate a name for a special-purpose function.
8853 The generated name may need to be unique across the whole link.
8854 Changes to this function may also require corresponding changes to
8855 xstrdup_mask_random.
8856 TYPE is some string to identify the purpose of this function to the
8857 linker or collect2; it must start with an uppercase letter,
8858 one of:
8859 I - for constructors
8860 D - for destructors
8861 N - for C++ anonymous namespaces
8862 F - for DWARF unwind frame information. */
8864 tree
8865 get_file_function_name (const char *type)
8867 char *buf;
8868 const char *p;
8869 char *q;
8871 /* If we already have a name we know to be unique, just use that. */
8872 if (first_global_object_name)
8873 p = q = ASTRDUP (first_global_object_name);
8874 /* If the target is handling the constructors/destructors, they
8875 will be local to this file and the name is only necessary for
8876 debugging purposes.
8877    We also assign sub_I and sub_D suffixes to constructors called from
8878 the global static constructors. These are always local. */
8879 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8880 || (startswith (type, "sub_")
8881 && (type[4] == 'I' || type[4] == 'D')))
8883 const char *file = main_input_filename;
8884 if (! file)
8885 file = LOCATION_FILE (input_location);
8886 /* Just use the file's basename, because the full pathname
8887 might be quite long. */
8888 p = q = ASTRDUP (lbasename (file));
8890 else
8892 /* Otherwise, the name must be unique across the entire link.
8893 We don't have anything that we know to be unique to this translation
8894 unit, so use what we do have and throw in some randomness. */
8895 unsigned len;
8896 const char *name = weak_global_object_name;
8897 const char *file = main_input_filename;
8899 if (! name)
8900 name = "";
8901 if (! file)
8902 file = LOCATION_FILE (input_location);
8904 len = strlen (file);
8905 q = (char *) alloca (9 + 19 + len + 1);
8906 memcpy (q, file, len + 1);
8908 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8909 crc32_string (0, name), get_random_seed (false));
8911 p = q;
8914 clean_symbol_name (q);
8915 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8916 + strlen (type));
8918 /* Set up the name of the file-level functions we may need.
8919 Use a global object (which is already required to be unique over
8920 the program) rather than the file name (which imposes extra
8921 constraints). */
8922 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8924 return get_identifier (buf);
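/* Illustrative sketch, not part of tree.cc (hypothetical helper name): the
   shape of the identifier built above.  With TYPE "I" and a cleaned base
   name such as "foo_c", FILE_FUNCTION_FORMAT yields the familiar
   static-constructor symbol.  */

static void
example_file_function_name (void)
{
  char buf[32];
  sprintf (buf, FILE_FUNCTION_FORMAT, "I", "foo_c");
  /* buf is now "_GLOBAL__I_foo_c".  */
}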
8927 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8929 /* Complain that the tree code of NODE does not match the expected 0
8930 terminated list of trailing codes. The trailing code list can be
8931 empty, for a more vague error message. FILE, LINE, and FUNCTION
8932 are of the caller. */
8934 void
8935 tree_check_failed (const_tree node, const char *file,
8936 int line, const char *function, ...)
8938 va_list args;
8939 const char *buffer;
8940 unsigned length = 0;
8941 enum tree_code code;
8943 va_start (args, function);
8944 while ((code = (enum tree_code) va_arg (args, int)))
8945 length += 4 + strlen (get_tree_code_name (code));
8946 va_end (args);
8947 if (length)
8949 char *tmp;
8950 va_start (args, function);
8951 length += strlen ("expected ");
8952 buffer = tmp = (char *) alloca (length);
8953 length = 0;
8954 while ((code = (enum tree_code) va_arg (args, int)))
8956 const char *prefix = length ? " or " : "expected ";
8958 strcpy (tmp + length, prefix);
8959 length += strlen (prefix);
8960 strcpy (tmp + length, get_tree_code_name (code));
8961 length += strlen (get_tree_code_name (code));
8963 va_end (args);
8965 else
8966 buffer = "unexpected node";
8968 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8969 buffer, get_tree_code_name (TREE_CODE (node)),
8970 function, trim_filename (file), line);
8973 /* Complain that the tree code of NODE does match the expected 0
8974 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8975 the caller. */
8977 void
8978 tree_not_check_failed (const_tree node, const char *file,
8979 int line, const char *function, ...)
8981 va_list args;
8982 char *buffer;
8983 unsigned length = 0;
8984 enum tree_code code;
8986 va_start (args, function);
8987 while ((code = (enum tree_code) va_arg (args, int)))
8988 length += 4 + strlen (get_tree_code_name (code));
8989 va_end (args);
8990 va_start (args, function);
8991 buffer = (char *) alloca (length);
8992 length = 0;
8993 while ((code = (enum tree_code) va_arg (args, int)))
8995 if (length)
8997 strcpy (buffer + length, " or ");
8998 length += 4;
9000 strcpy (buffer + length, get_tree_code_name (code));
9001 length += strlen (get_tree_code_name (code));
9003 va_end (args);
9005 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9006 buffer, get_tree_code_name (TREE_CODE (node)),
9007 function, trim_filename (file), line);
9010 /* Similar to tree_check_failed, except that we check for a class of tree
9011 code, given in CL. */
9013 void
9014 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9015 const char *file, int line, const char *function)
9017 internal_error
9018 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9019 TREE_CODE_CLASS_STRING (cl),
9020 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9021 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9024 /* Similar to tree_check_failed, except that instead of specifying a
9025 dozen codes, use the knowledge that they're all sequential. */
9027 void
9028 tree_range_check_failed (const_tree node, const char *file, int line,
9029 const char *function, enum tree_code c1,
9030 enum tree_code c2)
9032 char *buffer;
9033 unsigned length = 0;
9034 unsigned int c;
9036 for (c = c1; c <= c2; ++c)
9037 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9039 length += strlen ("expected ");
9040 buffer = (char *) alloca (length);
9041 length = 0;
9043 for (c = c1; c <= c2; ++c)
9045 const char *prefix = length ? " or " : "expected ";
9047 strcpy (buffer + length, prefix);
9048 length += strlen (prefix);
9049 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9050 length += strlen (get_tree_code_name ((enum tree_code) c));
9053 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9054 buffer, get_tree_code_name (TREE_CODE (node)),
9055 function, trim_filename (file), line);
9059 /* Similar to tree_check_failed, except that we check that a tree does
9060    not belong to the tree code class given in CL.  */
9062 void
9063 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9064 const char *file, int line, const char *function)
9066 internal_error
9067 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9068 TREE_CODE_CLASS_STRING (cl),
9069 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9070 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9074 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9076 void
9077 omp_clause_check_failed (const_tree node, const char *file, int line,
9078 const char *function, enum omp_clause_code code)
9080 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9081 "in %s, at %s:%d",
9082 omp_clause_code_name[code],
9083 get_tree_code_name (TREE_CODE (node)),
9084 function, trim_filename (file), line);
9088 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9090 void
9091 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9092 const char *function, enum omp_clause_code c1,
9093 enum omp_clause_code c2)
9095 char *buffer;
9096 unsigned length = 0;
9097 unsigned int c;
9099 for (c = c1; c <= c2; ++c)
9100 length += 4 + strlen (omp_clause_code_name[c]);
9102 length += strlen ("expected ");
9103 buffer = (char *) alloca (length);
9104 length = 0;
9106 for (c = c1; c <= c2; ++c)
9108 const char *prefix = length ? " or " : "expected ";
9110 strcpy (buffer + length, prefix);
9111 length += strlen (prefix);
9112 strcpy (buffer + length, omp_clause_code_name[c]);
9113 length += strlen (omp_clause_code_name[c]);
9116 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9117 buffer, omp_clause_code_name[TREE_CODE (node)],
9118 function, trim_filename (file), line);
9122 #undef DEFTREESTRUCT
9123 #define DEFTREESTRUCT(VAL, NAME) NAME,
9125 static const char *ts_enum_names[] = {
9126 #include "treestruct.def"
9128 #undef DEFTREESTRUCT
9130 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9132 /* Similar to tree_class_check_failed, except that we check for
9133 whether CODE contains the tree structure identified by EN. */
9135 void
9136 tree_contains_struct_check_failed (const_tree node,
9137 const enum tree_node_structure_enum en,
9138 const char *file, int line,
9139 const char *function)
9141 internal_error
9142 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9143 TS_ENUM_NAME (en),
9144 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9148 /* Similar to above, except that the check is for the bounds of a
9149    TREE_INT_CST's (dynamically sized) vector of elements.  */
9151 void
9152 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9153 const char *function)
9155 internal_error
9156 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9157 "at %s:%d",
9158 idx + 1, len, function, trim_filename (file), line);
9161 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9162 (dynamically sized) vector. */
9164 void
9165 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9166 const char *function)
9168 internal_error
9169 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9170 idx + 1, len, function, trim_filename (file), line);
9173 /* Similar to above, except that the check is for the bounds of the operand
9174 vector of an expression node EXP. */
9176 void
9177 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9178 int line, const char *function)
9180 enum tree_code code = TREE_CODE (exp);
9181 internal_error
9182 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9183 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9184 function, trim_filename (file), line);
9187 /* Similar to above, except that the check is for the number of
9188 operands of an OMP_CLAUSE node. */
9190 void
9191 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9192 int line, const char *function)
9194 internal_error
9195 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9196 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9197 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9198 trim_filename (file), line);
9200 #endif /* ENABLE_TREE_CHECKING */
9202 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9203 and mapped to the machine mode MODE. Initialize its fields and build
9204 the information necessary for debugging output. */
9206 static tree
9207 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9209 tree t;
9210 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9212 t = make_node (VECTOR_TYPE);
9213 TREE_TYPE (t) = mv_innertype;
9214 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9215 SET_TYPE_MODE (t, mode);
9217 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9218 SET_TYPE_STRUCTURAL_EQUALITY (t);
9219 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9220 || mode != VOIDmode)
9221 && !VECTOR_BOOLEAN_TYPE_P (t))
9222 TYPE_CANONICAL (t)
9223 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9225 layout_type (t);
9227 hashval_t hash = type_hash_canon_hash (t);
9228 t = type_hash_canon (hash, t);
9230 /* We have built a main variant, based on the main variant of the
9231 inner type. Use it to build the variant we return. */
9232 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9233 && TREE_TYPE (t) != innertype)
9234 return build_type_attribute_qual_variant (t,
9235 TYPE_ATTRIBUTES (innertype),
9236 TYPE_QUALS (innertype));
9238 return t;
9241 static tree
9242 make_or_reuse_type (unsigned size, int unsignedp)
9244 int i;
9246 if (size == INT_TYPE_SIZE)
9247 return unsignedp ? unsigned_type_node : integer_type_node;
9248 if (size == CHAR_TYPE_SIZE)
9249 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9250 if (size == SHORT_TYPE_SIZE)
9251 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9252 if (size == LONG_TYPE_SIZE)
9253 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9254 if (size == LONG_LONG_TYPE_SIZE)
9255 return (unsignedp ? long_long_unsigned_type_node
9256 : long_long_integer_type_node);
9258 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9259 if (size == int_n_data[i].bitsize
9260 && int_n_enabled_p[i])
9261 return (unsignedp ? int_n_trees[i].unsigned_type
9262 : int_n_trees[i].signed_type);
9264 if (unsignedp)
9265 return make_unsigned_type (size);
9266 else
9267 return make_signed_type (size);
9270 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9272 static tree
9273 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9275 if (satp)
9277 if (size == SHORT_FRACT_TYPE_SIZE)
9278 return unsignedp ? sat_unsigned_short_fract_type_node
9279 : sat_short_fract_type_node;
9280 if (size == FRACT_TYPE_SIZE)
9281 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9282 if (size == LONG_FRACT_TYPE_SIZE)
9283 return unsignedp ? sat_unsigned_long_fract_type_node
9284 : sat_long_fract_type_node;
9285 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9286 return unsignedp ? sat_unsigned_long_long_fract_type_node
9287 : sat_long_long_fract_type_node;
9289 else
9291 if (size == SHORT_FRACT_TYPE_SIZE)
9292 return unsignedp ? unsigned_short_fract_type_node
9293 : short_fract_type_node;
9294 if (size == FRACT_TYPE_SIZE)
9295 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9296 if (size == LONG_FRACT_TYPE_SIZE)
9297 return unsignedp ? unsigned_long_fract_type_node
9298 : long_fract_type_node;
9299 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9300 return unsignedp ? unsigned_long_long_fract_type_node
9301 : long_long_fract_type_node;
9304 return make_fract_type (size, unsignedp, satp);
9307 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9309 static tree
9310 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9312 if (satp)
9314 if (size == SHORT_ACCUM_TYPE_SIZE)
9315 return unsignedp ? sat_unsigned_short_accum_type_node
9316 : sat_short_accum_type_node;
9317 if (size == ACCUM_TYPE_SIZE)
9318 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9319 if (size == LONG_ACCUM_TYPE_SIZE)
9320 return unsignedp ? sat_unsigned_long_accum_type_node
9321 : sat_long_accum_type_node;
9322 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9323 return unsignedp ? sat_unsigned_long_long_accum_type_node
9324 : sat_long_long_accum_type_node;
9326 else
9328 if (size == SHORT_ACCUM_TYPE_SIZE)
9329 return unsignedp ? unsigned_short_accum_type_node
9330 : short_accum_type_node;
9331 if (size == ACCUM_TYPE_SIZE)
9332 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9333 if (size == LONG_ACCUM_TYPE_SIZE)
9334 return unsignedp ? unsigned_long_accum_type_node
9335 : long_accum_type_node;
9336 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9337 return unsignedp ? unsigned_long_long_accum_type_node
9338 : long_long_accum_type_node;
9341 return make_accum_type (size, unsignedp, satp);
9345 /* Create an atomic variant node for TYPE. This routine is called
9346 during initialization of data types to create the 5 basic atomic
9347 types. The generic build_variant_type function requires these to
9348 already be set up in order to function properly, so cannot be
9349 called from there. If ALIGN is non-zero, then ensure alignment is
9350 overridden to this value. */
9352 static tree
9353 build_atomic_base (tree type, unsigned int align)
9355 tree t;
9357   /* Make sure it's not already registered.  */
9358 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9359 return t;
9361 t = build_variant_type_copy (type);
9362 set_type_quals (t, TYPE_QUAL_ATOMIC);
9364 if (align)
9365 SET_TYPE_ALIGN (t, align);
9367 return t;
9370 /* Information about the _FloatN and _FloatNx types. This must be in
9371 the same order as the corresponding TI_* enum values. */
9372 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9374 { 16, false },
9375 { 32, false },
9376 { 64, false },
9377 { 128, false },
9378 { 32, true },
9379 { 64, true },
9380 { 128, true },
9384 /* Create nodes for all integer types (and error_mark_node) using the sizes
9385 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9387 void
9388 build_common_tree_nodes (bool signed_char)
9390 int i;
9392 error_mark_node = make_node (ERROR_MARK);
9393 TREE_TYPE (error_mark_node) = error_mark_node;
9395 initialize_sizetypes ();
9397 /* Define both `signed char' and `unsigned char'. */
9398 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9399 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9400 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9401 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9403 /* Define `char', which is like either `signed char' or `unsigned char'
9404 but not the same as either. */
9405 char_type_node
9406 = (signed_char
9407 ? make_signed_type (CHAR_TYPE_SIZE)
9408 : make_unsigned_type (CHAR_TYPE_SIZE));
9409 TYPE_STRING_FLAG (char_type_node) = 1;
9411 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9412 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9413 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9414 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9415 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9416 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9417 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9418 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9420 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9422 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9423 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9425 if (int_n_enabled_p[i])
9427 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9428 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9432 /* Define a boolean type. This type only represents boolean values but
9433 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9434 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9435 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9436 TYPE_PRECISION (boolean_type_node) = 1;
9437 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9439 /* Define what type to use for size_t. */
9440 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9441 size_type_node = unsigned_type_node;
9442 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9443 size_type_node = long_unsigned_type_node;
9444 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9445 size_type_node = long_long_unsigned_type_node;
9446 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9447 size_type_node = short_unsigned_type_node;
9448 else
9450 int i;
9452 size_type_node = NULL_TREE;
9453 for (i = 0; i < NUM_INT_N_ENTS; i++)
9454 if (int_n_enabled_p[i])
9456 char name[50], altname[50];
9457 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9458 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9460 if (strcmp (name, SIZE_TYPE) == 0
9461 || strcmp (altname, SIZE_TYPE) == 0)
9463 size_type_node = int_n_trees[i].unsigned_type;
9466 if (size_type_node == NULL_TREE)
9467 gcc_unreachable ();
9470 /* Define what type to use for ptrdiff_t. */
9471 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9472 ptrdiff_type_node = integer_type_node;
9473 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9474 ptrdiff_type_node = long_integer_type_node;
9475 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9476 ptrdiff_type_node = long_long_integer_type_node;
9477 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9478 ptrdiff_type_node = short_integer_type_node;
9479 else
9481 ptrdiff_type_node = NULL_TREE;
9482 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9483 if (int_n_enabled_p[i])
9485 char name[50], altname[50];
9486 sprintf (name, "__int%d", int_n_data[i].bitsize);
9487 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9489 if (strcmp (name, PTRDIFF_TYPE) == 0
9490 || strcmp (altname, PTRDIFF_TYPE) == 0)
9491 ptrdiff_type_node = int_n_trees[i].signed_type;
9493 if (ptrdiff_type_node == NULL_TREE)
9494 gcc_unreachable ();
9497 /* Fill in the rest of the sized types. Reuse existing type nodes
9498 when possible. */
9499 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9500 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9501 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9502 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9503 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9505 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9506 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9507 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9508 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9509 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9511   /* Don't call build_qualified_type for atomics.  That routine does
9512 special processing for atomics, and until they are initialized
9513 it's better not to make that call.
9515 Check to see if there is a target override for atomic types. */
9517 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9518 targetm.atomic_align_for_mode (QImode));
9519 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9520 targetm.atomic_align_for_mode (HImode));
9521 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9522 targetm.atomic_align_for_mode (SImode));
9523 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9524 targetm.atomic_align_for_mode (DImode));
9525 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9526 targetm.atomic_align_for_mode (TImode));
9528 access_public_node = get_identifier ("public");
9529 access_protected_node = get_identifier ("protected");
9530 access_private_node = get_identifier ("private");
9532   /* Define these next since types below may use them.  */
9533 integer_zero_node = build_int_cst (integer_type_node, 0);
9534 integer_one_node = build_int_cst (integer_type_node, 1);
9535 integer_three_node = build_int_cst (integer_type_node, 3);
9536 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9538 size_zero_node = size_int (0);
9539 size_one_node = size_int (1);
9540 bitsize_zero_node = bitsize_int (0);
9541 bitsize_one_node = bitsize_int (1);
9542 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9544 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9545 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9547 void_type_node = make_node (VOID_TYPE);
9548 layout_type (void_type_node);
9550 /* We are not going to have real types in C with less than byte alignment,
9551 so we might as well not have any types that claim to have it. */
9552 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9553 TYPE_USER_ALIGN (void_type_node) = 0;
9555 void_node = make_node (VOID_CST);
9556 TREE_TYPE (void_node) = void_type_node;
9558 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9560 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9561 layout_type (TREE_TYPE (null_pointer_node));
9563 ptr_type_node = build_pointer_type (void_type_node);
9564 const_ptr_type_node
9565 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9566 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9567 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9569 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9571 float_type_node = make_node (REAL_TYPE);
9572 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9573 layout_type (float_type_node);
9575 double_type_node = make_node (REAL_TYPE);
9576 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9577 layout_type (double_type_node);
9579 long_double_type_node = make_node (REAL_TYPE);
9580 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9581 layout_type (long_double_type_node);
9583 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9585 int n = floatn_nx_types[i].n;
9586 bool extended = floatn_nx_types[i].extended;
9587 scalar_float_mode mode;
9588 if (!targetm.floatn_mode (n, extended).exists (&mode))
9589 continue;
9590 int precision = GET_MODE_PRECISION (mode);
9591 /* Work around the rs6000 KFmode having precision 113 not
9592 128. */
9593 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9594 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9595 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9596 if (!extended)
9597 gcc_assert (min_precision == n);
9598 if (precision < min_precision)
9599 precision = min_precision;
9600 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9601 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9602 layout_type (FLOATN_NX_TYPE_NODE (i));
9603 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9605 float128t_type_node = float128_type_node;
9606 #ifdef HAVE_BFmode
9607 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9608 && targetm.scalar_mode_supported_p (BFmode)
9609 && targetm.libgcc_floating_mode_supported_p (BFmode))
9611 bfloat16_type_node = make_node (REAL_TYPE);
9612 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9613 layout_type (bfloat16_type_node);
9614 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9616 #endif
9618 float_ptr_type_node = build_pointer_type (float_type_node);
9619 double_ptr_type_node = build_pointer_type (double_type_node);
9620 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9621 integer_ptr_type_node = build_pointer_type (integer_type_node);
9623 /* Fixed size integer types. */
9624 uint16_type_node = make_or_reuse_type (16, 1);
9625 uint32_type_node = make_or_reuse_type (32, 1);
9626 uint64_type_node = make_or_reuse_type (64, 1);
9627 if (targetm.scalar_mode_supported_p (TImode))
9628 uint128_type_node = make_or_reuse_type (128, 1);
9630 /* Decimal float types. */
9631 if (targetm.decimal_float_supported_p ())
9633 dfloat32_type_node = make_node (REAL_TYPE);
9634 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9635 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9636 layout_type (dfloat32_type_node);
9638 dfloat64_type_node = make_node (REAL_TYPE);
9639 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9640 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9641 layout_type (dfloat64_type_node);
9643 dfloat128_type_node = make_node (REAL_TYPE);
9644 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9645 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9646 layout_type (dfloat128_type_node);
9649 complex_integer_type_node = build_complex_type (integer_type_node, true);
9650 complex_float_type_node = build_complex_type (float_type_node, true);
9651 complex_double_type_node = build_complex_type (double_type_node, true);
9652 complex_long_double_type_node = build_complex_type (long_double_type_node,
9653 true);
9655 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9657 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9658 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9659 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9662 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9663 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9664 sat_ ## KIND ## _type_node = \
9665 make_sat_signed_ ## KIND ## _type (SIZE); \
9666 sat_unsigned_ ## KIND ## _type_node = \
9667 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9668 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9669 unsigned_ ## KIND ## _type_node = \
9670 make_unsigned_ ## KIND ## _type (SIZE);
9672 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9673 sat_ ## WIDTH ## KIND ## _type_node = \
9674 make_sat_signed_ ## KIND ## _type (SIZE); \
9675 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9676 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9677 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9678 unsigned_ ## WIDTH ## KIND ## _type_node = \
9679 make_unsigned_ ## KIND ## _type (SIZE);
9681 /* Make fixed-point type nodes based on four different widths. */
9682 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9683 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9684 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9685 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9686 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9688 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9689 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9690 NAME ## _type_node = \
9691 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9692 u ## NAME ## _type_node = \
9693 make_or_reuse_unsigned_ ## KIND ## _type \
9694 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9695 sat_ ## NAME ## _type_node = \
9696 make_or_reuse_sat_signed_ ## KIND ## _type \
9697 (GET_MODE_BITSIZE (MODE ## mode)); \
9698 sat_u ## NAME ## _type_node = \
9699 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9700 (GET_MODE_BITSIZE (U ## MODE ## mode));
9702 /* Fixed-point type and mode nodes. */
9703 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9704 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9705 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9706 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9707 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9708 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9709 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9710 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9711 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9712 MAKE_FIXED_MODE_NODE (accum, da, DA)
9713 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9716 tree t = targetm.build_builtin_va_list ();
9718 /* Many back-ends define record types without setting TYPE_NAME.
9719 If we copied the record type here, we'd keep the original
9720 record type without a name. This breaks name mangling. So,
9721 don't copy record types and let c_common_nodes_and_builtins()
9722 declare the type to be __builtin_va_list. */
9723 if (TREE_CODE (t) != RECORD_TYPE)
9724 t = build_variant_type_copy (t);
9726 va_list_type_node = t;
9729 /* SCEV analyzer global shared trees. */
9730 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9731 TREE_TYPE (chrec_dont_know) = void_type_node;
9732 chrec_known = make_node (SCEV_KNOWN);
9733 TREE_TYPE (chrec_known) = void_type_node;
9736 /* Modify DECL for given flags.
9737 TM_PURE attribute is set only on types, so the function will modify
9738 DECL's type when ECF_TM_PURE is used. */
9740 void
9741 set_call_expr_flags (tree decl, int flags)
9743 if (flags & ECF_NOTHROW)
9744 TREE_NOTHROW (decl) = 1;
9745 if (flags & ECF_CONST)
9746 TREE_READONLY (decl) = 1;
9747 if (flags & ECF_PURE)
9748 DECL_PURE_P (decl) = 1;
9749 if (flags & ECF_LOOPING_CONST_OR_PURE)
9750 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9751 if (flags & ECF_NOVOPS)
9752 DECL_IS_NOVOPS (decl) = 1;
9753 if (flags & ECF_NORETURN)
9754 TREE_THIS_VOLATILE (decl) = 1;
9755 if (flags & ECF_MALLOC)
9756 DECL_IS_MALLOC (decl) = 1;
9757 if (flags & ECF_RETURNS_TWICE)
9758 DECL_IS_RETURNS_TWICE (decl) = 1;
9759 if (flags & ECF_LEAF)
9760 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9761 NULL, DECL_ATTRIBUTES (decl));
9762 if (flags & ECF_COLD)
9763 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9764 NULL, DECL_ATTRIBUTES (decl));
9765 if (flags & ECF_RET1)
9766 DECL_ATTRIBUTES (decl)
9767 = tree_cons (get_identifier ("fn spec"),
9768 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9769 DECL_ATTRIBUTES (decl));
9770 if ((flags & ECF_TM_PURE) && flag_tm)
9771 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9772 if ((flags & ECF_XTHROW))
9773 DECL_ATTRIBUTES (decl)
9774 = tree_cons (get_identifier ("expected_throw"),
9775 NULL, DECL_ATTRIBUTES (decl));
9776 /* Looping const or pure is implied by noreturn.
9777 There is currently no way to declare looping const or looping pure alone. */
9778 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9779 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9783 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9785 static void
9786 local_define_builtin (const char *name, tree type, enum built_in_function code,
9787 const char *library_name, int ecf_flags)
9789 tree decl;
9791 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9792 library_name, NULL_TREE);
9793 set_call_expr_flags (decl, ecf_flags);
9795 set_builtin_decl (code, decl, true);
9798 /* Call this function after instantiating all builtins that the language
9799 front end cares about. This will build the rest of the builtins
9800 and internal functions that are relied upon by the tree optimizers and
9801 the middle-end. */
9803 void
9804 build_common_builtin_nodes (void)
9806 tree tmp, ftype;
9807 int ecf_flags;
9809 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9811 ftype = build_function_type_list (void_type_node,
9812 ptr_type_node,
9813 ptr_type_node,
9814 integer_type_node,
9815 NULL_TREE);
9816 local_define_builtin ("__builtin_clear_padding", ftype,
9817 BUILT_IN_CLEAR_PADDING,
9818 "__builtin_clear_padding",
9819 ECF_LEAF | ECF_NOTHROW);
9822 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9823 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9824 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9825 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9827 ftype = build_function_type (void_type_node, void_list_node);
9828 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9829 local_define_builtin ("__builtin_unreachable", ftype,
9830 BUILT_IN_UNREACHABLE,
9831 "__builtin_unreachable",
9832 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9833 | ECF_CONST | ECF_COLD);
9834 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9835 local_define_builtin ("__builtin_unreachable trap", ftype,
9836 BUILT_IN_UNREACHABLE_TRAP,
9837 "__builtin_unreachable trap",
9838 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9839 | ECF_CONST | ECF_COLD);
9840 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9841 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9842 "abort",
9843 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9844 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9845 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9846 "__builtin_trap",
9847 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9850 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9851 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9853 ftype = build_function_type_list (ptr_type_node,
9854 ptr_type_node, const_ptr_type_node,
9855 size_type_node, NULL_TREE);
9857 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9858 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9859 "memcpy", ECF_NOTHROW | ECF_LEAF);
9860 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9861 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9862 "memmove", ECF_NOTHROW | ECF_LEAF);
9865 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9867 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9868 const_ptr_type_node, size_type_node,
9869 NULL_TREE);
9870 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9871 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9874 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9876 ftype = build_function_type_list (ptr_type_node,
9877 ptr_type_node, integer_type_node,
9878 size_type_node, NULL_TREE);
9879 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9880 "memset", ECF_NOTHROW | ECF_LEAF);
9883 /* If we're checking the stack, `alloca' can throw. */
9884 const int alloca_flags
9885 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9887 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9889 ftype = build_function_type_list (ptr_type_node,
9890 size_type_node, NULL_TREE);
9891 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9892 "alloca", alloca_flags);
9895 ftype = build_function_type_list (ptr_type_node, size_type_node,
9896 size_type_node, NULL_TREE);
9897 local_define_builtin ("__builtin_alloca_with_align", ftype,
9898 BUILT_IN_ALLOCA_WITH_ALIGN,
9899 "__builtin_alloca_with_align",
9900 alloca_flags);
9902 ftype = build_function_type_list (ptr_type_node, size_type_node,
9903 size_type_node, size_type_node, NULL_TREE);
9904 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9905 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9906 "__builtin_alloca_with_align_and_max",
9907 alloca_flags);
9909 ftype = build_function_type_list (void_type_node,
9910 ptr_type_node, ptr_type_node,
9911 ptr_type_node, NULL_TREE);
9912 local_define_builtin ("__builtin_init_trampoline", ftype,
9913 BUILT_IN_INIT_TRAMPOLINE,
9914 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9915 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9916 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9917 "__builtin_init_heap_trampoline",
9918 ECF_NOTHROW | ECF_LEAF);
9919 local_define_builtin ("__builtin_init_descriptor", ftype,
9920 BUILT_IN_INIT_DESCRIPTOR,
9921 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9923 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9924 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9925 BUILT_IN_ADJUST_TRAMPOLINE,
9926 "__builtin_adjust_trampoline",
9927 ECF_CONST | ECF_NOTHROW);
9928 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9929 BUILT_IN_ADJUST_DESCRIPTOR,
9930 "__builtin_adjust_descriptor",
9931 ECF_CONST | ECF_NOTHROW);
9933 ftype = build_function_type_list (void_type_node,
9934 ptr_type_node, ptr_type_node, NULL_TREE);
9935 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9936 local_define_builtin ("__builtin___clear_cache", ftype,
9937 BUILT_IN_CLEAR_CACHE,
9938 "__clear_cache",
9939 ECF_NOTHROW);
9941 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9942 BUILT_IN_NONLOCAL_GOTO,
9943 "__builtin_nonlocal_goto",
9944 ECF_NORETURN | ECF_NOTHROW);
9946 tree ptr_ptr_type_node = build_pointer_type (ptr_type_node);
9948 if (!builtin_decl_explicit_p (BUILT_IN_GCC_NESTED_PTR_CREATED))
9950 ftype = build_function_type_list (void_type_node,
9951 ptr_type_node, // void *chain
9952 ptr_type_node, // void *func
9953 ptr_ptr_type_node, // void **dst
9954 NULL_TREE);
9955 local_define_builtin ("__builtin___gcc_nested_func_ptr_created", ftype,
9956 BUILT_IN_GCC_NESTED_PTR_CREATED,
9957 "__gcc_nested_func_ptr_created", ECF_NOTHROW);
9960 if (!builtin_decl_explicit_p (BUILT_IN_GCC_NESTED_PTR_DELETED))
9962 ftype = build_function_type_list (void_type_node, NULL_TREE);
9963 local_define_builtin ("__builtin___gcc_nested_func_ptr_deleted", ftype,
9964 BUILT_IN_GCC_NESTED_PTR_DELETED,
9965 "__gcc_nested_func_ptr_deleted", ECF_NOTHROW);
9968 ftype = build_function_type_list (void_type_node,
9969 ptr_type_node, ptr_type_node, NULL_TREE);
9970 local_define_builtin ("__builtin_setjmp_setup", ftype,
9971 BUILT_IN_SETJMP_SETUP,
9972 "__builtin_setjmp_setup", ECF_NOTHROW);
9974 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9975 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9976 BUILT_IN_SETJMP_RECEIVER,
9977 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9979 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9980 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9981 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9983 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9984 local_define_builtin ("__builtin_stack_restore", ftype,
9985 BUILT_IN_STACK_RESTORE,
9986 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9988 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9989 const_ptr_type_node, size_type_node,
9990 NULL_TREE);
9991 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9992 "__builtin_memcmp_eq",
9993 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9995 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9996 "__builtin_strncmp_eq",
9997 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9999 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10000 "__builtin_strcmp_eq",
10001 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10003 /* If there's a possibility that we might use the ARM EABI, build the
10004 alternate __cxa_end_cleanup node used to resume from C++. */
10005 if (targetm.arm_eabi_unwinder)
10007 ftype = build_function_type_list (void_type_node, NULL_TREE);
10008 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10009 BUILT_IN_CXA_END_CLEANUP,
10010 "__cxa_end_cleanup",
10011 ECF_NORETURN | ECF_XTHROW | ECF_LEAF);
10014 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10015 local_define_builtin ("__builtin_unwind_resume", ftype,
10016 BUILT_IN_UNWIND_RESUME,
10017 ((targetm_common.except_unwind_info (&global_options)
10018 == UI_SJLJ)
10019 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10020 ECF_NORETURN | ECF_XTHROW);
10022 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10024 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10025 NULL_TREE);
10026 local_define_builtin ("__builtin_return_address", ftype,
10027 BUILT_IN_RETURN_ADDRESS,
10028 "__builtin_return_address",
10029 ECF_NOTHROW);
10032 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10033 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10035 ftype = build_function_type_list (void_type_node, ptr_type_node,
10036 ptr_type_node, NULL_TREE);
10037 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10038 local_define_builtin ("__cyg_profile_func_enter", ftype,
10039 BUILT_IN_PROFILE_FUNC_ENTER,
10040 "__cyg_profile_func_enter", 0);
10041 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10042 local_define_builtin ("__cyg_profile_func_exit", ftype,
10043 BUILT_IN_PROFILE_FUNC_EXIT,
10044 "__cyg_profile_func_exit", 0);
10047 /* The exception object and filter values from the runtime. The argument
10048 must be zero before exception lowering, i.e. from the front end. After
10049 exception lowering, it will be the region number for the exception
10050 landing pad. These functions are PURE instead of CONST to prevent
10051 them from being hoisted past the exception edge that will initialize
10052 their values in the landing pad. */
10053 ftype = build_function_type_list (ptr_type_node,
10054 integer_type_node, NULL_TREE);
10055 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10056 /* Only use TM_PURE if we have TM language support. */
10057 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10058 ecf_flags |= ECF_TM_PURE;
10059 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10060 "__builtin_eh_pointer", ecf_flags);
10062 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10063 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10064 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10065 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10067 ftype = build_function_type_list (void_type_node,
10068 integer_type_node, integer_type_node,
10069 NULL_TREE);
10070 local_define_builtin ("__builtin_eh_copy_values", ftype,
10071 BUILT_IN_EH_COPY_VALUES,
10072 "__builtin_eh_copy_values", ECF_NOTHROW);
10074 /* Complex multiplication and division. These are handled as builtins
10075 rather than optabs because emit_library_call_value doesn't support
10076 complex. Further, we can do slightly better with folding these
10077 beasties if the real and imaginary parts of the arguments are separate. */
10079 int mode;
10081 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10083 char mode_name_buf[4], *q;
10084 const char *p;
10085 enum built_in_function mcode, dcode;
10086 tree type, inner_type;
10087 const char *prefix = "__";
10089 if (targetm.libfunc_gnu_prefix)
10090 prefix = "__gnu_";
10092 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10093 if (type == NULL)
10094 continue;
10095 inner_type = TREE_TYPE (type);
10097 ftype = build_function_type_list (type, inner_type, inner_type,
10098 inner_type, inner_type, NULL_TREE);
10100 mcode = ((enum built_in_function)
10101 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10102 dcode = ((enum built_in_function)
10103 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10105 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10106 *q = TOLOWER (*p);
10107 *q = '\0';
10109 /* For -ftrapping-math these should throw from a former
10110 -fnon-call-exception stmt. */
10111 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10112 NULL);
10113 local_define_builtin (built_in_names[mcode], ftype, mcode,
10114 built_in_names[mcode],
10115 ECF_CONST | ECF_LEAF);
10117 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10118 NULL);
10119 local_define_builtin (built_in_names[dcode], ftype, dcode,
10120 built_in_names[dcode],
10121 ECF_CONST | ECF_LEAF);
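/* Worked example: for SCmode the mode name "SC" is lowered to "sc", so with
   the default "__" prefix the loop above registers "__mulsc3" and
   "__divsc3" (or "__gnu_mulsc3" / "__gnu_divsc3" when the target uses the
   GNU prefix), matching the libgcc support routines of those names.  */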
10125 init_internal_fns ();
10128 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10129 better way.
10131 If we requested a pointer to a vector, build up the pointers that
10132 we stripped off while looking for the inner type. Similarly for
10133 return values from functions.
10135 The argument TYPE is the top of the chain, and BOTTOM is the
10136 new type which we will point to. */
10138 tree
10139 reconstruct_complex_type (tree type, tree bottom)
10141 tree inner, outer;
10143 if (TREE_CODE (type) == POINTER_TYPE)
10145 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10146 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10147 TYPE_REF_CAN_ALIAS_ALL (type));
10149 else if (TREE_CODE (type) == REFERENCE_TYPE)
10151 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10152 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10153 TYPE_REF_CAN_ALIAS_ALL (type));
10155 else if (TREE_CODE (type) == ARRAY_TYPE)
10157 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10158 outer = build_array_type (inner, TYPE_DOMAIN (type));
10160 else if (TREE_CODE (type) == FUNCTION_TYPE)
10162 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10163 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10164 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10166 else if (TREE_CODE (type) == METHOD_TYPE)
10168 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10169 /* The build_method_type_directly() routine prepends 'this' to the argument
10170 list, so we must compensate by getting rid of it. */
10171 outer
10172 = build_method_type_directly
10173 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10174 inner,
10175 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10177 else if (TREE_CODE (type) == OFFSET_TYPE)
10179 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10180 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10182 else
10183 return bottom;
10185 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10186 TYPE_QUALS (type));
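/* Example sketch (hypothetical types): if TYPE is a pointer to a vector of
   floats and BOTTOM is a freshly built vector type, the result is a pointer
   to BOTTOM that keeps the original pointer's mode, qualifiers and
   attributes, i.e.

     tree new_ptr = reconstruct_complex_type (old_vec_ptr, new_vec_type);

   where OLD_VEC_PTR and NEW_VEC_TYPE are hypothetical trees.  */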
10189 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10190 the inner type. */
10191 tree
10192 build_vector_type_for_mode (tree innertype, machine_mode mode)
10194 poly_int64 nunits;
10195 unsigned int bitsize;
10197 switch (GET_MODE_CLASS (mode))
10199 case MODE_VECTOR_BOOL:
10200 case MODE_VECTOR_INT:
10201 case MODE_VECTOR_FLOAT:
10202 case MODE_VECTOR_FRACT:
10203 case MODE_VECTOR_UFRACT:
10204 case MODE_VECTOR_ACCUM:
10205 case MODE_VECTOR_UACCUM:
10206 nunits = GET_MODE_NUNITS (mode);
10207 break;
10209 case MODE_INT:
10210 /* Check that there are no leftover bits. */
10211 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10212 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10213 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10214 break;
10216 default:
10217 gcc_unreachable ();
10220 return make_vector_type (innertype, nunits, mode);
10223 /* Similarly, but takes the inner type and number of units, which must be
10224 a power of two. */
10226 tree
10227 build_vector_type (tree innertype, poly_int64 nunits)
10229 return make_vector_type (innertype, nunits, VOIDmode);
10232 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10234 tree
10235 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10237 gcc_assert (mask_mode != BLKmode);
10239 unsigned HOST_WIDE_INT esize;
10240 if (VECTOR_MODE_P (mask_mode))
10242 poly_uint64 vsize = GET_MODE_PRECISION (mask_mode);
10243 esize = vector_element_size (vsize, nunits);
10245 else
10246 esize = 1;
10248 tree bool_type = build_nonstandard_boolean_type (esize);
10250 return make_vector_type (bool_type, nunits, mask_mode);
10253 /* Build a vector type that holds one boolean result for each element of
10254 vector type VECTYPE. The public interface for this operation is
10255 truth_type_for. */
10257 static tree
10258 build_truth_vector_type_for (tree vectype)
10260 machine_mode vector_mode = TYPE_MODE (vectype);
10261 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10263 machine_mode mask_mode;
10264 if (VECTOR_MODE_P (vector_mode)
10265 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10266 return build_truth_vector_type_for_mode (nunits, mask_mode);
10268 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10269 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10270 tree bool_type = build_nonstandard_boolean_type (esize);
10272 return make_vector_type (bool_type, nunits, VOIDmode);
10275 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10276 set. */
10278 tree
10279 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10281 tree t = make_vector_type (innertype, nunits, VOIDmode);
10282 tree cand;
10283 /* We always build the non-opaque variant before the opaque one,
10284 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10285 cand = TYPE_NEXT_VARIANT (t);
10286 if (cand
10287 && TYPE_VECTOR_OPAQUE (cand)
10288 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10289 return cand;
10290 /* Otherwise build a variant type and make sure to queue it after
10291 the non-opaque type. */
10292 cand = build_distinct_type_copy (t);
10293 TYPE_VECTOR_OPAQUE (cand) = true;
10294 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10295 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10296 TYPE_NEXT_VARIANT (t) = cand;
10297 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10298 /* Type variants have no alias set defined. */
10299 TYPE_ALIAS_SET (cand) = -1;
10300 return cand;
10303 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10305 static poly_wide_int
10306 vector_cst_int_elt (const_tree t, unsigned int i)
10308 /* First handle elements that are directly encoded. */
10309 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10310 if (i < encoded_nelts)
10311 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10313 /* Identify the pattern that contains element I and work out the index of
10314 the last encoded element for that pattern. */
10315 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10316 unsigned int pattern = i % npatterns;
10317 unsigned int count = i / npatterns;
10318 unsigned int final_i = encoded_nelts - npatterns + pattern;
10320 /* If there are no steps, the final encoded value is the right one. */
10321 if (!VECTOR_CST_STEPPED_P (t))
10322 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10324 /* Otherwise work out the value from the last two encoded elements. */
10325 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10326 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10327 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10328 return wi::to_poly_wide (v2) + (count - 2) * diff;
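/* Worked example: the stepped VECTOR_CST { 0, 1, 2, 3, ... } is encoded
   with one pattern and three elements per pattern, so the encoded elements
   are 0, 1 and 2.  For any i >= 3 the code above finds v1 = 1, v2 = 2,
   diff = 1 and returns 2 + (i - 2) * 1 == i, as expected.  */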
10331 /* Return the value of element I of VECTOR_CST T. */
10333 tree
10334 vector_cst_elt (const_tree t, unsigned int i)
10336 /* First handle elements that are directly encoded. */
10337 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10338 if (i < encoded_nelts)
10339 return VECTOR_CST_ENCODED_ELT (t, i);
10341 /* If there are no steps, the final encoded value is the right one. */
10342 if (!VECTOR_CST_STEPPED_P (t))
10344 /* Identify the pattern that contains element I and work out the index of
10345 the last encoded element for that pattern. */
10346 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10347 unsigned int pattern = i % npatterns;
10348 unsigned int final_i = encoded_nelts - npatterns + pattern;
10349 return VECTOR_CST_ENCODED_ELT (t, final_i);
10352 /* Otherwise work out the value from the last two encoded elements. */
10353 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10354 vector_cst_int_elt (t, i));
10357 /* Given an initializer INIT, return TRUE if INIT is zero or some
10358 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10359 null, set *NONZERO if and only if INIT is known not to be all
10360 zeros. The combination of a false return value and *NONZERO
10361 false implies that INIT may, but need not, be all zeros. Other
10362 combinations indicate definitive answers. */
10364 bool
10365 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10367 bool dummy;
10368 if (!nonzero)
10369 nonzero = &dummy;
10371 /* Conservatively clear NONZERO and set it only if INIT is definitely
10372 not all zero. */
10373 *nonzero = false;
10375 STRIP_NOPS (init);
10377 unsigned HOST_WIDE_INT off = 0;
10379 switch (TREE_CODE (init))
10381 case INTEGER_CST:
10382 if (integer_zerop (init))
10383 return true;
10385 *nonzero = true;
10386 return false;
10388 case REAL_CST:
10389 /* ??? Note that this is not correct for C4X float formats. There,
10390 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10391 negative exponent. */
10392 if (real_zerop (init)
10393 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10394 return true;
10396 *nonzero = true;
10397 return false;
10399 case FIXED_CST:
10400 if (fixed_zerop (init))
10401 return true;
10403 *nonzero = true;
10404 return false;
10406 case COMPLEX_CST:
10407 if (integer_zerop (init)
10408 || (real_zerop (init)
10409 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10410 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10411 return true;
10413 *nonzero = true;
10414 return false;
10416 case VECTOR_CST:
10417 if (VECTOR_CST_NPATTERNS (init) == 1
10418 && VECTOR_CST_DUPLICATE_P (init)
10419 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10420 return true;
10422 *nonzero = true;
10423 return false;
10425 case CONSTRUCTOR:
10427 if (TREE_CLOBBER_P (init))
10428 return false;
10430 unsigned HOST_WIDE_INT idx;
10431 tree elt;
10433 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10434 if (!initializer_zerop (elt, nonzero))
10435 return false;
10437 return true;
10440 case MEM_REF:
10442 tree arg = TREE_OPERAND (init, 0);
10443 if (TREE_CODE (arg) != ADDR_EXPR)
10444 return false;
10445 tree offset = TREE_OPERAND (init, 1);
10446 if (TREE_CODE (offset) != INTEGER_CST
10447 || !tree_fits_uhwi_p (offset))
10448 return false;
10449 off = tree_to_uhwi (offset);
10450 if (INT_MAX < off)
10451 return false;
10452 arg = TREE_OPERAND (arg, 0);
10453 if (TREE_CODE (arg) != STRING_CST)
10454 return false;
10455 init = arg;
10457 /* Fall through. */
10459 case STRING_CST:
10461 gcc_assert (off <= INT_MAX);
10463 int i = off;
10464 int n = TREE_STRING_LENGTH (init);
10465 if (n <= i)
10466 return false;
10468 /* We need to loop through all elements to handle cases like
10469 "\0" and "\0foobar". */
10470 for (i = 0; i < n; ++i)
10471 if (TREE_STRING_POINTER (init)[i] != '\0')
10473 *nonzero = true;
10474 return false;
10477 return true;
10480 default:
10481 return false;
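/* Illustrative examples: the string constant "\0\0" and a CONSTRUCTOR whose
   elements are all zero yield true; the REAL_CST -0.0 and the string
   "\0foo" yield false with *NONZERO set to true; a CONSTRUCTOR containing a
   non-constant element yields false while leaving *NONZERO false, meaning
   the answer is unknown.  */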
10485 /* Return true if EXPR is an initializer expression in which every element
10486 is a constant that is numerically equal to 0 or 1. The elements do not
10487 need to be equal to each other. */
10489 bool
10490 initializer_each_zero_or_onep (const_tree expr)
10492 STRIP_ANY_LOCATION_WRAPPER (expr);
10494 switch (TREE_CODE (expr))
10496 case INTEGER_CST:
10497 return integer_zerop (expr) || integer_onep (expr);
10499 case REAL_CST:
10500 return real_zerop (expr) || real_onep (expr);
10502 case VECTOR_CST:
10504 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10505 if (VECTOR_CST_STEPPED_P (expr)
10506 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10507 return false;
10509 for (unsigned int i = 0; i < nelts; ++i)
10511 tree elt = vector_cst_elt (expr, i);
10512 if (!initializer_each_zero_or_onep (elt))
10513 return false;
10516 return true;
10519 default:
10520 return false;
10524 /* Check if vector VEC consists of all equal elements and
10525 that the number of elements corresponds to the type of VEC.
10526 The function returns the first element of the vector
10527 or NULL_TREE if the vector is not uniform. */
10528 tree
10529 uniform_vector_p (const_tree vec)
10531 tree first, t;
10532 unsigned HOST_WIDE_INT i, nelts;
10534 if (vec == NULL_TREE)
10535 return NULL_TREE;
10537 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10539 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10540 return TREE_OPERAND (vec, 0);
10542 else if (TREE_CODE (vec) == VECTOR_CST)
10544 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10545 return VECTOR_CST_ENCODED_ELT (vec, 0);
10546 return NULL_TREE;
10549 else if (TREE_CODE (vec) == CONSTRUCTOR
10550 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10552 first = error_mark_node;
10554 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10556 if (i == 0)
10558 first = t;
10559 continue;
10561 if (!operand_equal_p (first, t, 0))
10562 return NULL_TREE;
10564 if (i != nelts)
10565 return NULL_TREE;
10567 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10568 return uniform_vector_p (first);
10569 return first;
10572 return NULL_TREE;
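/* Example: for the VECTOR_CST { 7, 7, 7, 7 } (a single duplicated pattern)
   the function returns the INTEGER_CST 7; for { 7, 7, 7, 8 } it returns
   NULL_TREE.  */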
10575 /* If the argument is an INTEGER_CST, return it. If the argument is a vector
10576 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10577 return NULL_TREE.
10578 Look through location wrappers. */
10580 tree
10581 uniform_integer_cst_p (tree t)
10583 STRIP_ANY_LOCATION_WRAPPER (t);
10585 if (TREE_CODE (t) == INTEGER_CST)
10586 return t;
10588 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10590 t = uniform_vector_p (t);
10591 if (t && TREE_CODE (t) == INTEGER_CST)
10592 return t;
10595 return NULL_TREE;
10598 /* Check whether T is a constant or a constant vector in which each element E
10599 satisfies ~E + 1 == pow2; if so return ~E (in unsigned form), otherwise NULL_TREE. */
10601 tree
10602 bitmask_inv_cst_vector_p (tree t)
10605 tree_code code = TREE_CODE (t);
10606 tree type = TREE_TYPE (t);
10608 if (!INTEGRAL_TYPE_P (type)
10609 && !VECTOR_INTEGER_TYPE_P (type))
10610 return NULL_TREE;
10612 unsigned HOST_WIDE_INT nelts = 1;
10613 tree cst;
10614 unsigned int idx = 0;
10615 bool uniform = uniform_integer_cst_p (t);
10616 tree newtype = unsigned_type_for (type);
10617 tree_vector_builder builder;
10618 if (code == INTEGER_CST)
10619 cst = t;
10620 else
10622 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10623 return NULL_TREE;
10625 cst = vector_cst_elt (t, 0);
10626 builder.new_vector (newtype, nelts, 1);
10629 tree ty = unsigned_type_for (TREE_TYPE (cst));
10633 if (idx > 0)
10634 cst = vector_cst_elt (t, idx);
10635 wide_int icst = wi::to_wide (cst);
10636 wide_int inv = wi::bit_not (icst);
10637 icst = wi::add (1, inv);
10638 if (wi::popcount (icst) != 1)
10639 return NULL_TREE;
10641 tree newcst = wide_int_to_tree (ty, inv);
10643 if (uniform)
10644 return build_uniform_cst (newtype, newcst);
10646 builder.quick_push (newcst);
10648 while (++idx < nelts);
10650 return builder.build ();
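/* Worked example on a 32-bit INTEGER_CST: for E = 0xfffffff0 we get
   ~E = 0xf and ~E + 1 = 0x10, a power of two, so 0xf is returned in the
   unsigned variant of E's type.  For E = 0xfffffff2, ~E + 1 = 0xe has three
   bits set and NULL_TREE is returned.  */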
10653 /* If VECTOR_CST T has a single nonzero element, return the index of that
10654 element, otherwise return -1. */
10656 int
10657 single_nonzero_element (const_tree t)
10659 unsigned HOST_WIDE_INT nelts;
10660 unsigned int repeat_nelts;
10661 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10662 repeat_nelts = nelts;
10663 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10665 nelts = vector_cst_encoded_nelts (t);
10666 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10668 else
10669 return -1;
10671 int res = -1;
10672 for (unsigned int i = 0; i < nelts; ++i)
10674 tree elt = vector_cst_elt (t, i);
10675 if (!integer_zerop (elt) && !real_zerop (elt))
10677 if (res >= 0 || i >= repeat_nelts)
10678 return -1;
10679 res = i;
10682 return res;
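/* Example: for the VECTOR_CST { 0, 0, 5, 0 } the function returns 2; for
   { 0, 3, 5, 0 } or an all-zero vector it returns -1.  */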
10685 /* Build an empty statement at location LOC. */
10687 tree
10688 build_empty_stmt (location_t loc)
10690 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10691 SET_EXPR_LOCATION (t, loc);
10692 return t;
10696 /* Build an OMP clause with code CODE. LOC is the location of the
10697 clause. */
10699 tree
10700 build_omp_clause (location_t loc, enum omp_clause_code code)
10702 tree t;
10703 int size, length;
10705 length = omp_clause_num_ops[code];
10706 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10708 record_node_allocation_statistics (OMP_CLAUSE, size);
10710 t = (tree) ggc_internal_alloc (size);
10711 memset (t, 0, size);
10712 TREE_SET_CODE (t, OMP_CLAUSE);
10713 OMP_CLAUSE_SET_CODE (t, code);
10714 OMP_CLAUSE_LOCATION (t) = loc;
10716 return t;
10719 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10720 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10721 Except for the CODE and operand count field, other storage for the
10722 object is initialized to zeros. */
10724 tree
10725 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10727 tree t;
10728 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10730 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10731 gcc_assert (len >= 1);
10733 record_node_allocation_statistics (code, length);
10735 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10737 TREE_SET_CODE (t, code);
10739 /* Can't use TREE_OPERAND to store the length because if checking is
10740 enabled, it will try to check the length before we store it. :-P */
10741 t->exp.operands[0] = build_int_cst (sizetype, len);
10743 return t;
10746 /* Helper function for build_call_* functions; build a CALL_EXPR with
10747 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10748 the argument slots. */
10750 static tree
10751 build_call_1 (tree return_type, tree fn, int nargs)
10753 tree t;
10755 t = build_vl_exp (CALL_EXPR, nargs + 3);
10756 TREE_TYPE (t) = return_type;
10757 CALL_EXPR_FN (t) = fn;
10758 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10760 return t;
10763 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10764 FN and a null static chain slot. NARGS is the number of call arguments
10765 which are specified as "..." arguments. */
10767 tree
10768 build_call_nary (tree return_type, tree fn, int nargs, ...)
10770 tree ret;
10771 va_list args;
10772 va_start (args, nargs);
10773 ret = build_call_valist (return_type, fn, nargs, args);
10774 va_end (args);
10775 return ret;
10778 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10779 FN and a null static chain slot. NARGS is the number of call arguments
10780 which are specified as a va_list ARGS. */
10782 tree
10783 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10785 tree t;
10786 int i;
10788 t = build_call_1 (return_type, fn, nargs);
10789 for (i = 0; i < nargs; i++)
10790 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10791 process_call_operands (t);
10792 return t;
10795 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10796 FN and a null static chain slot. NARGS is the number of call arguments
10797 which are specified as a tree array ARGS. */
10799 tree
10800 build_call_array_loc (location_t loc, tree return_type, tree fn,
10801 int nargs, const tree *args)
10803 tree t;
10804 int i;
10806 t = build_call_1 (return_type, fn, nargs);
10807 for (i = 0; i < nargs; i++)
10808 CALL_EXPR_ARG (t, i) = args[i];
10809 process_call_operands (t);
10810 SET_EXPR_LOCATION (t, loc);
10811 return t;
10814 /* Like build_call_array, but takes a vec. */
10816 tree
10817 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10819 tree ret, t;
10820 unsigned int ix;
10822 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10823 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10824 CALL_EXPR_ARG (ret, ix) = t;
10825 process_call_operands (ret);
10826 return ret;
10829 /* Conveniently construct a function call expression. FNDECL names the
10830 function to be called and N arguments are passed in the array
10831 ARGARRAY. */
10833 tree
10834 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10836 tree fntype = TREE_TYPE (fndecl);
10837 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10839 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10842 /* Conveniently construct a function call expression. FNDECL names the
10843 function to be called and the arguments are passed in the vector
10844 VEC. */
10846 tree
10847 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10849 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10850 vec_safe_address (vec));
10854 /* Conveniently construct a function call expression. FNDECL names the
10855 function to be called, N is the number of arguments, and the "..."
10856 parameters are the argument expressions. */
10858 tree
10859 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10861 va_list ap;
10862 tree *argarray = XALLOCAVEC (tree, n);
10863 int i;
10865 va_start (ap, n);
10866 for (i = 0; i < n; i++)
10867 argarray[i] = va_arg (ap, tree);
10868 va_end (ap);
10869 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10872 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10873 varargs macros aren't supported by all bootstrap compilers. */
10875 tree
10876 build_call_expr (tree fndecl, int n, ...)
10878 va_list ap;
10879 tree *argarray = XALLOCAVEC (tree, n);
10880 int i;
10882 va_start (ap, n);
10883 for (i = 0; i < n; i++)
10884 argarray[i] = va_arg (ap, tree);
10885 va_end (ap);
10886 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10889 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10890 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10891 It will get gimplified later into an ordinary internal function. */
10893 tree
10894 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10895 tree type, int n, const tree *args)
10897 tree t = build_call_1 (type, NULL_TREE, n);
10898 for (int i = 0; i < n; ++i)
10899 CALL_EXPR_ARG (t, i) = args[i];
10900 SET_EXPR_LOCATION (t, loc);
10901 CALL_EXPR_IFN (t) = ifn;
10902 process_call_operands (t);
10903 return t;
10906 /* Build an internal call expression. This is just like CALL_EXPR, except
10907 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10908 internal function. */
10910 tree
10911 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10912 tree type, int n, ...)
10914 va_list ap;
10915 tree *argarray = XALLOCAVEC (tree, n);
10916 int i;
10918 va_start (ap, n);
10919 for (i = 0; i < n; i++)
10920 argarray[i] = va_arg (ap, tree);
10921 va_end (ap);
10922 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10925 /* Return a function call to FN, if the target is guaranteed to support it,
10926 or null otherwise.
10928 N is the number of arguments, passed in the "...", and TYPE is the
10929 type of the return value. */
10931 tree
10932 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10933 int n, ...)
10935 va_list ap;
10936 tree *argarray = XALLOCAVEC (tree, n);
10937 int i;
10939 va_start (ap, n);
10940 for (i = 0; i < n; i++)
10941 argarray[i] = va_arg (ap, tree);
10942 va_end (ap);
10943 if (internal_fn_p (fn))
10945 internal_fn ifn = as_internal_fn (fn);
10946 if (direct_internal_fn_p (ifn))
10948 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10949 if (!direct_internal_fn_supported_p (ifn, types,
10950 OPTIMIZE_FOR_BOTH))
10951 return NULL_TREE;
10953 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10955 else
10957 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10958 if (!fndecl)
10959 return NULL_TREE;
10960 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10964 /* Return a function call to the appropriate builtin alloca variant.
10966 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10967 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10968 bound for SIZE in case it is not a fixed value. */
10970 tree
10971 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10973 if (max_size >= 0)
10975 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10976 return
10977 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10979 else if (align > 0)
10981 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10982 return build_call_expr (t, 2, size, size_int (align));
10984 else
10986 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10987 return build_call_expr (t, 1, size);
10991 /* The built-in decl to use to mark code points believed to be unreachable.
10992 Typically __builtin_unreachable, but __builtin_trap if
10993 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10994 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10995 appropriate ubsan function. When building a call directly, use
10996 {gimple_,}build_builtin_unreachable instead. */
10998 tree
10999 builtin_decl_unreachable ()
11001 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
11003 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
11004 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
11005 : flag_unreachable_traps)
11006 fncode = BUILT_IN_UNREACHABLE_TRAP;
11007 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
11008 in the sanopt pass. */
11010 return builtin_decl_explicit (fncode);
11013 /* Build a call to __builtin_unreachable, possibly rewritten by
11014 -fsanitize=unreachable. Use this rather than the above when practical. */
11016 tree
11017 build_builtin_unreachable (location_t loc)
11019 tree data = NULL_TREE;
11020 tree fn = sanitize_unreachable_fn (&data, loc);
11021 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
11024 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11025 if SIZE == -1) and return a tree node representing char* pointer to
11026 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
11027 the STRING_CST value is the LEN bytes at STR (the representation
11028 of the string, which may be wide). Otherwise it's all zeros. */
11030 tree
11031 build_string_literal (unsigned len, const char *str /* = NULL */,
11032 tree eltype /* = char_type_node */,
11033 unsigned HOST_WIDE_INT size /* = -1 */)
11035 tree t = build_string (len, str);
11036 /* Set the maximum valid index based on the string length or SIZE. */
11037 unsigned HOST_WIDE_INT maxidx
11038 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11040 tree index = build_index_type (size_int (maxidx));
11041 eltype = build_type_variant (eltype, 1, 0);
11042 tree type = build_array_type (eltype, index);
11043 TREE_TYPE (t) = type;
11044 TREE_CONSTANT (t) = 1;
11045 TREE_READONLY (t) = 1;
11046 TREE_STATIC (t) = 1;
11048 type = build_pointer_type (eltype);
11049 t = build1 (ADDR_EXPR, type,
11050 build4 (ARRAY_REF, eltype,
11051 t, integer_zero_node, NULL_TREE, NULL_TREE));
11052 return t;
11057 /* Return true if T (assumed to be a DECL) must be assigned a memory
11058 location. */
11060 bool
11061 needs_to_live_in_memory (const_tree t)
11063 return (TREE_ADDRESSABLE (t)
11064 || is_global_var (t)
11065 || (TREE_CODE (t) == RESULT_DECL
11066 && !DECL_BY_REFERENCE (t)
11067 && aggregate_value_p (t, current_function_decl)));
11070 /* Return the value of a constant X and sign-extend it. */
11072 HOST_WIDE_INT
11073 int_cst_value (const_tree x)
11075 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11076 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11078 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11079 gcc_assert (cst_and_fits_in_hwi (x));
11081 if (bits < HOST_BITS_PER_WIDE_INT)
11083 bool negative = ((val >> (bits - 1)) & 1) != 0;
11084 if (negative)
11085 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11086 else
11087 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11090 return val;
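/* Example: for an INTEGER_CST of an 8-bit signed type whose low bits are
   0xff, BITS is 8 and the top bit is set, so VAL is extended with ones and
   -1 is returned; for the value 0x7f the top bit is clear and 127 is
   returned.  */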
11093 /* If TYPE is an integral or pointer type, return an integer type with
11094 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11095 if TYPE is already an integer type of signedness UNSIGNEDP.
11096 If TYPE is a floating-point type, return an integer type with the same
11097 bitsize and with the signedness given by UNSIGNEDP; this is useful
11098 when doing bit-level operations on a floating-point value. */
11100 tree
11101 signed_or_unsigned_type_for (int unsignedp, tree type)
11103 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11104 return type;
11106 if (TREE_CODE (type) == VECTOR_TYPE)
11108 tree inner = TREE_TYPE (type);
11109 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11110 if (!inner2)
11111 return NULL_TREE;
11112 if (inner == inner2)
11113 return type;
11114 machine_mode new_mode;
11115 if (VECTOR_MODE_P (TYPE_MODE (type))
11116 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11117 return build_vector_type_for_mode (inner2, new_mode);
11118 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11121 if (TREE_CODE (type) == COMPLEX_TYPE)
11123 tree inner = TREE_TYPE (type);
11124 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11125 if (!inner2)
11126 return NULL_TREE;
11127 if (inner == inner2)
11128 return type;
11129 return build_complex_type (inner2);
11132 unsigned int bits;
11133 if (INTEGRAL_TYPE_P (type)
11134 || POINTER_TYPE_P (type)
11135 || TREE_CODE (type) == OFFSET_TYPE)
11136 bits = TYPE_PRECISION (type);
11137 else if (TREE_CODE (type) == REAL_TYPE)
11138 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11139 else
11140 return NULL_TREE;
11142 if (TREE_CODE (type) == BITINT_TYPE && (unsignedp || bits > 1))
11143 return build_bitint_type (bits, unsignedp);
11144 return build_nonstandard_integer_type (bits, unsignedp);
11147 /* If TYPE is an integral or pointer type, return an integer type with
11148 the same precision which is unsigned, or itself if TYPE is already an
11149 unsigned integer type. If TYPE is a floating-point type, return an
11150 unsigned integer type with the same bitsize as TYPE. */
11152 tree
11153 unsigned_type_for (tree type)
11155 return signed_or_unsigned_type_for (1, type);
11158 /* If TYPE is an integral or pointer type, return an integer type with
11159 the same precision which is signed, or itself if TYPE is already a
11160 signed integer type. If TYPE is a floating-point type, return a
11161 signed integer type with the same bitsize as TYPE. */
11163 tree
11164 signed_type_for (tree type)
11166 return signed_or_unsigned_type_for (0, type);
11169 /* - For VECTOR_TYPEs:
11170 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11171 - The number of elements must match (known_eq).
11172 - targetm.vectorize.get_mask_mode exists, and it returns exactly
11173 the same mode as the truth type's mode.
11174 - Otherwise, the truth type must be a BOOLEAN_TYPE
11175 or useless_type_conversion_p to BOOLEAN_TYPE. */
11176 bool
11177 is_truth_type_for (tree type, tree truth_type)
11179 machine_mode mask_mode = TYPE_MODE (truth_type);
11180 machine_mode vmode = TYPE_MODE (type);
11181 machine_mode tmask_mode;
11183 if (TREE_CODE (type) == VECTOR_TYPE)
11185 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11186 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11187 TYPE_VECTOR_SUBPARTS (truth_type))
11188 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11189 && tmask_mode == mask_mode)
11190 return true;
11192 return false;
11195 return useless_type_conversion_p (boolean_type_node, truth_type);
11198 /* If TYPE is a vector type, return a signed integer vector type with the
11199 same width and number of subparts. Otherwise return boolean_type_node. */
11201 tree
11202 truth_type_for (tree type)
11204 if (TREE_CODE (type) == VECTOR_TYPE)
11206 if (VECTOR_BOOLEAN_TYPE_P (type))
11207 return type;
11208 return build_truth_vector_type_for (type);
11210 else
11211 return boolean_type_node;
11214 /* Returns the largest value obtainable by casting something in INNER type to
11215 OUTER type. */
11217 tree
11218 upper_bound_in_type (tree outer, tree inner)
11220 unsigned int det = 0;
11221 unsigned oprec = TYPE_PRECISION (outer);
11222 unsigned iprec = TYPE_PRECISION (inner);
11223 unsigned prec;
11225 /* Compute a unique number for every combination. */
11226 det |= (oprec > iprec) ? 4 : 0;
11227 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11228 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11230 /* Determine the exponent to use. */
11231 switch (det)
11233 case 0:
11234 case 1:
11235 /* oprec <= iprec, outer: signed, inner: don't care. */
11236 prec = oprec - 1;
11237 break;
11238 case 2:
11239 case 3:
11240 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11241 prec = oprec;
11242 break;
11243 case 4:
11244 /* oprec > iprec, outer: signed, inner: signed. */
11245 prec = iprec - 1;
11246 break;
11247 case 5:
11248 /* oprec > iprec, outer: signed, inner: unsigned. */
11249 prec = iprec;
11250 break;
11251 case 6:
11252 /* oprec > iprec, outer: unsigned, inner: signed. */
11253 prec = oprec;
11254 break;
11255 case 7:
11256 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11257 prec = iprec;
11258 break;
11259 default:
11260 gcc_unreachable ();
11263 return wide_int_to_tree (outer,
11264 wi::mask (prec, false, TYPE_PRECISION (outer)));
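/* Worked example: upper_bound_in_type (signed char, unsigned short) has
   oprec = 8 and iprec = 16, so det = 1 and prec = oprec - 1 = 7, giving
   2^7 - 1 = 127, the largest signed char value obtainable by casting an
   unsigned short.  */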
11267 /* Returns the smallest value obtainable by casting something in INNER type to
11268 OUTER type. */
11270 tree
11271 lower_bound_in_type (tree outer, tree inner)
11273 unsigned oprec = TYPE_PRECISION (outer);
11274 unsigned iprec = TYPE_PRECISION (inner);
11276 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11277 and obtain 0. */
11278 if (TYPE_UNSIGNED (outer)
11279 /* If we are widening something of an unsigned type, OUTER type
11280 contains all values of INNER type. In particular, both INNER
11281 and OUTER types have zero in common. */
11282 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11283 return build_int_cst (outer, 0);
11284 else
11286 /* If we are widening a signed type to another signed type, we
11287 want to obtain -2^^(iprec-1). If we are keeping the
11288 precision or narrowing to a signed type, we want to obtain
11289 -2^(oprec-1). */
11290 unsigned prec = oprec > iprec ? iprec : oprec;
11291 return wide_int_to_tree (outer,
11292 wi::mask (prec - 1, true,
11293 TYPE_PRECISION (outer)));
11297 /* Return true if two operands that are suitable for PHI nodes are
11298 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11299 SSA_NAME or invariant. Note that this is strictly an optimization.
11300 That is, callers of this function can directly call operand_equal_p
11301 and get the same result, only slower. */
11303 bool
11304 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11306 if (arg0 == arg1)
11307 return true;
11308 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11309 return false;
11310 return operand_equal_p (arg0, arg1, 0);
11313 /* Returns the number of zeros at the end of the binary representation of X. */
11315 tree
11316 num_ending_zeros (const_tree x)
11318 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
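/* Example: for X = 40 (binary 101000) the result is an INTEGER_CST of 3.  */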
11322 #define WALK_SUBTREE(NODE) \
11323 do \
11325 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11326 if (result) \
11327 return result; \
11329 while (0)
11331 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11332 be walked whenever a type is seen in the tree. The rest of the operands and
11333 the return value are as for walk_tree. */
11335 static tree
11336 walk_type_fields (tree type, walk_tree_fn func, void *data,
11337 hash_set<tree> *pset, walk_tree_lh lh)
11339 tree result = NULL_TREE;
11341 switch (TREE_CODE (type))
11343 case POINTER_TYPE:
11344 case REFERENCE_TYPE:
11345 case VECTOR_TYPE:
11346 /* We have to worry about mutually recursive pointers. These can't
11347 be written in C. They can in Ada. It's pathological, but
11348 there's an ACATS test (c38102a) that checks it. Deal with this
11349 by checking if we're pointing to another pointer, that one
11350 points to another pointer, that one does too, and we have no htab.
11351 If so, get a hash table. We check three levels deep to avoid
11352 the cost of the hash table if we don't need one. */
11353 if (POINTER_TYPE_P (TREE_TYPE (type))
11354 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11355 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11356 && !pset)
11358 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11359 func, data);
11360 if (result)
11361 return result;
11363 break;
11366 /* fall through */
11368 case COMPLEX_TYPE:
11369 WALK_SUBTREE (TREE_TYPE (type));
11370 break;
11372 case METHOD_TYPE:
11373 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11375 /* Fall through. */
11377 case FUNCTION_TYPE:
11378 WALK_SUBTREE (TREE_TYPE (type));
11380 tree arg;
11382 /* We never want to walk into default arguments. */
11383 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11384 WALK_SUBTREE (TREE_VALUE (arg));
11386 break;
11388 case ARRAY_TYPE:
11389 /* Don't follow this node's type if it is a pointer, for fear that
11390 we'll have infinite recursion. If we have a PSET, then we
11391 need not fear. */
11392 if (pset
11393 || (!POINTER_TYPE_P (TREE_TYPE (type))
11394 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11395 WALK_SUBTREE (TREE_TYPE (type));
11396 WALK_SUBTREE (TYPE_DOMAIN (type));
11397 break;
11399 case OFFSET_TYPE:
11400 WALK_SUBTREE (TREE_TYPE (type));
11401 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11402 break;
11404 default:
11405 break;
11408 return NULL_TREE;
11411 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11412 called with the DATA and the address of each sub-tree. If FUNC returns a
11413 non-NULL value, the traversal is stopped, and the value returned by FUNC
11414 is returned. If PSET is non-NULL it is used to record the nodes visited,
11415 and to avoid visiting a node more than once. */
11417 tree
11418 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11419 hash_set<tree> *pset, walk_tree_lh lh)
11421 #define WALK_SUBTREE_TAIL(NODE) \
11422 do \
11424 tp = & (NODE); \
11425 goto tail_recurse; \
11427 while (0)
11429 tail_recurse:
11430 /* Skip empty subtrees. */
11431 if (!*tp)
11432 return NULL_TREE;
11434 /* Don't walk the same tree twice, if the user has requested
11435 that we avoid doing so. */
11436 if (pset && pset->add (*tp))
11437 return NULL_TREE;
11439 /* Call the function. */
11440 int walk_subtrees = 1;
11441 tree result = (*func) (tp, &walk_subtrees, data);
11443 /* If we found something, return it. */
11444 if (result)
11445 return result;
11447 tree t = *tp;
11448 tree_code code = TREE_CODE (t);
11450 /* Even if we didn't, FUNC may have decided that there was nothing
11451 interesting below this point in the tree. */
11452 if (!walk_subtrees)
11454 /* But we still need to check our siblings. */
11455 if (code == TREE_LIST)
11456 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11457 else if (code == OMP_CLAUSE)
11458 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11459 else
11460 return NULL_TREE;
11463 if (lh)
11465 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11466 if (result || !walk_subtrees)
11467 return result;
11470 switch (code)
11472 case ERROR_MARK:
11473 case IDENTIFIER_NODE:
11474 case INTEGER_CST:
11475 case REAL_CST:
11476 case FIXED_CST:
11477 case STRING_CST:
11478 case BLOCK:
11479 case PLACEHOLDER_EXPR:
11480 case SSA_NAME:
11481 case FIELD_DECL:
11482 case RESULT_DECL:
11483 /* None of these have subtrees other than those already walked
11484 above. */
11485 break;
11487 case TREE_LIST:
11488 WALK_SUBTREE (TREE_VALUE (t));
11489 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11491 case TREE_VEC:
11493 int len = TREE_VEC_LENGTH (t);
11495 if (len == 0)
11496 break;
11498 /* Walk all elements but the last. */
11499 for (int i = 0; i < len - 1; ++i)
11500 WALK_SUBTREE (TREE_VEC_ELT (t, i));
11502 /* Now walk the last one as a tail call. */
11503 WALK_SUBTREE_TAIL (TREE_VEC_ELT (t, len - 1));
11506 case VECTOR_CST:
11508 unsigned len = vector_cst_encoded_nelts (t);
11509 if (len == 0)
11510 break;
11511 /* Walk all elements but the last. */
11512 for (unsigned i = 0; i < len - 1; ++i)
11513 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (t, i));
11514 /* Now walk the last one as a tail call. */
11515 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (t, len - 1));
11518 case COMPLEX_CST:
11519 WALK_SUBTREE (TREE_REALPART (t));
11520 WALK_SUBTREE_TAIL (TREE_IMAGPART (t));
11522 case CONSTRUCTOR:
11524 unsigned HOST_WIDE_INT idx;
11525 constructor_elt *ce;
11527 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce);
11528 idx++)
11529 WALK_SUBTREE (ce->value);
11531 break;
11533 case SAVE_EXPR:
11534 WALK_SUBTREE_TAIL (TREE_OPERAND (t, 0));
11536 case BIND_EXPR:
11538 tree decl;
11539 for (decl = BIND_EXPR_VARS (t); decl; decl = DECL_CHAIN (decl))
11541 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11542 into declarations that are just mentioned, rather than
11543 declared; they don't really belong to this part of the tree.
11544 And, we can see cycles: the initializer for a declaration
11545 can refer to the declaration itself. */
11546 WALK_SUBTREE (DECL_INITIAL (decl));
11547 WALK_SUBTREE (DECL_SIZE (decl));
11548 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11550 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (t));
11553 case STATEMENT_LIST:
11555 tree_stmt_iterator i;
11556 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
11557 WALK_SUBTREE (*tsi_stmt_ptr (i));
11559 break;
11561 case OMP_CLAUSE:
11563 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
11564 for (int i = 0; i < len; i++)
11565 WALK_SUBTREE (OMP_CLAUSE_OPERAND (t, i));
11566 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11569 case TARGET_EXPR:
11571 int i, len;
11573 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11574 But, we only want to walk once. */
11575 len = (TREE_OPERAND (t, 3) == TREE_OPERAND (t, 1)) ? 2 : 3;
11576 for (i = 0; i < len; ++i)
11577 WALK_SUBTREE (TREE_OPERAND (t, i));
11578 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len));
11581 case DECL_EXPR:
11582 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11583 defining. We only want to walk into these fields of a type in this
11584 case and not in the general case of a mere reference to the type.
11586 The criterion is as follows: if the field can be an expression, it
11587 must be walked only here. This should be in keeping with the fields
11588 that are directly gimplified in gimplify_type_sizes in order for the
11589 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11590 variable-sized types.
11592 Note that DECLs get walked as part of processing the BIND_EXPR. */
11593 if (TREE_CODE (DECL_EXPR_DECL (t)) == TYPE_DECL)
11595 /* Call the function for the decl so e.g. copy_tree_body_r can
11596 replace it with the remapped one. */
11597 result = (*func) (&DECL_EXPR_DECL (t), &walk_subtrees, data);
11598 if (result || !walk_subtrees)
11599 return result;
11601 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (t));
11602 if (TREE_CODE (*type_p) == ERROR_MARK)
11603 return NULL_TREE;
11605 /* Call the function for the type. See if it returns anything or
11606 doesn't want us to continue. If we are to continue, walk both
11607 the normal fields and those for the declaration case. */
11608 result = (*func) (type_p, &walk_subtrees, data);
11609 if (result || !walk_subtrees)
11610 return result;
11612 tree type = *type_p;
11614 /* But do not walk a pointed-to type since it may itself need to
11615 be walked in the declaration case if it isn't anonymous. */
11616 if (!POINTER_TYPE_P (type))
11618 result = walk_type_fields (type, func, data, pset, lh);
11619 if (result)
11620 return result;
11623 /* If this is a record type, also walk the fields. */
11624 if (RECORD_OR_UNION_TYPE_P (type))
11626 tree field;
11628 for (field = TYPE_FIELDS (type); field;
11629 field = DECL_CHAIN (field))
11631 /* We'd like to look at the type of the field, but we can
11632 easily get infinite recursion. So assume it's pointed
11633 to elsewhere in the tree. Also, ignore things that
11634 aren't fields. */
11635 if (TREE_CODE (field) != FIELD_DECL)
11636 continue;
11638 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11639 WALK_SUBTREE (DECL_SIZE (field));
11640 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11641 if (TREE_CODE (type) == QUAL_UNION_TYPE)
11642 WALK_SUBTREE (DECL_QUALIFIER (field));
11646 /* Same for scalar types. */
11647 else if (TREE_CODE (type) == BOOLEAN_TYPE
11648 || TREE_CODE (type) == ENUMERAL_TYPE
11649 || TREE_CODE (type) == INTEGER_TYPE
11650 || TREE_CODE (type) == FIXED_POINT_TYPE
11651 || TREE_CODE (type) == REAL_TYPE)
11653 WALK_SUBTREE (TYPE_MIN_VALUE (type));
11654 WALK_SUBTREE (TYPE_MAX_VALUE (type));
11657 WALK_SUBTREE (TYPE_SIZE (type));
11658 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (type));
11660 /* FALLTHRU */
11662 default:
11663 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11665 int i, len;
11667 /* Walk over all the sub-trees of this operand. */
11668 len = TREE_OPERAND_LENGTH (t);
11670 /* Go through the subtrees. We need to do this in forward order so
11671 that the scope of a FOR_EXPR is handled properly. */
11672 if (len)
11674 for (i = 0; i < len - 1; ++i)
11675 WALK_SUBTREE (TREE_OPERAND (t, i));
11676 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len - 1));
11679 /* If this is a type, walk the needed fields in the type. */
11680 else if (TYPE_P (t))
11681 return walk_type_fields (t, func, data, pset, lh);
11682 break;
11685 /* We didn't find what we were looking for. */
11686 return NULL_TREE;
11688 #undef WALK_SUBTREE_TAIL
11690 #undef WALK_SUBTREE
11692 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11694 tree
11695 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11696 walk_tree_lh lh)
11698 tree result;
11700 hash_set<tree> pset;
11701 result = walk_tree_1 (tp, func, data, &pset, lh);
11702 return result;
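
/* Illustrative sketch, not part of GCC proper: a minimal walk_tree_fn
   callback that counts the distinct nodes reachable from an expression,
   shown only to make the walk_tree / walk_tree_without_duplicates contract
   above concrete.  The names are invented for the example; returning
   NULL_TREE means "keep walking", while a non-NULL result stops the walk
   and is propagated back to the caller.

     static tree
     count_nodes_r (tree *, int *, void *data)
     {
       ++*(unsigned *) data;     // one more distinct node visited
       return NULL_TREE;         // keep walking
     }

     unsigned count = 0;
     walk_tree_without_duplicates (&expr, count_nodes_r, &count);  */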
11706 tree
11707 tree_block (tree t)
11709 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11711 if (IS_EXPR_CODE_CLASS (c))
11712 return LOCATION_BLOCK (t->exp.locus);
11713 gcc_unreachable ();
11714 return NULL;
11717 void
11718 tree_set_block (tree t, tree b)
11720 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11722 if (IS_EXPR_CODE_CLASS (c))
11724 t->exp.locus = set_block (t->exp.locus, b);
11726 else
11727 gcc_unreachable ();
11730 /* Create a nameless artificial label and put it in the current
11731 function context. The label has a location of LOC. Returns the
11732 newly created label. */
11734 tree
11735 create_artificial_label (location_t loc)
11737 tree lab = build_decl (loc,
11738 LABEL_DECL, NULL_TREE, void_type_node);
11740 DECL_ARTIFICIAL (lab) = 1;
11741 DECL_IGNORED_P (lab) = 1;
11742 DECL_CONTEXT (lab) = current_function_decl;
11743 return lab;
11746 /* Given a tree, try to return a useful variable name that we can use
11747 to prefix a temporary that is being assigned the value of the tree.
11748 I.e. given <temp> = &A, return A. */
11750 const char *
11751 get_name (tree t)
11753 tree stripped_decl;
11755 stripped_decl = t;
11756 STRIP_NOPS (stripped_decl);
11757 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11758 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11759 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11761 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11762 if (!name)
11763 return NULL;
11764 return IDENTIFIER_POINTER (name);
11766 else
11768 switch (TREE_CODE (stripped_decl))
11770 case ADDR_EXPR:
11771 return get_name (TREE_OPERAND (stripped_decl, 0));
11772 default:
11773 return NULL;
11778 /* Return true if TYPE has a variable argument list. */
11780 bool
11781 stdarg_p (const_tree fntype)
11783 function_args_iterator args_iter;
11784 tree n = NULL_TREE, t;
11786 if (!fntype)
11787 return false;
11789 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11790 return true;
11792 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11794 n = t;
11797 return n != NULL_TREE && n != void_type_node;
11800 /* Return true if TYPE has a prototype. */
11802 bool
11803 prototype_p (const_tree fntype)
11805 tree t;
11807 gcc_assert (fntype != NULL_TREE);
11809 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11810 return true;
11812 t = TYPE_ARG_TYPES (fntype);
11813 return (t != NULL_TREE);
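
/* Illustrative sketch, not part of GCC proper: how the two predicates above
   behave on hand-built FUNCTION_TYPEs.  The variable names are invented;
   build_function_type_list and build_varargs_function_type_list are the
   usual helpers, and the expected results follow from the definitions above
   (a trailing void_type_node marks a complete argument list).

     tree proto = build_function_type_list (integer_type_node,
					     integer_type_node, NULL_TREE);
     tree vararg = build_varargs_function_type_list (integer_type_node,
						      integer_type_node,
						      NULL_TREE);

     prototype_p (proto)  -> true     stdarg_p (proto)  -> false
     prototype_p (vararg) -> true     stdarg_p (vararg) -> true  */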
11816 /* If BLOCK is inlined from an __attribute__((__artificial__))
11817 routine, return pointer to location from where it has been
11818 called. */
11819 location_t *
11820 block_nonartificial_location (tree block)
11822 location_t *ret = NULL;
11824 while (block && TREE_CODE (block) == BLOCK
11825 && BLOCK_ABSTRACT_ORIGIN (block))
11827 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11828 if (TREE_CODE (ao) == FUNCTION_DECL)
11830 /* If AO is an artificial inline, point RET to the
11831 call site locus at which it has been inlined and continue
11832 the loop, in case AO's caller is also an artificial
11833 inline. */
11834 if (DECL_DECLARED_INLINE_P (ao)
11835 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11836 ret = &BLOCK_SOURCE_LOCATION (block);
11837 else
11838 break;
11840 else if (TREE_CODE (ao) != BLOCK)
11841 break;
11843 block = BLOCK_SUPERCONTEXT (block);
11845 return ret;
11849 /* If EXP is inlined from an __attribute__((__artificial__))
11850 function, return the location of the original call expression. */
11852 location_t
11853 tree_nonartificial_location (tree exp)
11855 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11857 if (loc)
11858 return *loc;
11859 else
11860 return EXPR_LOCATION (exp);
11863 /* Return the location into which EXP has been inlined. Analogous
11864 to tree_nonartificial_location() above but not limited to artificial
11865 functions declared inline. If SYSTEM_HEADER is true, return
11866 the macro expansion point of the location if it's in a system header. */
11868 location_t
11869 tree_inlined_location (tree exp, bool system_header /* = true */)
11871 location_t loc = UNKNOWN_LOCATION;
11873 tree block = TREE_BLOCK (exp);
11875 while (block && TREE_CODE (block) == BLOCK
11876 && BLOCK_ABSTRACT_ORIGIN (block))
11878 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11879 if (TREE_CODE (ao) == FUNCTION_DECL)
11880 loc = BLOCK_SOURCE_LOCATION (block);
11881 else if (TREE_CODE (ao) != BLOCK)
11882 break;
11884 block = BLOCK_SUPERCONTEXT (block);
11887 if (loc == UNKNOWN_LOCATION)
11889 loc = EXPR_LOCATION (exp);
11890 if (system_header)
11891 /* Only consider macro expansion when the block traversal failed
11892 to find a location. Otherwise it's not relevant. */
11893 return expansion_point_location_if_in_system_header (loc);
11896 return loc;
11899 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11900 nodes. */
11902 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11904 hashval_t
11905 cl_option_hasher::hash (tree x)
11907 const_tree const t = x;
11909 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11910 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11911 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11912 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11913 else
11914 gcc_unreachable ();
11917 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11918 TARGET_OPTION_NODE) is the same as the value represented by Y, a node
11919 of the same kind. */
11921 bool
11922 cl_option_hasher::equal (tree x, tree y)
11924 const_tree const xt = x;
11925 const_tree const yt = y;
11927 if (TREE_CODE (xt) != TREE_CODE (yt))
11928 return false;
11930 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11931 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11932 TREE_OPTIMIZATION (yt));
11933 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11934 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11935 TREE_TARGET_OPTION (yt));
11936 else
11937 gcc_unreachable ();
11940 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11942 tree
11943 build_optimization_node (struct gcc_options *opts,
11944 struct gcc_options *opts_set)
11946 tree t;
11948 /* Use the cache of optimization nodes. */
11950 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11951 opts, opts_set);
11953 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11954 t = *slot;
11955 if (!t)
11957 /* Insert this one into the hash table. */
11958 t = cl_optimization_node;
11959 *slot = t;
11961 /* Make a new node for next time round. */
11962 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11965 return t;
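
/* Illustrative sketch, not part of GCC proper: the function above is plain
   hash-consing.  A scratch node (cl_optimization_node) is filled from OPTS,
   looked up in cl_option_hash_table, and either an existing identical node
   is returned or the scratch node is published and a fresh scratch is
   allocated.  Because equal option sets map to the same tree, callers may
   compare the results by pointer, e.g. (hypothetically):

     tree cur = build_optimization_node (&global_options,
					 &global_options_set);
     if (cur == optimization_default_node)
       ;  // the current options match those saved in the default node  */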
11968 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11970 tree
11971 build_target_option_node (struct gcc_options *opts,
11972 struct gcc_options *opts_set)
11974 tree t;
11976 /* Use the cache of target option nodes. */
11978 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11979 opts, opts_set);
11981 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11982 t = *slot;
11983 if (!t)
11985 /* Insert this one into the hash table. */
11986 t = cl_target_option_node;
11987 *slot = t;
11989 /* Make a new node for next time round. */
11990 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11993 return t;
11996 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11997 so that they aren't saved during PCH writing. */
11999 void
12000 prepare_target_option_nodes_for_pch (void)
12002 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12003 for (; iter != cl_option_hash_table->end (); ++iter)
12004 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12005 TREE_TARGET_GLOBALS (*iter) = NULL;
12008 /* Determine the "ultimate origin" of a block. */
12010 tree
12011 block_ultimate_origin (const_tree block)
12013 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12015 if (origin == NULL_TREE)
12016 return NULL_TREE;
12017 else
12019 gcc_checking_assert ((DECL_P (origin)
12020 && DECL_ORIGIN (origin) == origin)
12021 || BLOCK_ORIGIN (origin) == origin);
12022 return origin;
12026 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12027 no instruction. */
12029 bool
12030 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12032 /* Do not strip casts into or out of differing address spaces. */
12033 if (POINTER_TYPE_P (outer_type)
12034 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12036 if (!POINTER_TYPE_P (inner_type)
12037 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12038 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12039 return false;
12041 else if (POINTER_TYPE_P (inner_type)
12042 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12044 /* We already know that outer_type is not a pointer with
12045 a non-generic address space. */
12046 return false;
12049 /* Use precision rather than machine mode when we can, which gives
12050 the correct answer even for submode (bit-field) types. */
12051 if ((INTEGRAL_TYPE_P (outer_type)
12052 || POINTER_TYPE_P (outer_type)
12053 || TREE_CODE (outer_type) == OFFSET_TYPE)
12054 && (INTEGRAL_TYPE_P (inner_type)
12055 || POINTER_TYPE_P (inner_type)
12056 || TREE_CODE (inner_type) == OFFSET_TYPE))
12057 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12059 /* Otherwise fall back on comparing machine modes (e.g. for
12060 aggregate types, floats). */
12061 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
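
/* Illustrative sketch, not part of GCC proper: on the common targets where
   int and unsigned int share a precision while long long is wider, the
   precision rule above gives

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)
       -> true   (sign-changing cast of equal width, no instruction)
     tree_nop_conversion_p (long_long_integer_type_node, integer_type_node)
       -> false  (widening cast, needs an extension)  */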
12064 /* Return true iff conversion in EXP generates no instruction. Mark
12065 it inline so that we fully inline into the stripping functions even
12066 though we have two uses of this function. */
12068 static inline bool
12069 tree_nop_conversion (const_tree exp)
12071 tree outer_type, inner_type;
12073 if (location_wrapper_p (exp))
12074 return true;
12075 if (!CONVERT_EXPR_P (exp)
12076 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12077 return false;
12079 outer_type = TREE_TYPE (exp);
12080 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12081 if (!inner_type || inner_type == error_mark_node)
12082 return false;
12084 return tree_nop_conversion_p (outer_type, inner_type);
12087 /* Return true iff conversion in EXP generates no instruction. Don't
12088 consider conversions changing the signedness. */
12090 static bool
12091 tree_sign_nop_conversion (const_tree exp)
12093 tree outer_type, inner_type;
12095 if (!tree_nop_conversion (exp))
12096 return false;
12098 outer_type = TREE_TYPE (exp);
12099 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12101 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12102 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12105 /* Strip conversions from EXP according to tree_nop_conversion and
12106 return the resulting expression. */
12108 tree
12109 tree_strip_nop_conversions (tree exp)
12111 while (tree_nop_conversion (exp))
12112 exp = TREE_OPERAND (exp, 0);
12113 return exp;
12116 /* Strip conversions from EXP according to tree_sign_nop_conversion
12117 and return the resulting expression. */
12119 tree
12120 tree_strip_sign_nop_conversions (tree exp)
12122 while (tree_sign_nop_conversion (exp))
12123 exp = TREE_OPERAND (exp, 0);
12124 return exp;
12127 /* Avoid any floating point extensions from EXP. */
12128 tree
12129 strip_float_extensions (tree exp)
12131 tree sub, expt, subt;
12133 /* For a floating-point constant, look up the narrowest type that can hold
12134 it properly and handle it like (type)(narrowest_type)constant.
12135 This way we can optimize for instance a=a*2.0 where "a" is float
12136 but 2.0 is a double constant. */
12137 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12139 REAL_VALUE_TYPE orig;
12140 tree type = NULL;
12142 orig = TREE_REAL_CST (exp);
12143 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12144 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12145 type = float_type_node;
12146 else if (TYPE_PRECISION (TREE_TYPE (exp))
12147 > TYPE_PRECISION (double_type_node)
12148 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12149 type = double_type_node;
12150 if (type)
12151 return build_real_truncate (type, orig);
12154 if (!CONVERT_EXPR_P (exp))
12155 return exp;
12157 sub = TREE_OPERAND (exp, 0);
12158 subt = TREE_TYPE (sub);
12159 expt = TREE_TYPE (exp);
12161 if (!FLOAT_TYPE_P (subt))
12162 return exp;
12164 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12165 return exp;
12167 if (element_precision (subt) > element_precision (expt))
12168 return exp;
12170 return strip_float_extensions (sub);
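
/* Illustrative sketch, not part of GCC proper: for GENERIC like
   (double) f, where F is a float variable, one would expect

     strip_float_extensions (expr)

   to hand back F itself, and the double REAL_CST 2.0 (whose value is
   exactly representable as float) to come back rebuilt in float, which is
   what lets a = a * 2.0 above be evaluated in single precision.  */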
12173 /* Strip out all handled components that produce invariant
12174 offsets. */
12176 const_tree
12177 strip_invariant_refs (const_tree op)
12179 while (handled_component_p (op))
12181 switch (TREE_CODE (op))
12183 case ARRAY_REF:
12184 case ARRAY_RANGE_REF:
12185 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12186 || TREE_OPERAND (op, 2) != NULL_TREE
12187 || TREE_OPERAND (op, 3) != NULL_TREE)
12188 return NULL;
12189 break;
12191 case COMPONENT_REF:
12192 if (TREE_OPERAND (op, 2) != NULL_TREE)
12193 return NULL;
12194 break;
12196 default:;
12198 op = TREE_OPERAND (op, 0);
12201 return op;
12204 /* Strip handled components with zero offset from OP. */
12206 tree
12207 strip_zero_offset_components (tree op)
12209 while (TREE_CODE (op) == COMPONENT_REF
12210 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12211 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12212 op = TREE_OPERAND (op, 0);
12213 return op;
12216 static GTY(()) tree gcc_eh_personality_decl;
12218 /* Return the GCC personality function decl. */
12220 tree
12221 lhd_gcc_personality (void)
12223 if (!gcc_eh_personality_decl)
12224 gcc_eh_personality_decl = build_personality_function ("gcc");
12225 return gcc_eh_personality_decl;
12228 /* TARGET is the call target of a GIMPLE call statement
12229 (obtained by gimple_call_fn). Return true if it is an
12230 OBJ_TYPE_REF representing a virtual call of a C++ method.
12231 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
12232 through a cast, where the middle-end devirtualization machinery
12233 can't apply.) FOR_DUMP_P is true when being called from
12234 the dump routines. */
12236 bool
12237 virtual_method_call_p (const_tree target, bool for_dump_p)
12239 if (TREE_CODE (target) != OBJ_TYPE_REF)
12240 return false;
12241 tree t = TREE_TYPE (target);
12242 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12243 t = TREE_TYPE (t);
12244 if (TREE_CODE (t) == FUNCTION_TYPE)
12245 return false;
12246 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12247 /* If we do not have BINFO associated, it means that type was built
12248 without devirtualization enabled. Do not consider this a virtual
12249 call. */
12250 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12251 return false;
12252 return true;
12255 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12257 static tree
12258 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12260 unsigned int i;
12261 tree base_binfo, b;
12263 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12264 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12265 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12266 return base_binfo;
12267 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12268 return b;
12269 return NULL;
12272 /* Try to find a base info of BINFO that would have its field decl at offset
12273 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12274 found, return it, otherwise return NULL_TREE. */
12276 tree
12277 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12279 tree type = BINFO_TYPE (binfo);
12281 while (true)
12283 HOST_WIDE_INT pos, size;
12284 tree fld;
12285 int i;
12287 if (types_same_for_odr (type, expected_type))
12288 return binfo;
12289 if (maybe_lt (offset, 0))
12290 return NULL_TREE;
12292 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12294 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12295 continue;
12297 pos = int_bit_position (fld);
12298 size = tree_to_uhwi (DECL_SIZE (fld));
12299 if (known_in_range_p (offset, pos, size))
12300 break;
12302 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12303 return NULL_TREE;
12305 /* Offset 0 indicates the primary base, whose vtable contents are
12306 represented in the binfo for the derived class. */
12307 else if (maybe_ne (offset, 0))
12309 tree found_binfo = NULL, base_binfo;
12310 /* Offsets in BINFO are in bytes relative to the whole structure
12311 while POS is in bits relative to the containing field. */
12312 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12313 / BITS_PER_UNIT);
12315 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12316 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12317 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12319 found_binfo = base_binfo;
12320 break;
12322 if (found_binfo)
12323 binfo = found_binfo;
12324 else
12325 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12326 binfo_offset);
12329 type = TREE_TYPE (fld);
12330 offset -= pos;
12334 /* PR 84195: Replace control characters in "unescaped" with their
12335 escaped equivalents. Allow newlines if -fmessage-length has
12336 been set to a non-zero value. This is done here, rather than
12337 where the attribute is recorded, as the message length can
12338 change between these two locations. */
12340 void
12341 escaped_string::escape (const char *unescaped)
12343 char *escaped;
12344 size_t i, new_i, len;
12346 if (m_owned)
12347 free (m_str);
12349 m_str = const_cast<char *> (unescaped);
12350 m_owned = false;
12352 if (unescaped == NULL || *unescaped == 0)
12353 return;
12355 len = strlen (unescaped);
12356 escaped = NULL;
12357 new_i = 0;
12359 for (i = 0; i < len; i++)
12361 char c = unescaped[i];
12363 if (!ISCNTRL (c))
12365 if (escaped)
12366 escaped[new_i++] = c;
12367 continue;
12370 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12372 if (escaped == NULL)
12374 /* We only allocate space for a new string if we
12375 actually encounter a control character that
12376 needs replacing. */
12377 escaped = (char *) xmalloc (len * 2 + 1);
12378 strncpy (escaped, unescaped, i);
12379 new_i = i;
12382 escaped[new_i++] = '\\';
12384 switch (c)
12386 case '\a': escaped[new_i++] = 'a'; break;
12387 case '\b': escaped[new_i++] = 'b'; break;
12388 case '\f': escaped[new_i++] = 'f'; break;
12389 case '\n': escaped[new_i++] = 'n'; break;
12390 case '\r': escaped[new_i++] = 'r'; break;
12391 case '\t': escaped[new_i++] = 't'; break;
12392 case '\v': escaped[new_i++] = 'v'; break;
12393 default: escaped[new_i++] = '?'; break;
12396 else if (escaped)
12397 escaped[new_i++] = c;
12400 if (escaped)
12402 escaped[new_i] = 0;
12403 m_str = escaped;
12404 m_owned = true;
12408 /* Warn about a use of an identifier which was marked deprecated. Returns
12409 whether a warning was given. */
12411 bool
12412 warn_deprecated_use (tree node, tree attr)
12414 escaped_string msg;
12416 if (node == 0 || !warn_deprecated_decl)
12417 return false;
12419 if (!attr)
12421 if (DECL_P (node))
12422 attr = DECL_ATTRIBUTES (node);
12423 else if (TYPE_P (node))
12425 tree decl = TYPE_STUB_DECL (node);
12426 if (decl)
12427 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12428 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12429 != NULL_TREE)
12431 node = TREE_TYPE (decl);
12432 attr = TYPE_ATTRIBUTES (node);
12437 if (attr)
12438 attr = lookup_attribute ("deprecated", attr);
12440 if (attr)
12441 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12443 bool w = false;
12444 if (DECL_P (node))
12446 auto_diagnostic_group d;
12447 if (msg)
12448 w = warning (OPT_Wdeprecated_declarations,
12449 "%qD is deprecated: %s", node, (const char *) msg);
12450 else
12451 w = warning (OPT_Wdeprecated_declarations,
12452 "%qD is deprecated", node);
12453 if (w)
12454 inform (DECL_SOURCE_LOCATION (node), "declared here");
12456 else if (TYPE_P (node))
12458 tree what = NULL_TREE;
12459 tree decl = TYPE_STUB_DECL (node);
12461 if (TYPE_NAME (node))
12463 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12464 what = TYPE_NAME (node);
12465 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12466 && DECL_NAME (TYPE_NAME (node)))
12467 what = DECL_NAME (TYPE_NAME (node));
12470 auto_diagnostic_group d;
12471 if (what)
12473 if (msg)
12474 w = warning (OPT_Wdeprecated_declarations,
12475 "%qE is deprecated: %s", what, (const char *) msg);
12476 else
12477 w = warning (OPT_Wdeprecated_declarations,
12478 "%qE is deprecated", what);
12480 else
12482 if (msg)
12483 w = warning (OPT_Wdeprecated_declarations,
12484 "type is deprecated: %s", (const char *) msg);
12485 else
12486 w = warning (OPT_Wdeprecated_declarations,
12487 "type is deprecated");
12490 if (w && decl)
12491 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12494 return w;
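
/* Illustrative sketch, not part of GCC proper: the kind of user code that
   reaches warn_deprecated_use.  The identifiers are invented.

     int old_api (void) __attribute__ ((deprecated ("use new_api instead")));

     int
     caller (void)
     {
       return old_api ();   // warning: 'old_api' is deprecated:
			    // use new_api instead [-Wdeprecated-declarations]
     }

   The front end finds the "deprecated" attribute on the decl, the message
   string is escaped via escaped_string above, and the warning is followed
   by a "declared here" note at the declaration.  */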
12497 /* Error out with an identifier which was marked 'unavailable'. */
12498 void
12499 error_unavailable_use (tree node, tree attr)
12501 escaped_string msg;
12503 if (node == 0)
12504 return;
12506 if (!attr)
12508 if (DECL_P (node))
12509 attr = DECL_ATTRIBUTES (node);
12510 else if (TYPE_P (node))
12512 tree decl = TYPE_STUB_DECL (node);
12513 if (decl)
12514 attr = lookup_attribute ("unavailable",
12515 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12519 if (attr)
12520 attr = lookup_attribute ("unavailable", attr);
12522 if (attr)
12523 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12525 if (DECL_P (node))
12527 auto_diagnostic_group d;
12528 if (msg)
12529 error ("%qD is unavailable: %s", node, (const char *) msg);
12530 else
12531 error ("%qD is unavailable", node);
12532 inform (DECL_SOURCE_LOCATION (node), "declared here");
12534 else if (TYPE_P (node))
12536 tree what = NULL_TREE;
12537 tree decl = TYPE_STUB_DECL (node);
12539 if (TYPE_NAME (node))
12541 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12542 what = TYPE_NAME (node);
12543 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12544 && DECL_NAME (TYPE_NAME (node)))
12545 what = DECL_NAME (TYPE_NAME (node));
12548 auto_diagnostic_group d;
12549 if (what)
12551 if (msg)
12552 error ("%qE is unavailable: %s", what, (const char *) msg);
12553 else
12554 error ("%qE is unavailable", what);
12556 else
12558 if (msg)
12559 error ("type is unavailable: %s", (const char *) msg);
12560 else
12561 error ("type is unavailable");
12564 if (decl)
12565 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12569 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12570 somewhere in it. */
12572 bool
12573 contains_bitfld_component_ref_p (const_tree ref)
12575 while (handled_component_p (ref))
12577 if (TREE_CODE (ref) == COMPONENT_REF
12578 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12579 return true;
12580 ref = TREE_OPERAND (ref, 0);
12583 return false;
12586 /* Try to determine whether a TRY_CATCH expression can fall through.
12587 This is a subroutine of block_may_fallthru. */
12589 static bool
12590 try_catch_may_fallthru (const_tree stmt)
12592 tree_stmt_iterator i;
12594 /* If the TRY block can fall through, the whole TRY_CATCH can
12595 fall through. */
12596 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12597 return true;
12599 switch (TREE_CODE (TREE_OPERAND (stmt, 1)))
12601 case CATCH_EXPR:
12602 /* See below. */
12603 return block_may_fallthru (CATCH_BODY (TREE_OPERAND (stmt, 1)));
12605 case EH_FILTER_EXPR:
12606 /* See below. */
12607 return block_may_fallthru (EH_FILTER_FAILURE (TREE_OPERAND (stmt, 1)));
12609 case STATEMENT_LIST:
12610 break;
12612 default:
12613 /* See below. */
12614 return false;
12617 i = tsi_start (TREE_OPERAND (stmt, 1));
12618 switch (TREE_CODE (tsi_stmt (i)))
12620 case CATCH_EXPR:
12621 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12622 catch expression and a body. The whole TRY_CATCH may fall
12623 through iff any of the catch bodies falls through. */
12624 for (; !tsi_end_p (i); tsi_next (&i))
12626 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12627 return true;
12629 return false;
12631 case EH_FILTER_EXPR:
12632 /* The exception filter expression only matters if there is an
12633 exception. If the exception does not match EH_FILTER_TYPES,
12634 we will execute EH_FILTER_FAILURE, and we will fall through
12635 if that falls through. If the exception does match
12636 EH_FILTER_TYPES, the stack unwinder will continue up the
12637 stack, so we will not fall through. We don't know whether we
12638 will throw an exception which matches EH_FILTER_TYPES or not,
12639 so we just ignore EH_FILTER_TYPES and assume that we might
12640 throw an exception which doesn't match. */
12641 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12643 default:
12644 /* This case represents statements to be executed when an
12645 exception occurs. Those statements are implicitly followed
12646 by a RESX statement to resume execution after the exception.
12647 So in this case the TRY_CATCH never falls through. */
12648 return false;
12652 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12653 need not be 100% accurate; simply be conservative and return true if we
12654 don't know. This is used only to avoid stupidly generating extra code.
12655 If we're wrong, we'll just delete the extra code later. */
12657 bool
12658 block_may_fallthru (const_tree block)
12660 /* This CONST_CAST is okay because expr_last returns its argument
12661 unmodified and we assign it to a const_tree. */
12662 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12664 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12666 case GOTO_EXPR:
12667 case RETURN_EXPR:
12668 /* Easy cases. If the last statement of the block implies
12669 control transfer, then we can't fall through. */
12670 return false;
12672 case SWITCH_EXPR:
12673 /* If there is a default: label or case labels cover all possible
12674 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12675 to some case label in all cases and all we care is whether the
12676 SWITCH_BODY falls through. */
12677 if (SWITCH_ALL_CASES_P (stmt))
12678 return block_may_fallthru (SWITCH_BODY (stmt));
12679 return true;
12681 case COND_EXPR:
12682 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12683 return true;
12684 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12686 case BIND_EXPR:
12687 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12689 case TRY_CATCH_EXPR:
12690 return try_catch_may_fallthru (stmt);
12692 case TRY_FINALLY_EXPR:
12693 /* The finally clause is always executed after the try clause,
12694 so if it does not fall through, then the try-finally will not
12695 fall through. Otherwise, if the try clause does not fall
12696 through, then when the finally clause falls through it will
12697 resume execution wherever the try clause was going. So the
12698 whole try-finally will only fall through if both the try
12699 clause and the finally clause fall through. */
12700 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12701 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12703 case EH_ELSE_EXPR:
12704 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12706 case MODIFY_EXPR:
12707 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12708 stmt = TREE_OPERAND (stmt, 1);
12709 else
12710 return true;
12711 /* FALLTHRU */
12713 case CALL_EXPR:
12714 /* Functions that do not return do not fall through. */
12715 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12717 case CLEANUP_POINT_EXPR:
12718 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12720 case TARGET_EXPR:
12721 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12723 case ERROR_MARK:
12724 return true;
12726 default:
12727 return lang_hooks.block_may_fallthru (stmt);
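
/* Illustrative sketch, not part of GCC proper: conservative answers one
   would expect from block_may_fallthru for simple GENERIC fragments, given
   the cases handled above:

     return;		-> false  (RETURN_EXPR transfers control)
     if (c) return;	-> true   (the empty else arm falls through)
     abort ();		-> false  (ECF_NORETURN call)
     x = 1;		-> true   (plain MODIFY_EXPR)  */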
12731 /* True if we are using EH to handle cleanups. */
12732 static bool using_eh_for_cleanups_flag = false;
12734 /* This routine is called from front ends to indicate eh should be used for
12735 cleanups. */
12736 void
12737 using_eh_for_cleanups (void)
12739 using_eh_for_cleanups_flag = true;
12742 /* Query whether EH is used for cleanups. */
12743 bool
12744 using_eh_for_cleanups_p (void)
12746 return using_eh_for_cleanups_flag;
12749 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12750 const char *
12751 get_tree_code_name (enum tree_code code)
12753 const char *invalid = "<invalid tree code>";
12755 /* The tree_code enum promotes to signed, but we could be getting
12756 invalid values, so force an unsigned comparison. */
12757 if (unsigned (code) >= MAX_TREE_CODES)
12759 if ((unsigned)code == 0xa5a5)
12760 return "ggc_freed";
12761 return invalid;
12764 return tree_code_name[code];
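
/* Illustrative sketch, not part of GCC proper: the strings returned here
   are the lower-case names from tree.def, so one would expect e.g.

     get_tree_code_name (PLUS_EXPR)   -> "plus_expr"
     get_tree_code_name (INTEGER_CST) -> "integer_cst"  */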
12767 /* Drops the TREE_OVERFLOW flag from T. */
12769 tree
12770 drop_tree_overflow (tree t)
12772 gcc_checking_assert (TREE_OVERFLOW (t));
12774 /* For tree codes with a sharing machinery re-build the result. */
12775 if (poly_int_tree_p (t))
12776 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12778 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12779 and canonicalize the result. */
12780 if (TREE_CODE (t) == VECTOR_CST)
12782 tree_vector_builder builder;
12783 builder.new_unary_operation (TREE_TYPE (t), t, true);
12784 unsigned int count = builder.encoded_nelts ();
12785 for (unsigned int i = 0; i < count; ++i)
12787 tree elt = VECTOR_CST_ELT (t, i);
12788 if (TREE_OVERFLOW (elt))
12789 elt = drop_tree_overflow (elt);
12790 builder.quick_push (elt);
12792 return builder.build ();
12795 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12796 and drop the flag. */
12797 t = copy_node (t);
12798 TREE_OVERFLOW (t) = 0;
12800 /* For constants that contain nested constants, drop the flag
12801 from those as well. */
12802 if (TREE_CODE (t) == COMPLEX_CST)
12804 if (TREE_OVERFLOW (TREE_REALPART (t)))
12805 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12806 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12807 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12810 return t;
12813 /* Given a memory reference expression T, return its base address.
12814 The base address of a memory reference expression is the main
12815 object being referenced. For instance, the base address for
12816 'array[i].fld[j]' is 'array'. You can think of this as stripping
12817 away the offset part from a memory address.
12819 This function calls handled_component_p to strip away all the inner
12820 parts of the memory reference until it reaches the base object. */
12822 tree
12823 get_base_address (tree t)
12825 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12826 t = TREE_OPERAND (t, 0);
12827 while (handled_component_p (t))
12828 t = TREE_OPERAND (t, 0);
12830 if ((TREE_CODE (t) == MEM_REF
12831 || TREE_CODE (t) == TARGET_MEM_REF)
12832 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12833 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12835 return t;
12838 /* Return a tree of sizetype representing the size, in bytes, of the element
12839 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12841 tree
12842 array_ref_element_size (tree exp)
12844 tree aligned_size = TREE_OPERAND (exp, 3);
12845 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12846 location_t loc = EXPR_LOCATION (exp);
12848 /* If a size was specified in the ARRAY_REF, it's the size measured
12849 in alignment units of the element type. So multiply by that value. */
12850 if (aligned_size)
12852 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12853 sizetype from another type of the same width and signedness. */
12854 if (TREE_TYPE (aligned_size) != sizetype)
12855 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12856 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12857 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12860 /* Otherwise, take the size from that of the element type. Substitute
12861 any PLACEHOLDER_EXPR that we have. */
12862 else
12863 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12866 /* Return a tree representing the lower bound of the array mentioned in
12867 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12869 tree
12870 array_ref_low_bound (tree exp)
12872 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12874 /* If a lower bound is specified in EXP, use it. */
12875 if (TREE_OPERAND (exp, 2))
12876 return TREE_OPERAND (exp, 2);
12878 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12879 substituting for a PLACEHOLDER_EXPR as needed. */
12880 if (domain_type && TYPE_MIN_VALUE (domain_type))
12881 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12883 /* Otherwise, return a zero of the appropriate type. */
12884 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12885 return (idxtype == error_mark_node
12886 ? integer_zero_node : build_int_cst (idxtype, 0));
12889 /* Return a tree representing the upper bound of the array mentioned in
12890 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12892 tree
12893 array_ref_up_bound (tree exp)
12895 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12897 /* If there is a domain type and it has an upper bound, use it, substituting
12898 for a PLACEHOLDER_EXPR as needed. */
12899 if (domain_type && TYPE_MAX_VALUE (domain_type))
12900 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12902 /* Otherwise fail. */
12903 return NULL_TREE;
12906 /* Returns true if REF is an array reference, a component reference,
12907 or a memory reference to an array whose actual size might be larger
12908 than its upper bound implies. There are multiple cases:
12909 A. a ref to a flexible array member at the end of a structure;
12910 B. a ref to an array with a different type against the original decl;
12911 for example:
12913 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12914 (*((char(*)[16])&a[0]))[i+8]
12916 C. a ref to an array that was passed as a parameter;
12917 for example:
12919 int test (uint8_t *p, uint32_t t[1][1], int n) {
12920 for (int i = 0; i < 4; i++, p++)
12921 t[i][0] = ...;
12923 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12926 bool
12927 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12929 /* The TYPE for this array reference. */
12930 tree atype = NULL_TREE;
12931 /* The FIELD_DECL for the array field in the containing structure. */
12932 tree afield_decl = NULL_TREE;
12933 /* Whether this array is the trailing array of a structure. */
12934 bool is_trailing_array_tmp = false;
12935 if (!is_trailing_array)
12936 is_trailing_array = &is_trailing_array_tmp;
12938 if (TREE_CODE (ref) == ARRAY_REF
12939 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12941 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12942 ref = TREE_OPERAND (ref, 0);
12944 else if (TREE_CODE (ref) == COMPONENT_REF
12945 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12947 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12948 afield_decl = TREE_OPERAND (ref, 1);
12950 else if (TREE_CODE (ref) == MEM_REF)
12952 tree arg = TREE_OPERAND (ref, 0);
12953 if (TREE_CODE (arg) == ADDR_EXPR)
12954 arg = TREE_OPERAND (arg, 0);
12955 tree argtype = TREE_TYPE (arg);
12956 if (TREE_CODE (argtype) == RECORD_TYPE)
12958 if (tree fld = last_field (argtype))
12960 atype = TREE_TYPE (fld);
12961 afield_decl = fld;
12962 if (TREE_CODE (atype) != ARRAY_TYPE)
12963 return false;
12964 if (VAR_P (arg) && DECL_SIZE (fld))
12965 return false;
12967 else
12968 return false;
12970 else
12971 return false;
12973 else
12974 return false;
12976 if (TREE_CODE (ref) == STRING_CST)
12977 return false;
12979 tree ref_to_array = ref;
12980 while (handled_component_p (ref))
12982 /* If the reference chain contains a component reference to a
12983 non-union type and another field follows, the reference
12984 is not at the end of a structure. */
12985 if (TREE_CODE (ref) == COMPONENT_REF)
12987 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12989 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12990 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12991 nextf = DECL_CHAIN (nextf);
12992 if (nextf)
12993 return false;
12996 /* If we have a multi-dimensional array we do not consider
12997 a non-innermost dimension as a flex array if the whole
12998 multi-dimensional array is at struct end.
12999 Same for an array of aggregates with a trailing array
13000 member. */
13001 else if (TREE_CODE (ref) == ARRAY_REF)
13002 return false;
13003 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13005 /* If we view an underlying object as something else, then what we
13006 gathered up to now is what we have to rely on. */
13007 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13008 break;
13009 else
13010 gcc_unreachable ();
13012 ref = TREE_OPERAND (ref, 0);
13015 gcc_assert (!afield_decl
13016 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
13018 /* The array is now at struct end. Treat a flexible array member as
13019 always subject to extension, even into just the padding constrained by
13020 an underlying decl. */
13021 if (! TYPE_SIZE (atype)
13022 || ! TYPE_DOMAIN (atype)
13023 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13025 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13026 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13029 /* If the reference is based on a declared entity, the size of the array
13030 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
13031 ref = get_base_address (ref);
13032 if (ref
13033 && DECL_P (ref)
13034 && !(flag_unconstrained_commons
13035 && VAR_P (ref) && DECL_COMMON (ref))
13036 && DECL_SIZE_UNIT (ref)
13037 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13039 /* If the object itself is the array, it is not at struct end. */
13040 if (DECL_P (ref_to_array))
13041 return false;
13043 /* Check whether the array domain covers all of the available
13044 padding. */
13045 poly_int64 offset;
13046 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13047 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13048 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13050 *is_trailing_array
13051 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13052 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13054 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13056 *is_trailing_array
13057 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13058 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13061 /* If at least one extra element fits, it is a flexarray. */
13062 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13063 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13064 + 2)
13065 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13066 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13068 *is_trailing_array
13069 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13070 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13073 return false;
13076 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13077 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
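
/* Illustrative sketch, not part of GCC proper: case A of the comment above
   corresponds to user code such as

     struct S { int n; char data[]; };	// C99 flexible array member

     int
     get (struct S *p, int i)
     {
       return p->data[i];	// ARRAY_REF with flexible size;
				// *IS_TRAILING_ARRAY set to true
     }

   whereas an array member that is followed by further fields keeps its
   declared bound and the function returns false for references into it.  */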
13081 /* Return a tree representing the offset, in bytes, of the field referenced
13082 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13084 tree
13085 component_ref_field_offset (tree exp)
13087 tree aligned_offset = TREE_OPERAND (exp, 2);
13088 tree field = TREE_OPERAND (exp, 1);
13089 location_t loc = EXPR_LOCATION (exp);
13091 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13092 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13093 value. */
13094 if (aligned_offset)
13096 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13097 sizetype from another type of the same width and signedness. */
13098 if (TREE_TYPE (aligned_offset) != sizetype)
13099 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13100 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13101 size_int (DECL_OFFSET_ALIGN (field)
13102 / BITS_PER_UNIT));
13105 /* Otherwise, take the offset from that of the field. Substitute
13106 any PLACEHOLDER_EXPR that we have. */
13107 else
13108 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13111 /* Given the initializer INIT, return the initializer for the field
13112 DECL if it exists, otherwise null. Used to obtain the initializer
13113 for a flexible array member and determine its size. */
13115 static tree
13116 get_initializer_for (tree init, tree decl)
13118 STRIP_NOPS (init);
13120 tree fld, fld_init;
13121 unsigned HOST_WIDE_INT i;
13122 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13124 if (decl == fld)
13125 return fld_init;
13127 if (TREE_CODE (fld) == CONSTRUCTOR)
13129 fld_init = get_initializer_for (fld_init, decl);
13130 if (fld_init)
13131 return fld_init;
13135 return NULL_TREE;
13138 /* Determines the special array member type for the array reference REF. */
13139 special_array_member
13140 component_ref_sam_type (tree ref)
13142 special_array_member sam_type = special_array_member::none;
13144 tree member = TREE_OPERAND (ref, 1);
13145 tree memsize = DECL_SIZE_UNIT (member);
13146 if (memsize)
13148 tree memtype = TREE_TYPE (member);
13149 if (TREE_CODE (memtype) != ARRAY_TYPE)
13150 return sam_type;
13152 bool trailing = false;
13153 (void) array_ref_flexible_size_p (ref, &trailing);
13154 bool zero_elts = integer_zerop (memsize);
13155 if (zero_elts && integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype))))
13157 /* If the array element has zero size, verify whether it is a flexible
13158 array member or a zero-length array. Clear zero_elts if
13159 it has one or more members or is a VLA member. */
13160 if (tree dom = TYPE_DOMAIN (memtype))
13161 if (tree min = TYPE_MIN_VALUE (dom))
13162 if (tree max = TYPE_MAX_VALUE (dom))
13163 if (TREE_CODE (min) != INTEGER_CST
13164 || TREE_CODE (max) != INTEGER_CST
13165 || !((integer_zerop (min) && integer_all_onesp (max))
13166 || tree_int_cst_lt (max, min)))
13167 zero_elts = false;
13169 if (!trailing && !zero_elts)
13170 /* MEMBER is an interior array with more than one element. */
13171 return special_array_member::int_n;
13173 if (zero_elts)
13175 if (trailing)
13176 return special_array_member::trail_0;
13177 else
13178 return special_array_member::int_0;
13181 if (!zero_elts)
13182 if (tree dom = TYPE_DOMAIN (memtype))
13183 if (tree min = TYPE_MIN_VALUE (dom))
13184 if (tree max = TYPE_MAX_VALUE (dom))
13185 if (TREE_CODE (min) == INTEGER_CST
13186 && TREE_CODE (max) == INTEGER_CST)
13188 offset_int minidx = wi::to_offset (min);
13189 offset_int maxidx = wi::to_offset (max);
13190 offset_int neltsm1 = maxidx - minidx;
13191 if (neltsm1 > 0)
13192 /* MEMBER is a trailing array with more than
13193 one element. */
13194 return special_array_member::trail_n;
13196 if (neltsm1 == 0)
13197 return special_array_member::trail_1;
13201 return sam_type;
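
/* Illustrative sketch, not part of GCC proper: the classification above
   applied to COMPONENT_REFs of the member A in user code like

     struct T0 { int n; int a[0]; };	// trailing zero-length   -> trail_0
     struct T1 { int n; int a[1]; };	// trailing one-element   -> trail_1
     struct TN { int n; int a[4]; };	// trailing multi-element -> trail_n
     struct I0 { int a[0]; int n; };	// interior zero-length   -> int_0
     struct IN { int a[4]; int n; };	// interior multi-element -> int_n

   A true C99 flexible array member (int a[];) has no DECL_SIZE_UNIT, so
   one would expect special_array_member::none for it.  */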
13204 /* Determines the size of the member referenced by the COMPONENT_REF
13205 REF, using its initializer expression if necessary in order to
13206 determine the size of an initialized flexible array member.
13207 If non-null, set *SAM to the type of special array member.
13208 Returns the size as sizetype (which might be zero for an object
13209 with an uninitialized flexible array member) or null if the size
13210 cannot be determined. */
13212 tree
13213 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13215 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13217 special_array_member sambuf;
13218 if (!sam)
13219 sam = &sambuf;
13220 *sam = component_ref_sam_type (ref);
13222 /* The object/argument referenced by the COMPONENT_REF and its type. */
13223 tree arg = TREE_OPERAND (ref, 0);
13224 tree argtype = TREE_TYPE (arg);
13225 /* The referenced member. */
13226 tree member = TREE_OPERAND (ref, 1);
13228 tree memsize = DECL_SIZE_UNIT (member);
13229 if (memsize)
13231 tree memtype = TREE_TYPE (member);
13232 if (TREE_CODE (memtype) != ARRAY_TYPE)
13233 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13234 to the type of a class with a virtual base which doesn't
13235 reflect the size of the virtual's members (see pr97595).
13236 If that's the case, fail for now and implement something
13237 more robust in the future. */
13238 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13239 ? memsize : NULL_TREE);
13241 /* Arrays of two or more elements are treated as normal arrays by default. */
13242 if (*sam == special_array_member::int_n
13243 || *sam == special_array_member::trail_n)
13244 return memsize;
13246 tree afield_decl = TREE_OPERAND (ref, 1);
13247 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13248 /* If the trailing array is not a flexible array member, treat it as
13249 a normal array. */
13250 if (DECL_NOT_FLEXARRAY (afield_decl)
13251 && *sam != special_array_member::int_0)
13252 return memsize;
13254 if (*sam == special_array_member::int_0)
13255 memsize = NULL_TREE;
13257 /* For a reference to a flexible array member of a union
13258 use the size of the union instead of the size of the member. */
13259 if (TREE_CODE (argtype) == UNION_TYPE)
13260 memsize = TYPE_SIZE_UNIT (argtype);
13263 /* MEMBER is either a bona fide flexible array member, or a zero-element
13264 array member, or an array of length one treated as such. */
13266 /* If the reference is to a declared object and the member is a true
13267 flexible array, try to determine its size from its initializer. */
13268 poly_int64 baseoff = 0;
13269 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13270 if (!base || !VAR_P (base))
13272 if (*sam != special_array_member::int_0)
13273 return NULL_TREE;
13275 if (TREE_CODE (arg) != COMPONENT_REF)
13276 return NULL_TREE;
13278 base = arg;
13279 while (TREE_CODE (base) == COMPONENT_REF)
13280 base = TREE_OPERAND (base, 0);
13281 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13284 /* BASE is the declared object of which MEMBER is either a member
13285 or that is cast to ARGTYPE (e.g., a char buffer used to store
13286 an ARGTYPE object). */
13287 tree basetype = TREE_TYPE (base);
13289 /* Determine the base type of the referenced object. If it's
13290 the same as ARGTYPE and MEMBER has a known size, return it. */
13291 tree bt = basetype;
13292 if (*sam != special_array_member::int_0)
13293 while (TREE_CODE (bt) == ARRAY_TYPE)
13294 bt = TREE_TYPE (bt);
13295 bool typematch = useless_type_conversion_p (argtype, bt);
13296 if (memsize && typematch)
13297 return memsize;
13299 memsize = NULL_TREE;
13301 if (typematch)
13302 /* MEMBER is a true flexible array member. Compute its size from
13303 the initializer of the BASE object if it has one. */
13304 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13305 if (init != error_mark_node)
13307 init = get_initializer_for (init, member);
13308 if (init)
13310 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13311 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13313 /* Use the larger of the initializer size and the tail
13314 padding in the enclosing struct. */
13315 poly_int64 rsz = tree_to_poly_int64 (refsize);
13316 rsz -= baseoff;
13317 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13318 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13321 baseoff = 0;
13325 if (!memsize)
13327 if (typematch)
13329 if (DECL_P (base)
13330 && DECL_EXTERNAL (base)
13331 && bt == basetype
13332 && *sam != special_array_member::int_0)
13333 /* The size of a flexible array member of an extern struct
13334 with no initializer cannot be determined (it's defined
13335 in another translation unit and can have an initializer
13336 with an arbitrary number of elements). */
13337 return NULL_TREE;
13339 /* Use the size of the base struct or, for interior zero-length
13340 arrays, the size of the enclosing type. */
13341 memsize = TYPE_SIZE_UNIT (bt);
13343 else if (DECL_P (base))
13344 /* Use the size of the BASE object (possibly an array of some
13345 other type such as char used to store the struct). */
13346 memsize = DECL_SIZE_UNIT (base);
13347 else
13348 return NULL_TREE;
13351 /* If the flexible array member has a known size, use the greater
13352 of it and the tail padding in the enclosing struct.
13353 Otherwise, when the size of the flexible array member is unknown
13354 and the referenced object is not a struct, use the size of its
13355 type when known. This detects sizes of array buffers when cast
13356 to struct types with flexible array members. */
13357 if (memsize)
13359 if (!tree_fits_poly_int64_p (memsize))
13360 return NULL_TREE;
13361 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13362 if (known_lt (baseoff, memsz64))
13364 memsz64 -= baseoff;
13365 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13367 return size_zero_node;
13370 /* Return "don't know" for an external non-array object since its
13371 flexible array member can be initialized to have any number of
13372 elements. Otherwise, return zero because the flexible array
13373 member has no elements. */
13374 return (DECL_P (base)
13375 && DECL_EXTERNAL (base)
13376 && (!typematch
13377 || TREE_CODE (basetype) != ARRAY_TYPE)
13378 ? NULL_TREE : size_zero_node);
13381 /* Return the machine mode of T. For vectors, returns the mode of the
13382 inner type. The main use case is to feed the result to HONOR_NANS,
13383 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13385 machine_mode
13386 element_mode (const_tree t)
13388 if (!TYPE_P (t))
13389 t = TREE_TYPE (t);
13390 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13391 t = TREE_TYPE (t);
13392 return TYPE_MODE (t);
13395 /* Vector types need to re-check the target flags each time we report
13396 the machine mode. We need to do this because attribute target can
13397 change the result of vector_mode_supported_p and have_regs_of_mode
13398 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13399 change on a per-function basis. */
13400 /* ??? Possibly a better solution is to run through all the types
13401 referenced by a function and re-compute the TYPE_MODE once, rather
13402 than make the TYPE_MODE macro call a function. */
13404 machine_mode
13405 vector_type_mode (const_tree t)
13407 machine_mode mode;
13409 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13411 mode = t->type_common.mode;
13412 if (VECTOR_MODE_P (mode)
13413 && (!targetm.vector_mode_supported_p (mode)
13414 || !have_regs_of_mode[mode]))
13416 scalar_int_mode innermode;
13418 /* For integers, try mapping it to a same-sized scalar mode. */
13419 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13421 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13422 * GET_MODE_BITSIZE (innermode));
13423 scalar_int_mode mode;
13424 if (int_mode_for_size (size, 0).exists (&mode)
13425 && have_regs_of_mode[mode])
13426 return mode;
13429 return BLKmode;
13432 return mode;
13435 /* Return the size in bits of each element of vector type TYPE. */
13437 unsigned int
13438 vector_element_bits (const_tree type)
13440 gcc_checking_assert (VECTOR_TYPE_P (type));
13441 if (VECTOR_BOOLEAN_TYPE_P (type))
13442 return TYPE_PRECISION (TREE_TYPE (type));
13443 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13446 /* Calculate the size in bits of each element of vector type TYPE
13447 and return the result as a tree of type bitsizetype. */
13449 tree
13450 vector_element_bits_tree (const_tree type)
13452 gcc_checking_assert (VECTOR_TYPE_P (type));
13453 if (VECTOR_BOOLEAN_TYPE_P (type))
13454 return bitsize_int (vector_element_bits (type));
13455 return TYPE_SIZE (TREE_TYPE (type));
13458 /* Verify that basic properties of T match TV and thus T can be a variant of
13459 TV. TV should be the more specified variant (i.e. the main variant). */
13461 static bool
13462 verify_type_variant (const_tree t, tree tv)
13464 /* Type variant can differ by:
13466 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13467 ENCODE_QUAL_ADDR_SPACE.
13468 - the main variant may be COMPLETE_TYPE_P and variant types !COMPLETE_TYPE_P;
13469 in this case some values may not be set in the variant types
13470 (see the COMPLETE_TYPE_P checks).
13471 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13472 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13473 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13474 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13475 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13476 this is necessary to make it possible to merge types from different TUs
13477 - arrays, pointers and references may have TREE_TYPE that is a variant
13478 of TREE_TYPE of their main variants.
13479 - aggregates may have a new TYPE_FIELDS list that lists variants of
13480 the main variant TYPE_FIELDS.
13481 - vector types may differ by TYPE_VECTOR_OPAQUE
13484 /* Convenience macro for matching individual fields. */
13485 #define verify_variant_match(flag) \
13486 do { \
13487 if (flag (tv) != flag (t)) \
13489 error ("type variant differs by %s", #flag); \
13490 debug_tree (tv); \
13491 return false; \
13493 } while (false)
13495 /* tree_base checks. */
13497 verify_variant_match (TREE_CODE);
13498 /* FIXME: Ada builds non-artificial variants of artificial types. */
13499 #if 0
13500 if (TYPE_ARTIFICIAL (tv))
13501 verify_variant_match (TYPE_ARTIFICIAL);
13502 #endif
13503 if (POINTER_TYPE_P (tv))
13504 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13505 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13506 verify_variant_match (TYPE_UNSIGNED);
13507 verify_variant_match (TYPE_PACKED);
13508 if (TREE_CODE (t) == REFERENCE_TYPE)
13509 verify_variant_match (TYPE_REF_IS_RVALUE);
13510 if (AGGREGATE_TYPE_P (t))
13511 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13512 else
13513 verify_variant_match (TYPE_SATURATING);
13514 /* FIXME: This check triggers during libstdc++ builds. */
13515 #if 0
13516 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13517 verify_variant_match (TYPE_FINAL_P);
13518 #endif
13520 /* tree_type_common checks. */
13522 if (COMPLETE_TYPE_P (t))
13524 verify_variant_match (TYPE_MODE);
13525 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13526 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13527 verify_variant_match (TYPE_SIZE);
13528 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13529 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13530 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13532 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13533 TYPE_SIZE_UNIT (tv), 0));
13534 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13535 debug_tree (tv);
13536 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13537 debug_tree (TYPE_SIZE_UNIT (tv));
13538 error ("type%'s %<TYPE_SIZE_UNIT%>");
13539 debug_tree (TYPE_SIZE_UNIT (t));
13540 return false;
13542 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13544 verify_variant_match (TYPE_PRECISION_RAW);
13545 if (RECORD_OR_UNION_TYPE_P (t))
13546 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13547 else if (TREE_CODE (t) == ARRAY_TYPE)
13548 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13549 /* During LTO we merge variant lists from different translation units
13550 that may differ by TYPE_CONTEXT, which in turn may point
13551 to TRANSLATION_UNIT_DECL.
13552 Ada also builds variants of types with different TYPE_CONTEXT. */
13553 #if 0
13554 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13555 verify_variant_match (TYPE_CONTEXT);
13556 #endif
13557 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13558 verify_variant_match (TYPE_STRING_FLAG);
13559 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13560 verify_variant_match (TYPE_CXX_ODR_P);
13561 if (TYPE_ALIAS_SET_KNOWN_P (t))
13563 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13564 debug_tree (tv);
13565 return false;
13568 /* tree_type_non_common checks. */
13570 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13571 and dangles the pointer from time to time. */
13572 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13573 && (in_lto_p || !TYPE_VFIELD (tv)
13574 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13576 error ("type variant has different %<TYPE_VFIELD%>");
13577 debug_tree (tv);
13578 return false;
13580 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13581 || TREE_CODE (t) == INTEGER_TYPE
13582 || TREE_CODE (t) == BOOLEAN_TYPE
13583 || TREE_CODE (t) == BITINT_TYPE
13584 || SCALAR_FLOAT_TYPE_P (t)
13585 || FIXED_POINT_TYPE_P (t))
13587 verify_variant_match (TYPE_MAX_VALUE);
13588 verify_variant_match (TYPE_MIN_VALUE);
13590 if (TREE_CODE (t) == METHOD_TYPE)
13591 verify_variant_match (TYPE_METHOD_BASETYPE);
13592 if (TREE_CODE (t) == OFFSET_TYPE)
13593 verify_variant_match (TYPE_OFFSET_BASETYPE);
13594 if (TREE_CODE (t) == ARRAY_TYPE)
13595 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13596 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13597 or even type's main variant. This is needed to make bootstrap pass
13598 and the bug seems new in GCC 5.
13599 C++ FE should be updated to make this consistent and we should check
13600 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13601 is a match with main variant.
13603 Also disable the check for Java for now because of a parser hack that builds
13604 a dummy BINFO first and then sometimes replaces it by the real BINFO in some
13605 of the copies. */
13606 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13607 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13608 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13609 Since there is no cheap way to tell C++ and Java types apart without LTO,
13610 do the checking at LTO time only. */
13611 && (in_lto_p && odr_type_p (t)))
13613 error ("type variant has different %<TYPE_BINFO%>");
13614 debug_tree (tv);
13615 error ("type variant%'s %<TYPE_BINFO%>");
13616 debug_tree (TYPE_BINFO (tv));
13617 error ("type%'s %<TYPE_BINFO%>");
13618 debug_tree (TYPE_BINFO (t));
13619 return false;
13622 /* Check various uses of TYPE_VALUES_RAW. */
13623 if (TREE_CODE (t) == ENUMERAL_TYPE
13624 && TYPE_VALUES (t))
13625 verify_variant_match (TYPE_VALUES);
13626 else if (TREE_CODE (t) == ARRAY_TYPE)
13627 verify_variant_match (TYPE_DOMAIN);
13628 /* Permit incomplete variants of complete type. While FEs may complete
13629 all variants, this does not happen for C++ templates in all cases. */
13630 else if (RECORD_OR_UNION_TYPE_P (t)
13631 && COMPLETE_TYPE_P (t)
13632 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13634 tree f1, f2;
13636 /* Fortran builds qualified variants as new records with items of
13637 qualified type. Verify that they look the same. */
13638 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13639 f1 && f2;
13640 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13641 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13642 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13643 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13644 /* FIXME: gfc_nonrestricted_type builds all types as variants
13645 with the exception of pointer types. It deeply copies the type
13646 which means that we may end up with a variant type
13647 referring to a non-variant pointer. We may change it to
13648 produce types as variants, too, like
13649 objc_get_protocol_qualified_type does. */
13650 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13651 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13652 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13653 break;
13654 if (f1 || f2)
13656 error ("type variant has different %<TYPE_FIELDS%>");
13657 debug_tree (tv);
13658 error ("first mismatch is field");
13659 debug_tree (f1);
13660 error ("and field");
13661 debug_tree (f2);
13662 return false;
13665 else if (FUNC_OR_METHOD_TYPE_P (t))
13666 verify_variant_match (TYPE_ARG_TYPES);
13667 /* For C++ the qualified variant of an array type is really an array type
13668 of the qualified TREE_TYPE.
13669 ObjC builds variants of pointers where the pointed-to type is a variant,
13670 too, in objc_get_protocol_qualified_type. */
13671 if (TREE_TYPE (t) != TREE_TYPE (tv)
13672 && ((TREE_CODE (t) != ARRAY_TYPE
13673 && !POINTER_TYPE_P (t))
13674 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13675 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13677 error ("type variant has different %<TREE_TYPE%>");
13678 debug_tree (tv);
13679 error ("type variant%'s %<TREE_TYPE%>");
13680 debug_tree (TREE_TYPE (tv));
13681 error ("type%'s %<TREE_TYPE%>");
13682 debug_tree (TREE_TYPE (t));
13683 return false;
13685 if (type_with_alias_set_p (t)
13686 && !gimple_canonical_types_compatible_p (t, tv, false))
13688 error ("type is not compatible with its variant");
13689 debug_tree (tv);
13690 error ("type variant%'s %<TREE_TYPE%>");
13691 debug_tree (TREE_TYPE (tv));
13692 error ("type%'s %<TREE_TYPE%>");
13693 debug_tree (TREE_TYPE (t));
13694 return false;
13696 return true;
13697 #undef verify_variant_match
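
/* A minimal sketch of the variant relation checked above: a qualified
   variant is a distinct node that shares TYPE_MAIN_VARIANT with the
   unqualified type.  The sketch_* helper name is hypothetical.  */

static void
sketch_type_variant_relation ()
{
  tree t = integer_type_node;
  tree ct = build_qualified_type (t, TYPE_QUAL_CONST);
  /* The const variant is a separate node, but its main variant is T,
     which is what verify_type_variant receives as TV.  */
  gcc_checking_assert (ct != t && TYPE_MAIN_VARIANT (ct) == t);
}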
13701 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13702 the middle-end types_compatible_p function. It needs to avoid
13703 claiming types are different for types that should be treated
13704 the same with respect to TBAA. Canonical types are also used
13705 for IL consistency checks via the useless_type_conversion_p
13706 predicate which does not handle all type kinds itself but falls
13707 back to pointer-comparison of TYPE_CANONICAL for aggregates
13708 for example. */
13710 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13711 type calculation because we need to allow inter-operability between signed
13712 and unsigned variants. */
13714 bool
13715 type_with_interoperable_signedness (const_tree type)
13717 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13718 signed char and unsigned char. Similarly the Fortran FE builds
13719 C_SIZE_T as a signed type, while C defines it as unsigned. */
13721 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13722 == INTEGER_TYPE
13723 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13724 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
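
/* A minimal sketch of the interoperable-signedness rule: both signedness
   flavours of char must be accepted.  The sketch_* helper name is
   hypothetical and uses only the standard type nodes from tree.h.  */

static void
sketch_interoperable_signedness ()
{
  /* signed char and unsigned char share the precision of
     signed_char_type_node, so either signedness is interoperable.  */
  gcc_checking_assert
    (type_with_interoperable_signedness (signed_char_type_node));
  gcc_checking_assert
    (type_with_interoperable_signedness (unsigned_char_type_node));
}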
13727 /* Return true iff T1 and T2 are structurally identical as far as
13728 TBAA is concerned.
13729 This function is used both by lto.cc canonical type merging and by the
13730 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13731 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13732 only for LTO because only in these cases TYPE_CANONICAL equivalence
13733 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13735 bool
13736 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13737 bool trust_type_canonical)
13739 /* Type variants should be same as the main variant. When not doing sanity
13740 checking to verify this fact, go to main variants and save some work. */
13741 if (trust_type_canonical)
13743 t1 = TYPE_MAIN_VARIANT (t1);
13744 t2 = TYPE_MAIN_VARIANT (t2);
13747 /* Check first for the obvious case of pointer identity. */
13748 if (t1 == t2)
13749 return true;
13751 /* Check that we have two types to compare. */
13752 if (t1 == NULL_TREE || t2 == NULL_TREE)
13753 return false;
13755 /* We consider complete types always compatible with incomplete types.
13756 This does not make sense for canonical type calculation and thus we
13757 need to ensure that we are never called on it.
13759 FIXME: For more correctness the function probably should have three modes
13760 1) mode assuming that types are complete, matching their structure
13761 2) mode allowing incomplete types but producing equivalence classes
13762 and thus ignoring all info from complete types
13763 3) mode allowing incomplete types to match complete but checking
13764 compatibility between complete types.
13766 1 and 2 can be used for canonical type calculation. 3 is the real
13767 definition of type compatibility that can be used e.g. for warnings during
13768 declaration merging. */
13770 gcc_assert (!trust_type_canonical
13771 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13773 /* If the types have been previously registered and found equal
13774 they still are. */
13776 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13777 && trust_type_canonical)
13779 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13780 they are always NULL, but they are set to non-NULL for types
13781 constructed by build_pointer_type and variants. In this case the
13782 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13783 all pointers are considered equal). Be sure not to return false
13784 negatives. */
13785 gcc_checking_assert (canonical_type_used_p (t1)
13786 && canonical_type_used_p (t2));
13787 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13790 /* For types where we do ODR based TBAA the canonical type is always
13791 set correctly, so we know that types are different if their
13792 canonical types do not match. */
13793 if (trust_type_canonical
13794 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13795 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13796 return false;
13798 /* Can't be the same type if the types don't have the same code. */
13799 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13800 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13801 return false;
13803 /* Qualifiers do not matter for canonical type comparison purposes. */
13805 /* Void types and nullptr types are always the same. */
13806 if (VOID_TYPE_P (t1)
13807 || TREE_CODE (t1) == NULLPTR_TYPE)
13808 return true;
13810 /* Can't be the same type if they have different mode. */
13811 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13812 return false;
13814 /* Non-aggregate types can be handled cheaply. */
13815 if (INTEGRAL_TYPE_P (t1)
13816 || SCALAR_FLOAT_TYPE_P (t1)
13817 || FIXED_POINT_TYPE_P (t1)
13818 || VECTOR_TYPE_P (t1)
13819 || TREE_CODE (t1) == COMPLEX_TYPE
13820 || TREE_CODE (t1) == OFFSET_TYPE
13821 || POINTER_TYPE_P (t1))
13823 /* Can't be the same type if they have different precision. */
13824 if (TYPE_PRECISION_RAW (t1) != TYPE_PRECISION_RAW (t2))
13825 return false;
13827 /* In some cases the signed and unsigned types are required to be
13828 inter-operable. */
13829 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13830 && !type_with_interoperable_signedness (t1))
13831 return false;
13833 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13834 interoperable with "signed char". Unless all frontends are revisited
13835 to agree on these types, we must ignore the flag completely. */
13837 /* The Fortran standard defines the C_PTR type, which is compatible with every
13838 C pointer. For this reason we need to glob all pointers into one.
13839 Still, pointers in different address spaces are not compatible. */
13840 if (POINTER_TYPE_P (t1))
13842 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13843 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13844 return false;
13847 /* Tail-recurse to components. */
13848 if (VECTOR_TYPE_P (t1)
13849 || TREE_CODE (t1) == COMPLEX_TYPE)
13850 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13851 TREE_TYPE (t2),
13852 trust_type_canonical);
13854 return true;
13857 /* Do type-specific comparisons. */
13858 switch (TREE_CODE (t1))
13860 case ARRAY_TYPE:
13861 /* Array types are the same if the element types are the same and
13862 the number of elements is the same. */
13863 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13864 trust_type_canonical)
13865 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13866 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13867 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13868 return false;
13869 else
13871 tree i1 = TYPE_DOMAIN (t1);
13872 tree i2 = TYPE_DOMAIN (t2);
13874 /* For an incomplete external array, the type domain can be
13875 NULL_TREE. Check this condition also. */
13876 if (i1 == NULL_TREE && i2 == NULL_TREE)
13877 return true;
13878 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13879 return false;
13880 else
13882 tree min1 = TYPE_MIN_VALUE (i1);
13883 tree min2 = TYPE_MIN_VALUE (i2);
13884 tree max1 = TYPE_MAX_VALUE (i1);
13885 tree max2 = TYPE_MAX_VALUE (i2);
13887 /* The minimum/maximum values have to be the same. */
13888 if ((min1 == min2
13889 || (min1 && min2
13890 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13891 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13892 || operand_equal_p (min1, min2, 0))))
13893 && (max1 == max2
13894 || (max1 && max2
13895 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13896 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13897 || operand_equal_p (max1, max2, 0)))))
13898 return true;
13899 else
13900 return false;
13904 case METHOD_TYPE:
13905 case FUNCTION_TYPE:
13906 /* Function types are the same if the return type and argument types
13907 are the same. */
13908 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13909 trust_type_canonical))
13910 return false;
13912 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13913 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13914 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13915 return true;
13916 else
13918 tree parms1, parms2;
13920 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13921 parms1 && parms2;
13922 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13924 if (!gimple_canonical_types_compatible_p
13925 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13926 trust_type_canonical))
13927 return false;
13930 if (parms1 || parms2)
13931 return false;
13933 return true;
13936 case RECORD_TYPE:
13937 case UNION_TYPE:
13938 case QUAL_UNION_TYPE:
13940 tree f1, f2;
13942 /* Don't try to compare variants of an incomplete type, before
13943 TYPE_FIELDS has been copied around. */
13944 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13945 return true;
13948 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13949 return false;
13951 /* For aggregate types, all the fields must be the same. */
13952 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13953 f1 || f2;
13954 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13956 /* Skip non-fields and zero-sized fields. */
13957 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13958 || (DECL_SIZE (f1)
13959 && integer_zerop (DECL_SIZE (f1)))))
13960 f1 = TREE_CHAIN (f1);
13961 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13962 || (DECL_SIZE (f2)
13963 && integer_zerop (DECL_SIZE (f2)))))
13964 f2 = TREE_CHAIN (f2);
13965 if (!f1 || !f2)
13966 break;
13967 /* The fields must have the same name, offset and type. */
13968 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13969 || !gimple_compare_field_offset (f1, f2)
13970 || !gimple_canonical_types_compatible_p
13971 (TREE_TYPE (f1), TREE_TYPE (f2),
13972 trust_type_canonical))
13973 return false;
13976 /* If one aggregate has more fields than the other, they
13977 are not the same. */
13978 if (f1 || f2)
13979 return false;
13981 return true;
13984 default:
13985 /* Consider all types with language specific trees in them mutually
13986 compatible. This is executed only from verify_type and false
13987 positives can be tolerated. */
13988 gcc_assert (!in_lto_p);
13989 return true;
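
/* A minimal sketch of the pointer globbing described above: with
   TRUST_TYPE_CANONICAL false, differently-typed pointers in the same
   address space compare compatible.  The sketch_* helper name is
   hypothetical.  */

static void
sketch_canonical_pointer_compatibility ()
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (char_type_node);
  /* Same mode, same precision, same (generic) address space, so the
     pointee types do not matter for canonical-type purposes.  */
  gcc_checking_assert (gimple_canonical_types_compatible_p (p1, p2, false));
}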
13993 /* For OPAQUE_TYPE T, it should have only size and alignment information
13994 and its mode should be of class MODE_OPAQUE. This function verifies
13995 that these properties of T match TV, which is the main variant of T, and TC,
13996 which is the canonical type of T. */
13998 static void
13999 verify_opaque_type (const_tree t, tree tv, tree tc)
14001 gcc_assert (OPAQUE_TYPE_P (t));
14002 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
14003 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
14005 /* For an opaque type T1, check if some of its properties match
14006 the corresponding ones of the other opaque type T2, and emit
14007 error messages for the inconsistent ones. */
14008 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
14009 const char *kind_msg)
14011 if (!OPAQUE_TYPE_P (t2))
14013 error ("type %s is not an opaque type", kind_msg);
14014 debug_tree (t2);
14015 return;
14017 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
14019 error ("type %s is not with opaque mode", kind_msg);
14020 debug_tree (t2);
14021 return;
14023 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14025 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
14026 debug_tree (t2);
14027 return;
14029 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
14030 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
14031 if (maybe_ne (t1_size, t2_size))
14033 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
14034 debug_tree (t2);
14035 return;
14037 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
14039 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
14040 debug_tree (t2);
14041 return;
14043 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
14045 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
14046 debug_tree (t2);
14047 return;
14051 if (t != tv)
14052 check_properties_for_opaque_type (t, tv, "variant");
14054 if (t != tc)
14055 check_properties_for_opaque_type (t, tc, "canonical");
14058 /* Verify type T. */
14060 void
14061 verify_type (const_tree t)
14063 bool error_found = false;
14064 tree mv = TYPE_MAIN_VARIANT (t);
14065 tree ct = TYPE_CANONICAL (t);
14067 if (OPAQUE_TYPE_P (t))
14069 verify_opaque_type (t, mv, ct);
14070 return;
14073 if (!mv)
14075 error ("main variant is not defined");
14076 error_found = true;
14078 else if (mv != TYPE_MAIN_VARIANT (mv))
14080 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14081 debug_tree (mv);
14082 error_found = true;
14084 else if (t != mv && !verify_type_variant (t, mv))
14085 error_found = true;
14087 if (!ct)
14089 else if (TYPE_CANONICAL (ct) != ct)
14091 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14092 debug_tree (ct);
14093 error_found = true;
14095 /* Method and function types cannot be used to address memory and thus
14096 TYPE_CANONICAL really matters only for determining useless conversions.
14098 FIXME: C++ FE produces declarations of builtin functions that are not
14099 compatible with main variants. */
14100 else if (TREE_CODE (t) == FUNCTION_TYPE)
14102 else if (t != ct
14103 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14104 with variably sized arrays because their sizes were possibly
14105 gimplified to different variables. */
14106 && !variably_modified_type_p (ct, NULL)
14107 && !gimple_canonical_types_compatible_p (t, ct, false)
14108 && COMPLETE_TYPE_P (t))
14110 error ("%<TYPE_CANONICAL%> is not compatible");
14111 debug_tree (ct);
14112 error_found = true;
14115 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14116 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14118 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14119 debug_tree (ct);
14120 error_found = true;
14122 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14124 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14125 debug_tree (ct);
14126 debug_tree (TYPE_MAIN_VARIANT (ct));
14127 error_found = true;
14131 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14132 if (RECORD_OR_UNION_TYPE_P (t))
14134 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14135 and dangles the pointer from time to time. */
14136 if (TYPE_VFIELD (t)
14137 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14138 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14140 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14141 debug_tree (TYPE_VFIELD (t));
14142 error_found = true;
14145 else if (TREE_CODE (t) == POINTER_TYPE)
14147 if (TYPE_NEXT_PTR_TO (t)
14148 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14150 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14151 debug_tree (TYPE_NEXT_PTR_TO (t));
14152 error_found = true;
14155 else if (TREE_CODE (t) == REFERENCE_TYPE)
14157 if (TYPE_NEXT_REF_TO (t)
14158 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14160 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14161 debug_tree (TYPE_NEXT_REF_TO (t));
14162 error_found = true;
14165 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14166 || FIXED_POINT_TYPE_P (t))
14168 /* FIXME: The following check should pass:
14169 useless_type_conversion_p (const_cast <tree> (t),
14170 TREE_TYPE (TYPE_MIN_VALUE (t))
14171 but does not for C sizetypes in LTO. */
14174 /* Check various uses of TYPE_MAXVAL_RAW. */
14175 if (RECORD_OR_UNION_TYPE_P (t))
14177 if (!TYPE_BINFO (t))
14179 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14181 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14182 debug_tree (TYPE_BINFO (t));
14183 error_found = true;
14185 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14187 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14188 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14189 error_found = true;
14192 else if (FUNC_OR_METHOD_TYPE_P (t))
14194 if (TYPE_METHOD_BASETYPE (t)
14195 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14196 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14198 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14199 debug_tree (TYPE_METHOD_BASETYPE (t));
14200 error_found = true;
14203 else if (TREE_CODE (t) == OFFSET_TYPE)
14205 if (TYPE_OFFSET_BASETYPE (t)
14206 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14207 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14209 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14210 debug_tree (TYPE_OFFSET_BASETYPE (t));
14211 error_found = true;
14214 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14215 || FIXED_POINT_TYPE_P (t))
14217 /* FIXME: The following check should pass:
14218 useless_type_conversion_p (const_cast <tree> (t),
14219 TREE_TYPE (TYPE_MAX_VALUE (t))
14220 but does not for C sizetypes in LTO. */
14222 else if (TREE_CODE (t) == ARRAY_TYPE)
14224 if (TYPE_ARRAY_MAX_SIZE (t)
14225 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14227 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14228 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14229 error_found = true;
14232 else if (TYPE_MAX_VALUE_RAW (t))
14234 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14235 debug_tree (TYPE_MAX_VALUE_RAW (t));
14236 error_found = true;
14239 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14241 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14242 debug_tree (TYPE_LANG_SLOT_1 (t));
14243 error_found = true;
14246 /* Check various uses of TYPE_VALUES_RAW. */
14247 if (TREE_CODE (t) == ENUMERAL_TYPE)
14248 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14250 tree value = TREE_VALUE (l);
14251 tree name = TREE_PURPOSE (l);
14253 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
14254 CONST_DECLs of ENUMERAL_TYPE. */
14255 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14257 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14258 debug_tree (value);
14259 debug_tree (name);
14260 error_found = true;
14262 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14263 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14264 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14266 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14267 "to the enum");
14268 debug_tree (value);
14269 debug_tree (name);
14270 error_found = true;
14272 if (TREE_CODE (name) != IDENTIFIER_NODE)
14274 error ("enum value name is not %<IDENTIFIER_NODE%>");
14275 debug_tree (value);
14276 debug_tree (name);
14277 error_found = true;
14280 else if (TREE_CODE (t) == ARRAY_TYPE)
14282 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14284 error ("array %<TYPE_DOMAIN%> is not integer type");
14285 debug_tree (TYPE_DOMAIN (t));
14286 error_found = true;
14289 else if (RECORD_OR_UNION_TYPE_P (t))
14291 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14293 error ("%<TYPE_FIELDS%> defined in incomplete type");
14294 error_found = true;
14296 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14298 /* TODO: verify properties of decls. */
14299 if (TREE_CODE (fld) == FIELD_DECL)
14301 else if (TREE_CODE (fld) == TYPE_DECL)
14303 else if (TREE_CODE (fld) == CONST_DECL)
14305 else if (VAR_P (fld))
14307 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14309 else if (TREE_CODE (fld) == USING_DECL)
14311 else if (TREE_CODE (fld) == FUNCTION_DECL)
14313 else
14315 error ("wrong tree in %<TYPE_FIELDS%> list");
14316 debug_tree (fld);
14317 error_found = true;
14321 else if (TREE_CODE (t) == INTEGER_TYPE
14322 || TREE_CODE (t) == BOOLEAN_TYPE
14323 || TREE_CODE (t) == BITINT_TYPE
14324 || TREE_CODE (t) == OFFSET_TYPE
14325 || TREE_CODE (t) == REFERENCE_TYPE
14326 || TREE_CODE (t) == NULLPTR_TYPE
14327 || TREE_CODE (t) == POINTER_TYPE)
14329 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14331 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14332 "is %p",
14333 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14334 error_found = true;
14336 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14338 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14339 debug_tree (TYPE_CACHED_VALUES (t));
14340 error_found = true;
14342 /* Verify just enough of the cache to ensure that no one copied it to a new
14343 type. All copying should go through copy_node, which should clear it. */
14344 else if (TYPE_CACHED_VALUES_P (t))
14346 int i;
14347 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14348 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14349 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14351 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14352 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14353 error_found = true;
14354 break;
14358 else if (FUNC_OR_METHOD_TYPE_P (t))
14359 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14361 /* C++ FE uses TREE_PURPOSE to store initial values. */
14362 if (TREE_PURPOSE (l) && in_lto_p)
14364 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14365 debug_tree (l);
14366 error_found = true;
14368 if (!TYPE_P (TREE_VALUE (l)))
14370 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14371 debug_tree (l);
14372 error_found = true;
14375 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14377 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14378 debug_tree (TYPE_VALUES_RAW (t));
14379 error_found = true;
14381 if (TREE_CODE (t) != INTEGER_TYPE
14382 && TREE_CODE (t) != BOOLEAN_TYPE
14383 && TREE_CODE (t) != BITINT_TYPE
14384 && TREE_CODE (t) != OFFSET_TYPE
14385 && TREE_CODE (t) != REFERENCE_TYPE
14386 && TREE_CODE (t) != NULLPTR_TYPE
14387 && TREE_CODE (t) != POINTER_TYPE
14388 && TYPE_CACHED_VALUES_P (t))
14390 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14391 error_found = true;
14394 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14395 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14396 of a type. */
14397 if (TREE_CODE (t) == METHOD_TYPE
14398 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14400 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14401 error_found = true;
14404 if (error_found)
14406 debug_tree (const_cast <tree> (t));
14407 internal_error ("%qs failed", __func__);
14412 /* Return 1 if ARG interpreted as signed in its precision is known to be
14413 always positive or 2 if ARG is known to be always negative, or 3 if
14414 ARG may be positive or negative. */
14416 int
14417 get_range_pos_neg (tree arg)
14419 if (arg == error_mark_node)
14420 return 3;
14422 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14423 int cnt = 0;
14424 if (TREE_CODE (arg) == INTEGER_CST)
14426 wide_int w = wi::sext (wi::to_wide (arg), prec);
14427 if (wi::neg_p (w))
14428 return 2;
14429 else
14430 return 1;
14432 while (CONVERT_EXPR_P (arg)
14433 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14434 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14436 arg = TREE_OPERAND (arg, 0);
14437 /* Narrower value zero extended into wider type
14438 will always result in positive values. */
14439 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14440 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14441 return 1;
14442 prec = TYPE_PRECISION (TREE_TYPE (arg));
14443 if (++cnt > 30)
14444 return 3;
14447 if (TREE_CODE (arg) != SSA_NAME)
14448 return 3;
14449 value_range r;
14450 while (!get_global_range_query ()->range_of_expr (r, arg)
14451 || r.undefined_p () || r.varying_p ())
14453 gimple *g = SSA_NAME_DEF_STMT (arg);
14454 if (is_gimple_assign (g)
14455 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14457 tree t = gimple_assign_rhs1 (g);
14458 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14459 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14461 if (TYPE_UNSIGNED (TREE_TYPE (t))
14462 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14463 return 1;
14464 prec = TYPE_PRECISION (TREE_TYPE (t));
14465 arg = t;
14466 if (++cnt > 30)
14467 return 3;
14468 continue;
14471 return 3;
14473 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14475 /* For unsigned values, the "positive" range comes
14476 below the "negative" range. */
14477 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14478 return 1;
14479 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14480 return 2;
14482 else
14484 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14485 return 1;
14486 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14487 return 2;
14489 return 3;
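
/* A minimal sketch of get_range_pos_neg on constants: non-negative values
   yield 1 and negative values yield 2.  The sketch_* helper name is
   hypothetical.  */

static void
sketch_get_range_pos_neg ()
{
  tree pos = build_int_cst (integer_type_node, 42);
  tree neg = build_int_cst (integer_type_node, -7);
  gcc_checking_assert (get_range_pos_neg (pos) == 1);
  gcc_checking_assert (get_range_pos_neg (neg) == 2);
}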
14495 /* Return true if ARG is marked with the nonnull attribute in the
14496 current function signature. */
14498 bool
14499 nonnull_arg_p (const_tree arg)
14501 tree t, attrs, fntype;
14502 unsigned HOST_WIDE_INT arg_num;
14504 gcc_assert (TREE_CODE (arg) == PARM_DECL
14505 && (POINTER_TYPE_P (TREE_TYPE (arg))
14506 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14508 /* The static chain decl is always non null. */
14509 if (arg == cfun->static_chain_decl)
14510 return true;
14512 /* THIS argument of method is always non-NULL. */
14513 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14514 && arg == DECL_ARGUMENTS (cfun->decl)
14515 && flag_delete_null_pointer_checks)
14516 return true;
14518 /* Values passed by reference are always non-NULL. */
14519 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14520 && flag_delete_null_pointer_checks)
14521 return true;
14523 fntype = TREE_TYPE (cfun->decl);
14524 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14526 attrs = lookup_attribute ("nonnull", attrs);
14528 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14529 if (attrs == NULL_TREE)
14530 return false;
14532 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14533 if (TREE_VALUE (attrs) == NULL_TREE)
14534 return true;
14536 /* Get the position number for ARG in the function signature. */
14537 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14539 t = DECL_CHAIN (t), arg_num++)
14541 if (t == arg)
14542 break;
14545 gcc_assert (t == arg);
14547 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14548 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14550 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14551 return true;
14555 return false;
14558 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14559 information. */
14561 location_t
14562 set_block (location_t loc, tree block)
14564 location_t pure_loc = get_pure_location (loc);
14565 source_range src_range = get_range_from_loc (line_table, loc);
14566 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14567 return line_table->get_or_create_combined_loc (pure_loc, src_range, block,
14568 discriminator);
14571 location_t
14572 set_source_range (tree expr, location_t start, location_t finish)
14574 source_range src_range;
14575 src_range.m_start = start;
14576 src_range.m_finish = finish;
14577 return set_source_range (expr, src_range);
14580 location_t
14581 set_source_range (tree expr, source_range src_range)
14583 if (!EXPR_P (expr))
14584 return UNKNOWN_LOCATION;
14586 location_t expr_location = EXPR_LOCATION (expr);
14587 location_t pure_loc = get_pure_location (expr_location);
14588 unsigned discriminator = get_discriminator_from_loc (expr_location);
14589 location_t adhoc = line_table->get_or_create_combined_loc (pure_loc,
14590 src_range,
14591 nullptr,
14592 discriminator);
14593 SET_EXPR_LOCATION (expr, adhoc);
14594 return adhoc;
14597 /* Return EXPR, potentially wrapped with a node expression LOC,
14598 if !CAN_HAVE_LOCATION_P (expr).
14600 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14601 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14603 Wrapper nodes can be identified using location_wrapper_p. */
14605 tree
14606 maybe_wrap_with_location (tree expr, location_t loc)
14608 if (expr == NULL)
14609 return NULL;
14610 if (loc == UNKNOWN_LOCATION)
14611 return expr;
14612 if (CAN_HAVE_LOCATION_P (expr))
14613 return expr;
14614 /* We should only be adding wrappers for constants and for decls,
14615 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14616 gcc_assert (CONSTANT_CLASS_P (expr)
14617 || DECL_P (expr)
14618 || EXCEPTIONAL_CLASS_P (expr));
14620 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14621 any impact of the wrapper nodes. */
14622 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14623 return expr;
14625 /* Compiler-generated temporary variables don't need a wrapper. */
14626 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14627 return expr;
14629 /* If any auto_suppress_location_wrappers are active, don't create
14630 wrappers. */
14631 if (suppress_location_wrappers > 0)
14632 return expr;
14634 tree_code code
14635 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14636 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14637 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14638 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14639 /* Mark this node as being a wrapper. */
14640 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14641 return wrapper;
14644 int suppress_location_wrappers;
14646 /* Return the name of combined function FN, for debugging purposes. */
14648 const char *
14649 combined_fn_name (combined_fn fn)
14651 if (builtin_fn_p (fn))
14653 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14654 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14656 else
14657 return internal_fn_name (as_internal_fn (fn));
14660 /* Return a bitmap with a bit set corresponding to each argument in
14661 a function call type FNTYPE declared with attribute nonnull,
14662 or null if none of the function's arguments are nonnull. The caller
14663 must free the bitmap. */
14665 bitmap
14666 get_nonnull_args (const_tree fntype)
14668 if (fntype == NULL_TREE)
14669 return NULL;
14671 bitmap argmap = NULL;
14672 if (TREE_CODE (fntype) == METHOD_TYPE)
14674 /* The this pointer in C++ non-static member functions is
14675 implicitly nonnull whether or not it's declared as such. */
14676 argmap = BITMAP_ALLOC (NULL);
14677 bitmap_set_bit (argmap, 0);
14680 tree attrs = TYPE_ATTRIBUTES (fntype);
14681 if (!attrs)
14682 return argmap;
14684 /* A function declaration can specify multiple attribute nonnull,
14685 each with zero or more arguments. The loop below creates a bitmap
14686 representing a union of all the arguments. An empty (but non-null)
14687 bitmap means that all arguments have been declared nonnull. */
14688 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14690 attrs = lookup_attribute ("nonnull", attrs);
14691 if (!attrs)
14692 break;
14694 if (!argmap)
14695 argmap = BITMAP_ALLOC (NULL);
14697 if (!TREE_VALUE (attrs))
14699 /* Clear the bitmap in case a previous attribute nonnull
14700 set it and this one overrides it for all arguments. */
14701 bitmap_clear (argmap);
14702 return argmap;
14705 /* Iterate over the indices of the format arguments declared nonnull
14706 and set a bit for each. */
14707 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14709 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14710 bitmap_set_bit (argmap, val);
14714 return argmap;
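
/* A minimal sketch of how a caller would consume the result of
   get_nonnull_args.  The sketch_* helper name is hypothetical; the bitmap
   must be released with BITMAP_FREE as the comment above requires.  */

static void
sketch_get_nonnull_args (tree fndecl)
{
  bitmap argmap = get_nonnull_args (TREE_TYPE (fndecl));
  if (!argmap)
    return;			/* No nonnull arguments at all.  */
  /* An empty map means every argument is nonnull; otherwise bit I is set
     for the zero-based argument I (bit 0 is the this pointer of a
     METHOD_TYPE).  */
  bool first_arg_nonnull
    = bitmap_empty_p (argmap) || bitmap_bit_p (argmap, 0);
  (void) first_arg_nonnull;
  BITMAP_FREE (argmap);
}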
14717 /* Returns true if TYPE is a type where it and all of its subobjects
14718 (recursively) are of structure, union, or array type. */
14720 bool
14721 is_empty_type (const_tree type)
14723 if (RECORD_OR_UNION_TYPE_P (type))
14725 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14726 if (TREE_CODE (field) == FIELD_DECL
14727 && !DECL_PADDING_P (field)
14728 && !is_empty_type (TREE_TYPE (field)))
14729 return false;
14730 return true;
14732 else if (TREE_CODE (type) == ARRAY_TYPE)
14733 return (integer_minus_onep (array_type_nelts (type))
14734 || TYPE_DOMAIN (type) == NULL_TREE
14735 || is_empty_type (TREE_TYPE (type)));
14736 return false;
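
/* A minimal sketch of is_empty_type on a hand-built record.  The sketch_*
   helper name is hypothetical and only uses tree.h construction APIs.  */

static void
sketch_is_empty_type ()
{
  /* A record with no FIELD_DECLs at all is empty ...  */
  tree s = make_node (RECORD_TYPE);
  gcc_checking_assert (is_empty_type (s));
  /* ... and adding a real int field makes it non-empty.  */
  tree f = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("i"), integer_type_node);
  DECL_CONTEXT (f) = s;
  TYPE_FIELDS (s) = f;
  gcc_checking_assert (!is_empty_type (s));
}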
14739 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14740 that shouldn't be passed via stack. */
14742 bool
14743 default_is_empty_record (const_tree type)
14745 if (!abi_version_at_least (12))
14746 return false;
14748 if (type == error_mark_node)
14749 return false;
14751 if (TREE_ADDRESSABLE (type))
14752 return false;
14754 return is_empty_type (TYPE_MAIN_VARIANT (type));
14757 /* Determine whether TYPE is a structure with a flexible array member,
14758 or a union containing such a structure (possibly recursively). */
14760 bool
14761 flexible_array_type_p (const_tree type)
14763 tree x, last;
14764 switch (TREE_CODE (type))
14766 case RECORD_TYPE:
14767 last = NULL_TREE;
14768 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14769 if (TREE_CODE (x) == FIELD_DECL)
14770 last = x;
14771 if (last == NULL_TREE)
14772 return false;
14773 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14774 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14775 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14776 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14777 return true;
14778 return false;
14779 case UNION_TYPE:
14780 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14782 if (TREE_CODE (x) == FIELD_DECL
14783 && flexible_array_type_p (TREE_TYPE (x)))
14784 return true;
14786 return false;
14787 default:
14788 return false;
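
/* A source-level sketch of what this predicate accepts (illustrative C
   declarations, not interfaces of this file):

     struct line { int len; char text[]; };       // matches: trailing
                                                  // flexible array member
     union blob { struct line l; long pad; };     // matches recursively
     struct fixed { int len; char text[16]; };    // does not match

   i.e. the last FIELD_DECL must be an array whose type has no TYPE_SIZE
   and whose TYPE_DOMAIN has no TYPE_MAX_VALUE.  */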
14792 /* Like int_size_in_bytes, but handle empty records specially. */
14794 HOST_WIDE_INT
14795 arg_int_size_in_bytes (const_tree type)
14797 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14800 /* Like size_in_bytes, but handle empty records specially. */
14802 tree
14803 arg_size_in_bytes (const_tree type)
14805 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14808 /* Return true if an expression with CODE has to have the same result type as
14809 its first operand. */
14811 bool
14812 expr_type_first_operand_type_p (tree_code code)
14814 switch (code)
14816 case NEGATE_EXPR:
14817 case ABS_EXPR:
14818 case BIT_NOT_EXPR:
14819 case PAREN_EXPR:
14820 case CONJ_EXPR:
14822 case PLUS_EXPR:
14823 case MINUS_EXPR:
14824 case MULT_EXPR:
14825 case TRUNC_DIV_EXPR:
14826 case CEIL_DIV_EXPR:
14827 case FLOOR_DIV_EXPR:
14828 case ROUND_DIV_EXPR:
14829 case TRUNC_MOD_EXPR:
14830 case CEIL_MOD_EXPR:
14831 case FLOOR_MOD_EXPR:
14832 case ROUND_MOD_EXPR:
14833 case RDIV_EXPR:
14834 case EXACT_DIV_EXPR:
14835 case MIN_EXPR:
14836 case MAX_EXPR:
14837 case BIT_IOR_EXPR:
14838 case BIT_XOR_EXPR:
14839 case BIT_AND_EXPR:
14841 case LSHIFT_EXPR:
14842 case RSHIFT_EXPR:
14843 case LROTATE_EXPR:
14844 case RROTATE_EXPR:
14845 return true;
14847 default:
14848 return false;
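
/* A minimal sketch of expr_type_first_operand_type_p: arithmetic and
   bitwise codes preserve the type of operand 0, comparisons and
   conversions do not.  The sketch_* helper name is hypothetical.  */

static void
sketch_expr_type_first_operand ()
{
  gcc_checking_assert (expr_type_first_operand_type_p (PLUS_EXPR));
  gcc_checking_assert (expr_type_first_operand_type_p (BIT_NOT_EXPR));
  gcc_checking_assert (!expr_type_first_operand_type_p (LT_EXPR));
  gcc_checking_assert (!expr_type_first_operand_type_p (NOP_EXPR));
}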
14852 /* Return a typenode for the "standard" C type with a given name. */
14853 tree
14854 get_typenode_from_name (const char *name)
14856 if (name == NULL || *name == '\0')
14857 return NULL_TREE;
14859 if (strcmp (name, "char") == 0)
14860 return char_type_node;
14861 if (strcmp (name, "unsigned char") == 0)
14862 return unsigned_char_type_node;
14863 if (strcmp (name, "signed char") == 0)
14864 return signed_char_type_node;
14866 if (strcmp (name, "short int") == 0)
14867 return short_integer_type_node;
14868 if (strcmp (name, "short unsigned int") == 0)
14869 return short_unsigned_type_node;
14871 if (strcmp (name, "int") == 0)
14872 return integer_type_node;
14873 if (strcmp (name, "unsigned int") == 0)
14874 return unsigned_type_node;
14876 if (strcmp (name, "long int") == 0)
14877 return long_integer_type_node;
14878 if (strcmp (name, "long unsigned int") == 0)
14879 return long_unsigned_type_node;
14881 if (strcmp (name, "long long int") == 0)
14882 return long_long_integer_type_node;
14883 if (strcmp (name, "long long unsigned int") == 0)
14884 return long_long_unsigned_type_node;
14886 gcc_unreachable ();
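
/* A minimal sketch of get_typenode_from_name: names must be spelled
   exactly as the standard C type names above, anything else ICEs.  The
   sketch_* helper name is hypothetical.  */

static void
sketch_get_typenode_from_name ()
{
  gcc_checking_assert (get_typenode_from_name ("unsigned int")
		       == unsigned_type_node);
  gcc_checking_assert (get_typenode_from_name ("long long int")
		       == long_long_integer_type_node);
  /* NULL or an empty string simply yields NULL_TREE.  */
  gcc_checking_assert (!get_typenode_from_name (NULL));
}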
14889 /* List of pointer types used to declare builtins before we have seen their
14890 real declaration.
14892 Keep the size up to date in tree.h ! */
14893 const builtin_structptr_type builtin_structptr_types[6] =
14895 { fileptr_type_node, ptr_type_node, "FILE" },
14896 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14897 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14898 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14899 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14900 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14903 /* Return the maximum object size. */
14905 tree
14906 max_object_size (void)
14908 /* To do: Make this a configurable parameter. */
14909 return TYPE_MAX_VALUE (ptrdiff_type_node);
14912 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14913 parameter default to false and that weeds out error_mark_node. */
14915 bool
14916 verify_type_context (location_t loc, type_context_kind context,
14917 const_tree type, bool silent_p)
14919 if (type == error_mark_node)
14920 return true;
14922 gcc_assert (TYPE_P (type));
14923 return (!targetm.verify_type_context
14924 || targetm.verify_type_context (loc, context, type, silent_p));
14927 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14928 delete operators. Return false if they may or may not name such
14929 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14930 do not. */
14932 bool
14933 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14934 bool *pcertain /* = NULL */)
14936 bool certain;
14937 if (!pcertain)
14938 pcertain = &certain;
14940 const char *new_name = IDENTIFIER_POINTER (new_asm);
14941 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14942 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14943 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14945 /* The following failures are due to invalid names so they're not
14946 considered certain mismatches. */
14947 *pcertain = false;
14949 if (new_len < 5 || delete_len < 6)
14950 return false;
14951 if (new_name[0] == '_')
14952 ++new_name, --new_len;
14953 if (new_name[0] == '_')
14954 ++new_name, --new_len;
14955 if (delete_name[0] == '_')
14956 ++delete_name, --delete_len;
14957 if (delete_name[0] == '_')
14958 ++delete_name, --delete_len;
14959 if (new_len < 4 || delete_len < 5)
14960 return false;
14962 /* The following failures are due to names of user-defined operators
14963 so they're also not considered certain mismatches. */
14965 /* *_len is now just the length after initial underscores. */
14966 if (new_name[0] != 'Z' || new_name[1] != 'n')
14967 return false;
14968 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14969 return false;
14971 /* The following failures are certain mismatches. */
14972 *pcertain = true;
14974 /* _Znw must match _Zdl, _Zna must match _Zda. */
14975 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14976 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14977 return false;
14978 /* 'j', 'm' and 'y' correspond to size_t. */
14979 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14980 return false;
14981 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14982 return false;
14983 if (new_len == 4
14984 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14986 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14987 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14988 if (delete_len == 5)
14989 return true;
14990 if (delete_len == 6 && delete_name[5] == new_name[3])
14991 return true;
14992 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14993 return true;
14995 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14996 || (new_len == 33
14997 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14999 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
15000 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
15001 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
15002 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
15003 return true;
15004 if (delete_len == 21
15005 && delete_name[5] == new_name[3]
15006 && !memcmp (delete_name + 6, "St11align_val_t", 15))
15007 return true;
15008 if (delete_len == 34
15009 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
15010 return true;
15013 /* The negative result is conservative. */
15014 *pcertain = false;
15015 return false;
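
/* A minimal sketch of valid_new_delete_pair_p using the LP64 mangling of
   size_t ('m').  The sketch_* helper name is hypothetical.  */

static void
sketch_valid_new_delete_pair ()
{
  bool certain;
  /* The global scalar forms pair up: operator new (size_t) with
     operator delete (void *).  */
  gcc_checking_assert (valid_new_delete_pair_p (get_identifier ("_Znwm"),
						get_identifier ("_ZdlPv"),
						&certain));
  /* Scalar new against array delete is rejected, and certainly so.  */
  gcc_checking_assert (!valid_new_delete_pair_p (get_identifier ("_Znwm"),
						 get_identifier ("_ZdaPv"),
						 &certain));
  gcc_checking_assert (certain);
}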
15018 /* Return the zero-based number corresponding to the argument being
15019 deallocated if FNDECL is a deallocation function or an out-of-bounds
15020 value if it isn't. */
15022 unsigned
15023 fndecl_dealloc_argno (tree fndecl)
15025 /* A call to operator delete isn't recognized as one to a built-in. */
15026 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
15028 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
15029 return 0;
15031 /* Avoid placement delete that's not been inlined. */
15032 tree fname = DECL_ASSEMBLER_NAME (fndecl);
15033 if (id_equal (fname, "_ZdlPvS_") // ordinary form
15034 || id_equal (fname, "_ZdaPvS_")) // array form
15035 return UINT_MAX;
15036 return 0;
15039 /* TODO: Handle user-defined functions with attribute malloc? Handle
15040 known non-built-ins like fopen? */
15041 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
15043 switch (DECL_FUNCTION_CODE (fndecl))
15045 case BUILT_IN_FREE:
15046 case BUILT_IN_REALLOC:
15047 case BUILT_IN_GOMP_FREE:
15048 case BUILT_IN_GOMP_REALLOC:
15049 return 0;
15050 default:
15051 break;
15053 return UINT_MAX;
15056 tree attrs = DECL_ATTRIBUTES (fndecl);
15057 if (!attrs)
15058 return UINT_MAX;
15060 for (tree atfree = attrs;
15061 (atfree = lookup_attribute ("*dealloc", atfree));
15062 atfree = TREE_CHAIN (atfree))
15064 tree alloc = TREE_VALUE (atfree);
15065 if (!alloc)
15066 continue;
15068 tree pos = TREE_CHAIN (alloc);
15069 if (!pos)
15070 return 0;
15072 pos = TREE_VALUE (pos);
15073 return TREE_INT_CST_LOW (pos) - 1;
15076 return UINT_MAX;
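
/* A minimal sketch of fndecl_dealloc_argno: free releases its first
   (zero-based argument 0) parameter.  The sketch_* helper name is
   hypothetical; the built-in decl may not be registered yet, hence the
   NULL check.  */

static void
sketch_fndecl_dealloc_argno ()
{
  tree decl = builtin_decl_explicit (BUILT_IN_FREE);
  if (decl)
    gcc_checking_assert (fndecl_dealloc_argno (decl) == 0);
}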
15079 /* If EXPR refers to a character array or pointer declared attribute
15080 nonstring, return a decl for that array or pointer and set *REF
15081 to the referenced enclosing object or pointer. Otherwise return
15082 null. */
15084 tree
15085 get_attr_nonstring_decl (tree expr, tree *ref)
15087 tree decl = expr;
15088 tree var = NULL_TREE;
15089 if (TREE_CODE (decl) == SSA_NAME)
15091 gimple *def = SSA_NAME_DEF_STMT (decl);
15093 if (is_gimple_assign (def))
15095 tree_code code = gimple_assign_rhs_code (def);
15096 if (code == ADDR_EXPR
15097 || code == COMPONENT_REF
15098 || code == VAR_DECL)
15099 decl = gimple_assign_rhs1 (def);
15101 else
15102 var = SSA_NAME_VAR (decl);
15105 if (TREE_CODE (decl) == ADDR_EXPR)
15106 decl = TREE_OPERAND (decl, 0);
15108 /* To simplify calling code, store the referenced DECL regardless of
15109 the attribute determined below, but avoid storing the SSA_NAME_VAR
15110 obtained above (it's not useful for dataflow purposes). */
15111 if (ref)
15112 *ref = decl;
15114 /* Use the SSA_NAME_VAR that was determined above to see if it's
15115 declared nonstring. Otherwise drill down into the referenced
15116 DECL. */
15117 if (var)
15118 decl = var;
15119 else if (TREE_CODE (decl) == ARRAY_REF)
15120 decl = TREE_OPERAND (decl, 0);
15121 else if (TREE_CODE (decl) == COMPONENT_REF)
15122 decl = TREE_OPERAND (decl, 1);
15123 else if (TREE_CODE (decl) == MEM_REF)
15124 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15126 if (DECL_P (decl)
15127 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15128 return decl;
15130 return NULL_TREE;
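
/* A source-level sketch of the attribute this helper recognizes
   (illustrative C declaration, not an interface of this file):

     extern char buf[8] __attribute__ ((nonstring));

   Given an EXPR that refers to buf directly, through a COMPONENT_REF or
   ARRAY_REF, or through an SSA_NAME whose variable is declared this way,
   the function returns the DECL carrying the attribute and stores the
   referenced object in *REF.  */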
15133 /* Return the length of the attribute name strings
15134 if the arglist chain has more than one element, -1 otherwise. */
15136 int
15137 get_target_clone_attr_len (tree arglist)
15139 tree arg;
15140 int str_len_sum = 0;
15141 int argnum = 0;
15143 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15145 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15146 size_t len = strlen (str);
15147 str_len_sum += len + 1;
15148 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15149 argnum++;
15150 argnum++;
15152 if (argnum <= 1)
15153 return -1;
15154 return str_len_sum;
15157 void
15158 tree_cc_finalize (void)
15160 clear_nonstandard_integer_type_cache ();
15161 vec_free (bitint_type_cache);
15164 #if CHECKING_P
15166 namespace selftest {
15168 /* Selftests for tree. */
15170 /* Verify that integer constants are sane. */
15172 static void
15173 test_integer_constants ()
15175 ASSERT_TRUE (integer_type_node != NULL);
15176 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15178 tree type = integer_type_node;
15180 tree zero = build_zero_cst (type);
15181 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15182 ASSERT_EQ (type, TREE_TYPE (zero));
15184 tree one = build_int_cst (type, 1);
15185 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15186 ASSERT_EQ (type, TREE_TYPE (zero));
15189 /* Verify identifiers. */
15191 static void
15192 test_identifiers ()
15194 tree identifier = get_identifier ("foo");
15195 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15196 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15199 /* Verify LABEL_DECL. */
15201 static void
15202 test_labels ()
15204 tree identifier = get_identifier ("err");
15205 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15206 identifier, void_type_node);
15207 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15208 ASSERT_FALSE (FORCED_LABEL (label_decl));
15211 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15212 are given by VALS. */
15214 static tree
15215 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15217 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15218 tree_vector_builder builder (type, vals.length (), 1);
15219 builder.splice (vals);
15220 return builder.build ();
15223 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15225 static void
15226 check_vector_cst (const vec<tree> &expected, tree actual)
15228 ASSERT_KNOWN_EQ (expected.length (),
15229 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15230 for (unsigned int i = 0; i < expected.length (); ++i)
15231 ASSERT_EQ (wi::to_wide (expected[i]),
15232 wi::to_wide (vector_cst_elt (actual, i)));
15235 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15236 and that its elements match EXPECTED. */
15238 static void
15239 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15240 unsigned int npatterns)
15242 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15243 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15244 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15245 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15246 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15247 check_vector_cst (expected, actual);
15250 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15251 and NPATTERNS background elements, and that its elements match
15252 EXPECTED. */
15254 static void
15255 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15256 unsigned int npatterns)
15258 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15259 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15260 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15261 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15262 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15263 check_vector_cst (expected, actual);
15266 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15267 and that its elements match EXPECTED. */
15269 static void
15270 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15271 unsigned int npatterns)
15273 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15274 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15275 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15276 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15277 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15278 check_vector_cst (expected, actual);
15281 /* Test the creation of VECTOR_CSTs. */
15283 static void
15284 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15286 auto_vec<tree, 8> elements (8);
15287 elements.quick_grow (8);
15288 tree element_type = build_nonstandard_integer_type (16, true);
15289 tree vector_type = build_vector_type (element_type, 8);
15291 /* Test a simple linear series with a base of 0 and a step of 1:
15292 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15293 for (unsigned int i = 0; i < 8; ++i)
15294 elements[i] = build_int_cst (element_type, i);
15295 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15296 check_vector_cst_stepped (elements, vector, 1);
15298 /* Try the same with the first element replaced by 100:
15299 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15300 elements[0] = build_int_cst (element_type, 100);
15301 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15302 check_vector_cst_stepped (elements, vector, 1);
15304 /* Try a series that wraps around.
15305 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15306 for (unsigned int i = 1; i < 8; ++i)
15307 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15308 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15309 check_vector_cst_stepped (elements, vector, 1);
15311 /* Try a downward series:
15312 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15313 for (unsigned int i = 1; i < 8; ++i)
15314 elements[i] = build_int_cst (element_type, 80 - i);
15315 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15316 check_vector_cst_stepped (elements, vector, 1);
15318 /* Try two interleaved series with different bases and steps:
15319 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15320 elements[1] = build_int_cst (element_type, 53);
15321 for (unsigned int i = 2; i < 8; i += 2)
15323 elements[i] = build_int_cst (element_type, 70 - i * 2);
15324 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15326 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15327 check_vector_cst_stepped (elements, vector, 2);
15329 /* Try a duplicated value:
15330 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15331 for (unsigned int i = 1; i < 8; ++i)
15332 elements[i] = elements[0];
15333 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15334 check_vector_cst_duplicate (elements, vector, 1);
15336 /* Try an interleaved duplicated value:
15337 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15338 elements[1] = build_int_cst (element_type, 55);
15339 for (unsigned int i = 2; i < 8; ++i)
15340 elements[i] = elements[i - 2];
15341 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15342 check_vector_cst_duplicate (elements, vector, 2);
15344 /* Try a duplicated value with 2 exceptions
15345 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15346 elements[0] = build_int_cst (element_type, 41);
15347 elements[1] = build_int_cst (element_type, 97);
15348 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15349 check_vector_cst_fill (elements, vector, 2);
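/* The two exceptional leading elements rule out a pure duplicate, but
   each pattern still has the two-element shape { 41, 100, 100, ... }
   and { 97, 55, 55, ... }, so the encoding is the first 2 * 2 = 4
   elements { 41, 97, 100, 55 }.  */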
15351 /* Try two interleaved patterns, one with a step and one without:
15352 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15353 for (unsigned int i = 3; i < 8; i += 2)
15354 elements[i] = build_int_cst (element_type, i * 7);
15355 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15356 check_vector_cst_stepped (elements, vector, 2);
15358 /* Try a fully-general constant:
15359 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15360 elements[5] = build_int_cst (element_type, 9990);
15361 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15362 check_vector_cst_fill (elements, vector, 4);
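/* This last case has no cheaper structure: four patterns of two encoded
   elements each mean that all 4 * 2 = 8 elements are stored explicitly.
   A purely illustrative sketch of how a further case could be added,
   reusing the locals above: an all-equal vector collapses back to a
   single duplicate pattern:

     for (unsigned int i = 0; i < 8; ++i)
       elements[i] = build_int_cst (element_type, 7);
     vector = build_vector (vector_type, elements PASS_MEM_STAT);
     check_vector_cst_duplicate (elements, vector, 1);  */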
15365 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15366 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15367 modifying its argument in-place. */
15369 static void
15370 check_strip_nops (tree node, tree expected)
15372 STRIP_NOPS (node);
15373 ASSERT_EQ (expected, node);
15376 /* Verify location wrappers. */
15378 static void
15379 test_location_wrappers ()
15381 location_t loc = BUILTINS_LOCATION;
15383 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15385 /* Wrapping a constant. */
15386 tree int_cst = build_int_cst (integer_type_node, 42);
15387 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15388 ASSERT_FALSE (location_wrapper_p (int_cst));
15390 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15391 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15392 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15393 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15395 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15396 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15398 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15399 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15400 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15401 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15403 /* Wrapping a STRING_CST. */
15404 tree string_cst = build_string (4, "foo");
15405 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15406 ASSERT_FALSE (location_wrapper_p (string_cst));
15408 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15409 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15410 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15411 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15412 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15415 /* Wrapping a variable. */
15416 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15417 get_identifier ("some_int_var"),
15418 integer_type_node);
15419 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15420 ASSERT_FALSE (location_wrapper_p (int_var));
15422 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15423 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15424 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15425 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15427 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15428 wrapper. */
15429 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15430 ASSERT_FALSE (location_wrapper_p (r_cast));
15431 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15433 /* Verify that STRIP_NOPS removes wrappers. */
15434 check_strip_nops (wrapped_int_cst, int_cst);
15435 check_strip_nops (wrapped_string_cst, string_cst);
15436 check_strip_nops (wrapped_int_var, int_var);
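/* maybe_wrap_with_location adds a light-weight expression whose only
   purpose is to carry a location: as the assert above shows, a wrapped
   STRING_CST uses a VIEW_CONVERT_EXPR (wrapped decls likewise), while
   simple constants such as INTEGER_CST are wrapped in NON_LVALUE_EXPR.
   The wrapper node itself is flagged as a location wrapper, which is
   why the hand-built NON_LVALUE_EXPR r_cast above is not treated as
   one, and STRIP_NOPS removes wrappers just like other no-op
   conversions.  */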
15439 /* Test various tree predicates. Verify that location wrappers don't
15440 affect the results. */
15442 static void
15443 test_predicates ()
15445 /* Build various constants and wrappers around them. */
15447 location_t loc = BUILTINS_LOCATION;
15449 tree i_0 = build_int_cst (integer_type_node, 0);
15450 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15452 tree i_1 = build_int_cst (integer_type_node, 1);
15453 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15455 tree i_m1 = build_int_cst (integer_type_node, -1);
15456 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15458 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15459 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15460 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15461 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15462 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15463 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15465 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15466 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15467 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15469 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15470 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15471 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15473 /* TODO: vector constants. */
15475 /* Test integer_onep. */
15476 ASSERT_FALSE (integer_onep (i_0));
15477 ASSERT_FALSE (integer_onep (wr_i_0));
15478 ASSERT_TRUE (integer_onep (i_1));
15479 ASSERT_TRUE (integer_onep (wr_i_1));
15480 ASSERT_FALSE (integer_onep (i_m1));
15481 ASSERT_FALSE (integer_onep (wr_i_m1));
15482 ASSERT_FALSE (integer_onep (f_0));
15483 ASSERT_FALSE (integer_onep (wr_f_0));
15484 ASSERT_FALSE (integer_onep (f_1));
15485 ASSERT_FALSE (integer_onep (wr_f_1));
15486 ASSERT_FALSE (integer_onep (f_m1));
15487 ASSERT_FALSE (integer_onep (wr_f_m1));
15488 ASSERT_FALSE (integer_onep (c_i_0));
15489 ASSERT_TRUE (integer_onep (c_i_1));
15490 ASSERT_FALSE (integer_onep (c_i_m1));
15491 ASSERT_FALSE (integer_onep (c_f_0));
15492 ASSERT_FALSE (integer_onep (c_f_1));
15493 ASSERT_FALSE (integer_onep (c_f_m1));
15495 /* Test integer_zerop. */
15496 ASSERT_TRUE (integer_zerop (i_0));
15497 ASSERT_TRUE (integer_zerop (wr_i_0));
15498 ASSERT_FALSE (integer_zerop (i_1));
15499 ASSERT_FALSE (integer_zerop (wr_i_1));
15500 ASSERT_FALSE (integer_zerop (i_m1));
15501 ASSERT_FALSE (integer_zerop (wr_i_m1));
15502 ASSERT_FALSE (integer_zerop (f_0));
15503 ASSERT_FALSE (integer_zerop (wr_f_0));
15504 ASSERT_FALSE (integer_zerop (f_1));
15505 ASSERT_FALSE (integer_zerop (wr_f_1));
15506 ASSERT_FALSE (integer_zerop (f_m1));
15507 ASSERT_FALSE (integer_zerop (wr_f_m1));
15508 ASSERT_TRUE (integer_zerop (c_i_0));
15509 ASSERT_FALSE (integer_zerop (c_i_1));
15510 ASSERT_FALSE (integer_zerop (c_i_m1));
15511 ASSERT_FALSE (integer_zerop (c_f_0));
15512 ASSERT_FALSE (integer_zerop (c_f_1));
15513 ASSERT_FALSE (integer_zerop (c_f_m1));
15515 /* Test integer_all_onesp. */
15516 ASSERT_FALSE (integer_all_onesp (i_0));
15517 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15518 ASSERT_FALSE (integer_all_onesp (i_1));
15519 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15520 ASSERT_TRUE (integer_all_onesp (i_m1));
15521 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15522 ASSERT_FALSE (integer_all_onesp (f_0));
15523 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15524 ASSERT_FALSE (integer_all_onesp (f_1));
15525 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15526 ASSERT_FALSE (integer_all_onesp (f_m1));
15527 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15528 ASSERT_FALSE (integer_all_onesp (c_i_0));
15529 ASSERT_FALSE (integer_all_onesp (c_i_1));
15530 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15531 ASSERT_FALSE (integer_all_onesp (c_f_0));
15532 ASSERT_FALSE (integer_all_onesp (c_f_1));
15533 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15535 /* Test integer_minus_onep. */
15536 ASSERT_FALSE (integer_minus_onep (i_0));
15537 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15538 ASSERT_FALSE (integer_minus_onep (i_1));
15539 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15540 ASSERT_TRUE (integer_minus_onep (i_m1));
15541 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15542 ASSERT_FALSE (integer_minus_onep (f_0));
15543 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15544 ASSERT_FALSE (integer_minus_onep (f_1));
15545 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15546 ASSERT_FALSE (integer_minus_onep (f_m1));
15547 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15548 ASSERT_FALSE (integer_minus_onep (c_i_0));
15549 ASSERT_FALSE (integer_minus_onep (c_i_1));
15550 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15551 ASSERT_FALSE (integer_minus_onep (c_f_0));
15552 ASSERT_FALSE (integer_minus_onep (c_f_1));
15553 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15555 /* Test integer_each_onep. */
15556 ASSERT_FALSE (integer_each_onep (i_0));
15557 ASSERT_FALSE (integer_each_onep (wr_i_0));
15558 ASSERT_TRUE (integer_each_onep (i_1));
15559 ASSERT_TRUE (integer_each_onep (wr_i_1));
15560 ASSERT_FALSE (integer_each_onep (i_m1));
15561 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15562 ASSERT_FALSE (integer_each_onep (f_0));
15563 ASSERT_FALSE (integer_each_onep (wr_f_0));
15564 ASSERT_FALSE (integer_each_onep (f_1));
15565 ASSERT_FALSE (integer_each_onep (wr_f_1));
15566 ASSERT_FALSE (integer_each_onep (f_m1));
15567 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15568 ASSERT_FALSE (integer_each_onep (c_i_0));
15569 ASSERT_FALSE (integer_each_onep (c_i_1));
15570 ASSERT_FALSE (integer_each_onep (c_i_m1));
15571 ASSERT_FALSE (integer_each_onep (c_f_0));
15572 ASSERT_FALSE (integer_each_onep (c_f_1));
15573 ASSERT_FALSE (integer_each_onep (c_f_m1));
15575 /* Test integer_truep. */
15576 ASSERT_FALSE (integer_truep (i_0));
15577 ASSERT_FALSE (integer_truep (wr_i_0));
15578 ASSERT_TRUE (integer_truep (i_1));
15579 ASSERT_TRUE (integer_truep (wr_i_1));
15580 ASSERT_FALSE (integer_truep (i_m1));
15581 ASSERT_FALSE (integer_truep (wr_i_m1));
15582 ASSERT_FALSE (integer_truep (f_0));
15583 ASSERT_FALSE (integer_truep (wr_f_0));
15584 ASSERT_FALSE (integer_truep (f_1));
15585 ASSERT_FALSE (integer_truep (wr_f_1));
15586 ASSERT_FALSE (integer_truep (f_m1));
15587 ASSERT_FALSE (integer_truep (wr_f_m1));
15588 ASSERT_FALSE (integer_truep (c_i_0));
15589 ASSERT_TRUE (integer_truep (c_i_1));
15590 ASSERT_FALSE (integer_truep (c_i_m1));
15591 ASSERT_FALSE (integer_truep (c_f_0));
15592 ASSERT_FALSE (integer_truep (c_f_1));
15593 ASSERT_FALSE (integer_truep (c_f_m1));
15595 /* Test integer_nonzerop. */
15596 ASSERT_FALSE (integer_nonzerop (i_0));
15597 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15598 ASSERT_TRUE (integer_nonzerop (i_1));
15599 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15600 ASSERT_TRUE (integer_nonzerop (i_m1));
15601 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15602 ASSERT_FALSE (integer_nonzerop (f_0));
15603 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15604 ASSERT_FALSE (integer_nonzerop (f_1));
15605 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15606 ASSERT_FALSE (integer_nonzerop (f_m1));
15607 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15608 ASSERT_FALSE (integer_nonzerop (c_i_0));
15609 ASSERT_TRUE (integer_nonzerop (c_i_1));
15610 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15611 ASSERT_FALSE (integer_nonzerop (c_f_0));
15612 ASSERT_FALSE (integer_nonzerop (c_f_1));
15613 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15615 /* Test real_zerop. */
15616 ASSERT_FALSE (real_zerop (i_0));
15617 ASSERT_FALSE (real_zerop (wr_i_0));
15618 ASSERT_FALSE (real_zerop (i_1));
15619 ASSERT_FALSE (real_zerop (wr_i_1));
15620 ASSERT_FALSE (real_zerop (i_m1));
15621 ASSERT_FALSE (real_zerop (wr_i_m1));
15622 ASSERT_TRUE (real_zerop (f_0));
15623 ASSERT_TRUE (real_zerop (wr_f_0));
15624 ASSERT_FALSE (real_zerop (f_1));
15625 ASSERT_FALSE (real_zerop (wr_f_1));
15626 ASSERT_FALSE (real_zerop (f_m1));
15627 ASSERT_FALSE (real_zerop (wr_f_m1));
15628 ASSERT_FALSE (real_zerop (c_i_0));
15629 ASSERT_FALSE (real_zerop (c_i_1));
15630 ASSERT_FALSE (real_zerop (c_i_m1));
15631 ASSERT_TRUE (real_zerop (c_f_0));
15632 ASSERT_FALSE (real_zerop (c_f_1));
15633 ASSERT_FALSE (real_zerop (c_f_m1));
15635 /* Test real_onep. */
15636 ASSERT_FALSE (real_onep (i_0));
15637 ASSERT_FALSE (real_onep (wr_i_0));
15638 ASSERT_FALSE (real_onep (i_1));
15639 ASSERT_FALSE (real_onep (wr_i_1));
15640 ASSERT_FALSE (real_onep (i_m1));
15641 ASSERT_FALSE (real_onep (wr_i_m1));
15642 ASSERT_FALSE (real_onep (f_0));
15643 ASSERT_FALSE (real_onep (wr_f_0));
15644 ASSERT_TRUE (real_onep (f_1));
15645 ASSERT_TRUE (real_onep (wr_f_1));
15646 ASSERT_FALSE (real_onep (f_m1));
15647 ASSERT_FALSE (real_onep (wr_f_m1));
15648 ASSERT_FALSE (real_onep (c_i_0));
15649 ASSERT_FALSE (real_onep (c_i_1));
15650 ASSERT_FALSE (real_onep (c_i_m1));
15651 ASSERT_FALSE (real_onep (c_f_0));
15652 ASSERT_TRUE (real_onep (c_f_1));
15653 ASSERT_FALSE (real_onep (c_f_m1));
15655 /* Test real_minus_onep. */
15656 ASSERT_FALSE (real_minus_onep (i_0));
15657 ASSERT_FALSE (real_minus_onep (wr_i_0));
15658 ASSERT_FALSE (real_minus_onep (i_1));
15659 ASSERT_FALSE (real_minus_onep (wr_i_1));
15660 ASSERT_FALSE (real_minus_onep (i_m1));
15661 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15662 ASSERT_FALSE (real_minus_onep (f_0));
15663 ASSERT_FALSE (real_minus_onep (wr_f_0));
15664 ASSERT_FALSE (real_minus_onep (f_1));
15665 ASSERT_FALSE (real_minus_onep (wr_f_1));
15666 ASSERT_TRUE (real_minus_onep (f_m1));
15667 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15668 ASSERT_FALSE (real_minus_onep (c_i_0));
15669 ASSERT_FALSE (real_minus_onep (c_i_1));
15670 ASSERT_FALSE (real_minus_onep (c_i_m1));
15671 ASSERT_FALSE (real_minus_onep (c_f_0));
15672 ASSERT_FALSE (real_minus_onep (c_f_1));
15673 ASSERT_TRUE (real_minus_onep (c_f_m1));
15675 /* Test zerop. */
15676 ASSERT_TRUE (zerop (i_0));
15677 ASSERT_TRUE (zerop (wr_i_0));
15678 ASSERT_FALSE (zerop (i_1));
15679 ASSERT_FALSE (zerop (wr_i_1));
15680 ASSERT_FALSE (zerop (i_m1));
15681 ASSERT_FALSE (zerop (wr_i_m1));
15682 ASSERT_TRUE (zerop (f_0));
15683 ASSERT_TRUE (zerop (wr_f_0));
15684 ASSERT_FALSE (zerop (f_1));
15685 ASSERT_FALSE (zerop (wr_f_1));
15686 ASSERT_FALSE (zerop (f_m1));
15687 ASSERT_FALSE (zerop (wr_f_m1));
15688 ASSERT_TRUE (zerop (c_i_0));
15689 ASSERT_FALSE (zerop (c_i_1));
15690 ASSERT_FALSE (zerop (c_i_m1));
15691 ASSERT_TRUE (zerop (c_f_0));
15692 ASSERT_FALSE (zerop (c_f_1));
15693 ASSERT_FALSE (zerop (c_f_m1));
15695 /* Test tree_expr_nonnegative_p. */
15696 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15697 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15698 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15699 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15700 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15701 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15702 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15703 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15704 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15705 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15706 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15707 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15708 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15709 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15710 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15711 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15712 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15713 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15715 /* Test tree_expr_nonzero_p. */
15716 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15717 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15718 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15719 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15720 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15721 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15723 /* Test integer_valued_real_p. */
15724 ASSERT_FALSE (integer_valued_real_p (i_0));
15725 ASSERT_TRUE (integer_valued_real_p (f_0));
15726 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15727 ASSERT_TRUE (integer_valued_real_p (f_1));
15728 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15730 /* Test integer_pow2p. */
15731 ASSERT_FALSE (integer_pow2p (i_0));
15732 ASSERT_TRUE (integer_pow2p (i_1));
15733 ASSERT_TRUE (integer_pow2p (wr_i_1));
15735 /* Test uniform_integer_cst_p. */
15736 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15737 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15738 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15739 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15740 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15741 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15742 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15743 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15744 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15745 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15746 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15747 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15748 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15749 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15750 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15751 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15752 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15753 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
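/* For the complex constants, predicates such as integer_onep,
   integer_truep and zerop accept a COMPLEX_CST whose imaginary part is
   zero and whose real part matches (c_i_1, c_i_0, c_f_0 above), whereas
   integer_each_onep does not treat 1 + 0i as "each one" and
   tree_expr_nonnegative_p remains false for every complex constant
   tested here.  */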
15756 /* Check that string escaping works correctly. */
15758 static void
15759 test_escaped_strings (void)
15761 int saved_cutoff;
15762 escaped_string msg;
15764 msg.escape (NULL);
15765 /* ASSERT_STREQ does not accept NULL as a valid test
15766 result, so we have to use ASSERT_EQ instead. */
15767 ASSERT_EQ (NULL, (const char *) msg);
15769 msg.escape ("");
15770 ASSERT_STREQ ("", (const char *) msg);
15772 msg.escape ("foobar");
15773 ASSERT_STREQ ("foobar", (const char *) msg);
15775 /* Ensure that we have -fmessage-length set to 0. */
15776 saved_cutoff = pp_line_cutoff (global_dc->printer);
15777 pp_line_cutoff (global_dc->printer) = 0;
15779 msg.escape ("foo\nbar");
15780 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15782 msg.escape ("\a\b\f\n\r\t\v");
15783 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15785 /* Now repeat the tests with -fmessage-length set to 5. */
15786 pp_line_cutoff (global_dc->printer) = 5;
15788 /* Note that the newline is not translated into an escape. */
15789 msg.escape ("foo\nbar");
15790 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15792 msg.escape ("\a\b\f\n\r\t\v");
15793 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15795 /* Restore the original message length setting. */
15796 pp_line_cutoff (global_dc->printer) = saved_cutoff;
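/* escaped_string::escape rewrites control characters as printable C
   escape sequences for use in diagnostics.  The tests above pin down
   one subtlety: when line wrapping is enabled (pp_line_cutoff nonzero,
   i.e. -fmessage-length > 0), '\n' is left untouched so the
   pretty-printer can still break lines on it, whereas with wrapping
   disabled it is escaped to "\\n" like the other control characters.  */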
15799 /* Run all of the selftests within this file. */
15801 void
15802 tree_cc_tests ()
15804 test_integer_constants ();
15805 test_identifiers ();
15806 test_labels ();
15807 test_vector_cst_patterns ();
15808 test_location_wrappers ();
15809 test_predicates ();
15810 test_escaped_strings ();
15813 } // namespace selftest
15815 #endif /* CHECKING_P */
15817 #include "gt-tree.h"