gcc/tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 /* Names of tree components.
78 Used for printing out the tree and error messages. */
79 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
80 #define END_OF_BASE_TREE_CODES "@dummy",
82 static const char *const tree_code_name[] = {
83 #include "all-tree.def"
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
89 /* Each tree code class has an associated string representation.
90 These must correspond to the tree_code_class entries. */
92 const char *const tree_code_class_strings[] =
94 "exceptional",
95 "constant",
96 "type",
97 "declaration",
98 "reference",
99 "comparison",
100 "unary",
101 "binary",
102 "statement",
103 "vl_exp",
104 "expression"
107 /* obstack.[ch] explicitly declined to prototype this. */
108 extern int _obstack_allocated_p (struct obstack *h, void *obj);
110 /* Statistics-gathering stuff. */
112 static uint64_t tree_code_counts[MAX_TREE_CODES];
113 uint64_t tree_node_counts[(int) all_kinds];
114 uint64_t tree_node_sizes[(int) all_kinds];
116 /* Keep in sync with tree.h:enum tree_node_kind. */
117 static const char * const tree_node_kind_names[] = {
118 "decls",
119 "types",
120 "blocks",
121 "stmts",
122 "refs",
123 "exprs",
124 "constants",
125 "identifiers",
126 "vecs",
127 "binfos",
128 "ssa names",
129 "constructors",
130 "random kinds",
131 "lang_decl kinds",
132 "lang_type kinds",
133 "omp clauses",
136 /* Unique id for next decl created. */
137 static GTY(()) int next_decl_uid;
138 /* Unique id for next type created. */
139 static GTY(()) unsigned next_type_uid = 1;
140 /* Unique id for next debug decl created. Use negative numbers,
141 to catch erroneous uses. */
142 static GTY(()) int next_debug_decl_uid;
144 /* Since we cannot rehash a type after it is in the table, we have to
145 keep the hash code. */
147 struct GTY((for_user)) type_hash {
148 unsigned long hash;
149 tree type;
152 /* Initial size of the hash table (rounded to next prime). */
153 #define TYPE_HASH_INITIAL_SIZE 1000
155 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
157 static hashval_t hash (type_hash *t) { return t->hash; }
158 static bool equal (type_hash *a, type_hash *b);
160 static int
161 keep_cache_entry (type_hash *&t)
163 return ggc_marked_p (t->type);
167 /* Now here is the hash table. When recording a type, it is added to
168 the slot whose index is the hash code. Note that the hash table is
169 used for several kinds of types (function types, array types and
170 array index range types, for now). While all these live in the
171 same table, they are completely independent, and the hash code is
172 computed differently for each of these. */
174 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
176 /* Hash table and temporary node for larger integer const values. */
177 static GTY (()) tree int_cst_node;
179 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
181 static hashval_t hash (tree t);
182 static bool equal (tree x, tree y);
185 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
187 /* Class and variable for making sure that there is a single POLY_INT_CST
188 for a given value. */
189 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
191 typedef std::pair<tree, const poly_wide_int *> compare_type;
192 static hashval_t hash (tree t);
193 static bool equal (tree x, const compare_type &y);
196 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
198 /* Hash table for optimization flags and target option flags. Use the same
199 hash table for both sets of options. Nodes for building the current
200 optimization and target option nodes. The assumption is most of the time
201 the options created will already be in the hash table, so we avoid
202 allocating and freeing up a node repeatedly. */
203 static GTY (()) tree cl_optimization_node;
204 static GTY (()) tree cl_target_option_node;
206 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
212 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
214 /* General tree->tree mapping structure for use in hash tables. */
217 static GTY ((cache))
218 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
223 static GTY ((cache))
224 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
226 static void set_type_quals (tree, int);
227 static void print_type_hash_statistics (void);
228 static void print_debug_expr_statistics (void);
229 static void print_value_expr_statistics (void);
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
234 bool int_n_enabled_p[NUM_INT_N_ENTS];
235 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
237 bool tree_contains_struct[MAX_TREE_CODES][64];
239 /* Number of operands for each OMP clause. */
240 unsigned const char omp_clause_num_ops[] =
242 0, /* OMP_CLAUSE_ERROR */
243 1, /* OMP_CLAUSE_PRIVATE */
244 1, /* OMP_CLAUSE_SHARED */
245 1, /* OMP_CLAUSE_FIRSTPRIVATE */
246 2, /* OMP_CLAUSE_LASTPRIVATE */
247 5, /* OMP_CLAUSE_REDUCTION */
248 5, /* OMP_CLAUSE_TASK_REDUCTION */
249 5, /* OMP_CLAUSE_IN_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 1, /* OMP_CLAUSE_AFFINITY */
254 2, /* OMP_CLAUSE_ALIGNED */
255 3, /* OMP_CLAUSE_ALLOCATE */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_NONTEMPORAL */
258 1, /* OMP_CLAUSE_UNIFORM */
259 1, /* OMP_CLAUSE_ENTER */
260 1, /* OMP_CLAUSE_LINK */
261 1, /* OMP_CLAUSE_DETACH */
262 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
263 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
264 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
265 1, /* OMP_CLAUSE_INCLUSIVE */
266 1, /* OMP_CLAUSE_EXCLUSIVE */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
271 1, /* OMP_CLAUSE_DOACROSS */
272 2, /* OMP_CLAUSE__CACHE_ */
273 2, /* OMP_CLAUSE_GANG */
274 1, /* OMP_CLAUSE_ASYNC */
275 1, /* OMP_CLAUSE_WAIT */
276 0, /* OMP_CLAUSE_AUTO */
277 0, /* OMP_CLAUSE_SEQ */
278 1, /* OMP_CLAUSE__LOOPTEMP_ */
279 1, /* OMP_CLAUSE__REDUCTEMP_ */
280 1, /* OMP_CLAUSE__CONDTEMP_ */
281 1, /* OMP_CLAUSE__SCANTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_NUM_THREADS */
284 1, /* OMP_CLAUSE_SCHEDULE */
285 0, /* OMP_CLAUSE_NOWAIT */
286 1, /* OMP_CLAUSE_ORDERED */
287 0, /* OMP_CLAUSE_DEFAULT */
288 3, /* OMP_CLAUSE_COLLAPSE */
289 0, /* OMP_CLAUSE_UNTIED */
290 1, /* OMP_CLAUSE_FINAL */
291 0, /* OMP_CLAUSE_MERGEABLE */
292 1, /* OMP_CLAUSE_DEVICE */
293 1, /* OMP_CLAUSE_DIST_SCHEDULE */
294 0, /* OMP_CLAUSE_INBRANCH */
295 0, /* OMP_CLAUSE_NOTINBRANCH */
296 2, /* OMP_CLAUSE_NUM_TEAMS */
297 1, /* OMP_CLAUSE_THREAD_LIMIT */
298 0, /* OMP_CLAUSE_PROC_BIND */
299 1, /* OMP_CLAUSE_SAFELEN */
300 1, /* OMP_CLAUSE_SIMDLEN */
301 0, /* OMP_CLAUSE_DEVICE_TYPE */
302 0, /* OMP_CLAUSE_FOR */
303 0, /* OMP_CLAUSE_PARALLEL */
304 0, /* OMP_CLAUSE_SECTIONS */
305 0, /* OMP_CLAUSE_TASKGROUP */
306 1, /* OMP_CLAUSE_PRIORITY */
307 1, /* OMP_CLAUSE_GRAINSIZE */
308 1, /* OMP_CLAUSE_NUM_TASKS */
309 0, /* OMP_CLAUSE_NOGROUP */
310 0, /* OMP_CLAUSE_THREADS */
311 0, /* OMP_CLAUSE_SIMD */
312 1, /* OMP_CLAUSE_HINT */
313 0, /* OMP_CLAUSE_DEFAULTMAP */
314 0, /* OMP_CLAUSE_ORDER */
315 0, /* OMP_CLAUSE_BIND */
316 1, /* OMP_CLAUSE_FILTER */
317 1, /* OMP_CLAUSE__SIMDUID_ */
318 0, /* OMP_CLAUSE__SIMT_ */
319 0, /* OMP_CLAUSE_INDEPENDENT */
320 1, /* OMP_CLAUSE_WORKER */
321 1, /* OMP_CLAUSE_VECTOR */
322 1, /* OMP_CLAUSE_NUM_GANGS */
323 1, /* OMP_CLAUSE_NUM_WORKERS */
324 1, /* OMP_CLAUSE_VECTOR_LENGTH */
325 3, /* OMP_CLAUSE_TILE */
326 0, /* OMP_CLAUSE_IF_PRESENT */
327 0, /* OMP_CLAUSE_FINALIZE */
328 0, /* OMP_CLAUSE_NOHOST */
331 const char * const omp_clause_code_name[] =
333 "error_clause",
334 "private",
335 "shared",
336 "firstprivate",
337 "lastprivate",
338 "reduction",
339 "task_reduction",
340 "in_reduction",
341 "copyin",
342 "copyprivate",
343 "linear",
344 "affinity",
345 "aligned",
346 "allocate",
347 "depend",
348 "nontemporal",
349 "uniform",
350 "enter",
351 "link",
352 "detach",
353 "use_device_ptr",
354 "use_device_addr",
355 "is_device_ptr",
356 "inclusive",
357 "exclusive",
358 "from",
359 "to",
360 "map",
361 "has_device_addr",
362 "doacross",
363 "_cache_",
364 "gang",
365 "async",
366 "wait",
367 "auto",
368 "seq",
369 "_looptemp_",
370 "_reductemp_",
371 "_condtemp_",
372 "_scantemp_",
373 "if",
374 "num_threads",
375 "schedule",
376 "nowait",
377 "ordered",
378 "default",
379 "collapse",
380 "untied",
381 "final",
382 "mergeable",
383 "device",
384 "dist_schedule",
385 "inbranch",
386 "notinbranch",
387 "num_teams",
388 "thread_limit",
389 "proc_bind",
390 "safelen",
391 "simdlen",
392 "device_type",
393 "for",
394 "parallel",
395 "sections",
396 "taskgroup",
397 "priority",
398 "grainsize",
399 "num_tasks",
400 "nogroup",
401 "threads",
402 "simd",
403 "hint",
404 "defaultmap",
405 "order",
406 "bind",
407 "filter",
408 "_simduid_",
409 "_simt_",
410 "independent",
411 "worker",
412 "vector",
413 "num_gangs",
414 "num_workers",
415 "vector_length",
416 "tile",
417 "if_present",
418 "finalize",
419 "nohost",
422 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
423 clause names, but for use in diagnostics etc. we would like to use the "user"
424 clause names. */
426 const char *
427 user_omp_clause_code_name (tree clause, bool oacc)
429 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
430 distinguish clauses as seen by the user. See also where front ends do
431 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
432 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
433 switch (OMP_CLAUSE_MAP_KIND (clause))
435 case GOMP_MAP_FORCE_ALLOC:
436 case GOMP_MAP_ALLOC: return "create";
437 case GOMP_MAP_FORCE_TO:
438 case GOMP_MAP_TO: return "copyin";
439 case GOMP_MAP_FORCE_FROM:
440 case GOMP_MAP_FROM: return "copyout";
441 case GOMP_MAP_FORCE_TOFROM:
442 case GOMP_MAP_TOFROM: return "copy";
443 case GOMP_MAP_RELEASE: return "delete";
444 case GOMP_MAP_FORCE_PRESENT: return "present";
445 case GOMP_MAP_ATTACH: return "attach";
446 case GOMP_MAP_FORCE_DETACH:
447 case GOMP_MAP_DETACH: return "detach";
448 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
449 case GOMP_MAP_LINK: return "link";
450 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
451 default: break;
454 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
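/* Illustrative example (not part of the original source): for an OpenACC
   'copyin (x)' clause the front end builds an OMP_CLAUSE_MAP with map kind
   GOMP_MAP_TO, so diagnostics can recover the user-visible spelling:

     const char *name = user_omp_clause_code_name (clause, true);
     // With OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP and
     // OMP_CLAUSE_MAP_KIND (clause) == GOMP_MAP_TO this yields "copyin",
     // per the switch above; anything else falls back to the OpenMP-centric
     // omp_clause_code_name[] entry.  */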
458 /* Return the tree node structure used by tree code CODE. */
460 static inline enum tree_node_structure_enum
461 tree_node_structure_for_code (enum tree_code code)
463 switch (TREE_CODE_CLASS (code))
465 case tcc_declaration:
466 switch (code)
468 case CONST_DECL: return TS_CONST_DECL;
469 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
470 case FIELD_DECL: return TS_FIELD_DECL;
471 case FUNCTION_DECL: return TS_FUNCTION_DECL;
472 case LABEL_DECL: return TS_LABEL_DECL;
473 case PARM_DECL: return TS_PARM_DECL;
474 case RESULT_DECL: return TS_RESULT_DECL;
475 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
476 case TYPE_DECL: return TS_TYPE_DECL;
477 case VAR_DECL: return TS_VAR_DECL;
478 default: return TS_DECL_NON_COMMON;
481 case tcc_type: return TS_TYPE_NON_COMMON;
483 case tcc_binary:
484 case tcc_comparison:
485 case tcc_expression:
486 case tcc_reference:
487 case tcc_statement:
488 case tcc_unary:
489 case tcc_vl_exp: return TS_EXP;
491 default: /* tcc_constant and tcc_exceptional */
492 break;
495 switch (code)
497 /* tcc_constant cases. */
498 case COMPLEX_CST: return TS_COMPLEX;
499 case FIXED_CST: return TS_FIXED_CST;
500 case INTEGER_CST: return TS_INT_CST;
501 case POLY_INT_CST: return TS_POLY_INT_CST;
502 case REAL_CST: return TS_REAL_CST;
503 case STRING_CST: return TS_STRING;
504 case VECTOR_CST: return TS_VECTOR;
505 case VOID_CST: return TS_TYPED;
507 /* tcc_exceptional cases. */
508 case BLOCK: return TS_BLOCK;
509 case CONSTRUCTOR: return TS_CONSTRUCTOR;
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case OMP_CLAUSE: return TS_OMP_CLAUSE;
513 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
514 case PLACEHOLDER_EXPR: return TS_COMMON;
515 case SSA_NAME: return TS_SSA_NAME;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
518 case TREE_BINFO: return TS_BINFO;
519 case TREE_LIST: return TS_LIST;
520 case TREE_VEC: return TS_VEC;
522 default:
523 gcc_unreachable ();
528 /* Initialize tree_contains_struct to describe the hierarchy of tree
529 nodes. */
531 static void
532 initialize_tree_contains_struct (void)
534 unsigned i;
536 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
538 enum tree_code code;
539 enum tree_node_structure_enum ts_code;
541 code = (enum tree_code) i;
542 ts_code = tree_node_structure_for_code (code);
544 /* Mark the TS structure itself. */
545 tree_contains_struct[code][ts_code] = 1;
547 /* Mark all the structures that TS is derived from. */
548 switch (ts_code)
550 case TS_TYPED:
551 case TS_BLOCK:
552 case TS_OPTIMIZATION:
553 case TS_TARGET_OPTION:
554 MARK_TS_BASE (code);
555 break;
557 case TS_COMMON:
558 case TS_INT_CST:
559 case TS_POLY_INT_CST:
560 case TS_REAL_CST:
561 case TS_FIXED_CST:
562 case TS_VECTOR:
563 case TS_STRING:
564 case TS_COMPLEX:
565 case TS_SSA_NAME:
566 case TS_CONSTRUCTOR:
567 case TS_EXP:
568 case TS_STATEMENT_LIST:
569 MARK_TS_TYPED (code);
570 break;
572 case TS_IDENTIFIER:
573 case TS_DECL_MINIMAL:
574 case TS_TYPE_COMMON:
575 case TS_LIST:
576 case TS_VEC:
577 case TS_BINFO:
578 case TS_OMP_CLAUSE:
579 MARK_TS_COMMON (code);
580 break;
582 case TS_TYPE_WITH_LANG_SPECIFIC:
583 MARK_TS_TYPE_COMMON (code);
584 break;
586 case TS_TYPE_NON_COMMON:
587 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
588 break;
590 case TS_DECL_COMMON:
591 MARK_TS_DECL_MINIMAL (code);
592 break;
594 case TS_DECL_WRTL:
595 case TS_CONST_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 case TS_DECL_NON_COMMON:
600 MARK_TS_DECL_WITH_VIS (code);
601 break;
603 case TS_DECL_WITH_VIS:
604 case TS_PARM_DECL:
605 case TS_LABEL_DECL:
606 case TS_RESULT_DECL:
607 MARK_TS_DECL_WRTL (code);
608 break;
610 case TS_FIELD_DECL:
611 MARK_TS_DECL_COMMON (code);
612 break;
614 case TS_VAR_DECL:
615 MARK_TS_DECL_WITH_VIS (code);
616 break;
618 case TS_TYPE_DECL:
619 case TS_FUNCTION_DECL:
620 MARK_TS_DECL_NON_COMMON (code);
621 break;
623 case TS_TRANSLATION_UNIT_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
627 default:
628 gcc_unreachable ();
632 /* Basic consistency checks for attributes used in fold. */
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
634 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
646 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
662 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
663 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
665 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
666 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
667 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
669 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
670 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
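/* Illustrative note (not part of the original source): the table filled in
   above is what CODE_CONTAINS_STRUCT consults, so after initialization
   checks like the following hold, mirroring the assertions above:

     gcc_assert (CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS));
     gcc_assert (CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_COMMON));
     gcc_assert (!CODE_CONTAINS_STRUCT (INTEGER_CST, TS_DECL_COMMON));  */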
676 /* Init tree.cc. */
678 void
679 init_ttree (void)
681 /* Initialize the hash table of types. */
682 type_hash_table
683 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
685 debug_expr_for_decl
686 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
688 value_expr_for_decl
689 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
691 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
693 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
695 int_cst_node = make_int_cst (1, 1);
697 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
699 cl_optimization_node = make_node (OPTIMIZATION_NODE);
700 cl_target_option_node = make_node (TARGET_OPTION_NODE);
702 /* Initialize the tree_contains_struct array. */
703 initialize_tree_contains_struct ();
704 lang_hooks.init_ts ();
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE if this changes the
721 name. */
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 /* Return true if DECL may need an assembler name to be set. */
732 static inline bool
733 need_assembler_name_p (tree decl)
735 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
736 Rule merging. This makes type_odr_p return true on those types during
737 LTO and by comparing the mangled name, we can say what types are intended
738 to be equivalent across compilation units.
740 We do not store names of type_in_anonymous_namespace_p.
742 Record, union and enumeration types have linkage that allows us
743 to check type_in_anonymous_namespace_p. We do not mangle compound types
744 that can always be compared structurally.
746 Similarly for builtin types, we compare properties of their main variant.
747 A special case is integer types, where mangling does make a difference
748 between char/signed char/unsigned char etc. Storing names for these allows
749 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
750 See cp/mangle.cc:write_builtin_type for details. */
752 if (TREE_CODE (decl) == TYPE_DECL)
754 if (DECL_NAME (decl)
755 && decl == TYPE_NAME (TREE_TYPE (decl))
756 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
757 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
758 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
759 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
760 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
761 && (type_with_linkage_p (TREE_TYPE (decl))
762 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
763 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
764 return !DECL_ASSEMBLER_NAME_SET_P (decl);
765 return false;
767 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
768 if (!VAR_OR_FUNCTION_DECL_P (decl))
769 return false;
771 /* If DECL already has its assembler name set, it does not need a
772 new one. */
773 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
774 || DECL_ASSEMBLER_NAME_SET_P (decl))
775 return false;
777 /* Abstract decls do not need an assembler name. */
778 if (DECL_ABSTRACT_P (decl))
779 return false;
781 /* For VAR_DECLs, only static, public and external symbols need an
782 assembler name. */
783 if (VAR_P (decl)
784 && !TREE_STATIC (decl)
785 && !TREE_PUBLIC (decl)
786 && !DECL_EXTERNAL (decl))
787 return false;
789 if (TREE_CODE (decl) == FUNCTION_DECL)
791 /* Do not set assembler name on builtins. Allow RTL expansion to
792 decide whether to expand inline or via a regular call. */
793 if (fndecl_built_in_p (decl)
794 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
795 return false;
797 /* Functions represented in the callgraph need an assembler name. */
798 if (cgraph_node::get (decl) != NULL)
799 return true;
801 /* Unused and not public functions don't need an assembler name. */
802 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
803 return false;
806 return true;
809 /* If T needs an assembler name, have one created for it. */
811 void
812 assign_assembler_name_if_needed (tree t)
814 if (need_assembler_name_p (t))
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
830 decl_assembler_name (t);
832 input_location = saved_location;
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
858 /* When the target supports named sections, return the section name of NODE
859 as a string, or NULL if it is in no section. */
860 const char *
861 decl_section_name (const_tree node)
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
869 /* Set section name of NODE to VALUE (a plain string), or clear it when
870 VALUE is NULL. */
871 void
872 set_decl_section_name (tree node, const char *value)
874 struct symtab_node *snode;
876 if (value == NULL)
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
889 /* Set section name of NODE to match the section name of OTHER.
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
907 else
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
930 struct varpool_node *vnode;
932 if (model == TLS_MODEL_NONE)
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
943 /* Compute the number of bytes occupied by a tree with code CODE.
944 This function cannot be used for nodes that have variable sizes,
945 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
946 size_t
947 tree_code_size (enum tree_code code)
949 switch (TREE_CODE_CLASS (code))
951 case tcc_declaration: /* A decl node */
952 switch (code)
954 case FIELD_DECL: return sizeof (tree_field_decl);
955 case PARM_DECL: return sizeof (tree_parm_decl);
956 case VAR_DECL: return sizeof (tree_var_decl);
957 case LABEL_DECL: return sizeof (tree_label_decl);
958 case RESULT_DECL: return sizeof (tree_result_decl);
959 case CONST_DECL: return sizeof (tree_const_decl);
960 case TYPE_DECL: return sizeof (tree_type_decl);
961 case FUNCTION_DECL: return sizeof (tree_function_decl);
962 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
963 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
964 case NAMESPACE_DECL:
965 case IMPORTED_DECL:
966 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
967 default:
968 gcc_checking_assert (code >= NUM_TREE_CODES);
969 return lang_hooks.tree_size (code);
972 case tcc_type: /* a type node */
973 switch (code)
975 case OFFSET_TYPE:
976 case ENUMERAL_TYPE:
977 case BOOLEAN_TYPE:
978 case INTEGER_TYPE:
979 case REAL_TYPE:
980 case OPAQUE_TYPE:
981 case POINTER_TYPE:
982 case REFERENCE_TYPE:
983 case NULLPTR_TYPE:
984 case FIXED_POINT_TYPE:
985 case COMPLEX_TYPE:
986 case VECTOR_TYPE:
987 case ARRAY_TYPE:
988 case RECORD_TYPE:
989 case UNION_TYPE:
990 case QUAL_UNION_TYPE:
991 case VOID_TYPE:
992 case FUNCTION_TYPE:
993 case METHOD_TYPE:
994 case BITINT_TYPE:
995 case LANG_TYPE: return sizeof (tree_type_non_common);
996 default:
997 gcc_checking_assert (code >= NUM_TREE_CODES);
998 return lang_hooks.tree_size (code);
1001 case tcc_reference: /* a reference */
1002 case tcc_expression: /* an expression */
1003 case tcc_statement: /* an expression with side effects */
1004 case tcc_comparison: /* a comparison expression */
1005 case tcc_unary: /* a unary arithmetic expression */
1006 case tcc_binary: /* a binary arithmetic expression */
1007 return (sizeof (struct tree_exp)
1008 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1010 case tcc_constant: /* a constant */
1011 switch (code)
1013 case VOID_CST: return sizeof (tree_typed);
1014 case INTEGER_CST: gcc_unreachable ();
1015 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1016 case REAL_CST: return sizeof (tree_real_cst);
1017 case FIXED_CST: return sizeof (tree_fixed_cst);
1018 case COMPLEX_CST: return sizeof (tree_complex);
1019 case VECTOR_CST: gcc_unreachable ();
1020 case STRING_CST: gcc_unreachable ();
1021 default:
1022 gcc_checking_assert (code >= NUM_TREE_CODES);
1023 return lang_hooks.tree_size (code);
1026 case tcc_exceptional: /* something random, like an identifier. */
1027 switch (code)
1029 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1030 case TREE_LIST: return sizeof (tree_list);
1032 case ERROR_MARK:
1033 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1035 case TREE_VEC: gcc_unreachable ();
1036 case OMP_CLAUSE: gcc_unreachable ();
1038 case SSA_NAME: return sizeof (tree_ssa_name);
1040 case STATEMENT_LIST: return sizeof (tree_statement_list);
1041 case BLOCK: return sizeof (struct tree_block);
1042 case CONSTRUCTOR: return sizeof (tree_constructor);
1043 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1044 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1046 default:
1047 gcc_checking_assert (code >= NUM_TREE_CODES);
1048 return lang_hooks.tree_size (code);
1051 default:
1052 gcc_unreachable ();
1056 /* Compute the number of bytes occupied by NODE. This routine only
1057 looks at TREE_CODE, except for those nodes that have variable sizes. */
1058 size_t
1059 tree_size (const_tree node)
1061 const enum tree_code code = TREE_CODE (node);
1062 switch (code)
1064 case INTEGER_CST:
1065 return (sizeof (struct tree_int_cst)
1066 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1068 case TREE_BINFO:
1069 return (offsetof (struct tree_binfo, base_binfos)
1070 + vec<tree, va_gc>
1071 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1073 case TREE_VEC:
1074 return (sizeof (struct tree_vec)
1075 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1077 case VECTOR_CST:
1078 return (sizeof (struct tree_vector)
1079 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1081 case STRING_CST:
1082 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1084 case OMP_CLAUSE:
1085 return (sizeof (struct tree_omp_clause)
1086 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1087 * sizeof (tree));
1089 default:
1090 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1091 return (sizeof (struct tree_exp)
1092 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1093 else
1094 return tree_code_size (code);
1098 /* Return tree node kind based on tree CODE. */
1100 static tree_node_kind
1101 get_stats_node_kind (enum tree_code code)
1103 enum tree_code_class type = TREE_CODE_CLASS (code);
1105 switch (type)
1107 case tcc_declaration: /* A decl node */
1108 return d_kind;
1109 case tcc_type: /* a type node */
1110 return t_kind;
1111 case tcc_statement: /* an expression with side effects */
1112 return s_kind;
1113 case tcc_reference: /* a reference */
1114 return r_kind;
1115 case tcc_expression: /* an expression */
1116 case tcc_comparison: /* a comparison expression */
1117 case tcc_unary: /* a unary arithmetic expression */
1118 case tcc_binary: /* a binary arithmetic expression */
1119 return e_kind;
1120 case tcc_constant: /* a constant */
1121 return c_kind;
1122 case tcc_exceptional: /* something random, like an identifier. */
1123 switch (code)
1125 case IDENTIFIER_NODE:
1126 return id_kind;
1127 case TREE_VEC:
1128 return vec_kind;
1129 case TREE_BINFO:
1130 return binfo_kind;
1131 case SSA_NAME:
1132 return ssa_name_kind;
1133 case BLOCK:
1134 return b_kind;
1135 case CONSTRUCTOR:
1136 return constr_kind;
1137 case OMP_CLAUSE:
1138 return omp_clause_kind;
1139 default:
1140 return x_kind;
1142 break;
1143 case tcc_vl_exp:
1144 return e_kind;
1145 default:
1146 gcc_unreachable ();
1150 /* Record interesting allocation statistics for a tree node with CODE
1151 and LENGTH. */
1153 static void
1154 record_node_allocation_statistics (enum tree_code code, size_t length)
1156 if (!GATHER_STATISTICS)
1157 return;
1159 tree_node_kind kind = get_stats_node_kind (code);
1161 tree_code_counts[(int) code]++;
1162 tree_node_counts[(int) kind]++;
1163 tree_node_sizes[(int) kind] += length;
1166 /* Allocate and return a new UID from the DECL_UID namespace. */
1169 allocate_decl_uid (void)
1171 return next_decl_uid++;
1174 /* Return a newly allocated node of code CODE. For decl and type
1175 nodes, some other fields are initialized. The rest of the node is
1176 initialized to zero. This function cannot be used for TREE_VEC,
1177 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1178 tree_code_size.
1180 Achoo! I got a code in the node. */
1182 tree
1183 make_node (enum tree_code code MEM_STAT_DECL)
1185 tree t;
1186 enum tree_code_class type = TREE_CODE_CLASS (code);
1187 size_t length = tree_code_size (code);
1189 record_node_allocation_statistics (code, length);
1191 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1192 TREE_SET_CODE (t, code);
1194 switch (type)
1196 case tcc_statement:
1197 if (code != DEBUG_BEGIN_STMT)
1198 TREE_SIDE_EFFECTS (t) = 1;
1199 break;
1201 case tcc_declaration:
1202 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1204 if (code == FUNCTION_DECL)
1206 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1207 SET_DECL_MODE (t, FUNCTION_MODE);
1209 else
1210 SET_DECL_ALIGN (t, 1);
1212 DECL_SOURCE_LOCATION (t) = input_location;
1213 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1214 DECL_UID (t) = --next_debug_decl_uid;
1215 else
1217 DECL_UID (t) = allocate_decl_uid ();
1218 SET_DECL_PT_UID (t, -1);
1220 if (TREE_CODE (t) == LABEL_DECL)
1221 LABEL_DECL_UID (t) = -1;
1223 break;
1225 case tcc_type:
1226 TYPE_UID (t) = next_type_uid++;
1227 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1228 TYPE_USER_ALIGN (t) = 0;
1229 TYPE_MAIN_VARIANT (t) = t;
1230 TYPE_CANONICAL (t) = t;
1232 /* Default to no attributes for type, but let target change that. */
1233 TYPE_ATTRIBUTES (t) = NULL_TREE;
1234 targetm.set_default_type_attributes (t);
1236 /* We have not yet computed the alias set for this type. */
1237 TYPE_ALIAS_SET (t) = -1;
1238 break;
1240 case tcc_constant:
1241 TREE_CONSTANT (t) = 1;
1242 break;
1244 case tcc_expression:
1245 switch (code)
1247 case INIT_EXPR:
1248 case MODIFY_EXPR:
1249 case VA_ARG_EXPR:
1250 case PREDECREMENT_EXPR:
1251 case PREINCREMENT_EXPR:
1252 case POSTDECREMENT_EXPR:
1253 case POSTINCREMENT_EXPR:
1254 /* All of these have side-effects, no matter what their
1255 operands are. */
1256 TREE_SIDE_EFFECTS (t) = 1;
1257 break;
1259 default:
1260 break;
1262 break;
1264 case tcc_exceptional:
1265 switch (code)
1267 case TARGET_OPTION_NODE:
1268 TREE_TARGET_OPTION(t)
1269 = ggc_cleared_alloc<struct cl_target_option> ();
1270 break;
1272 case OPTIMIZATION_NODE:
1273 TREE_OPTIMIZATION (t)
1274 = ggc_cleared_alloc<struct cl_optimization> ();
1275 break;
1277 default:
1278 break;
1280 break;
1282 default:
1283 /* Other classes need no special treatment. */
1284 break;
1287 return t;
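/* Illustrative sketch (not part of the original source): make_node zeroes
   the new node and then applies the per-class defaults seen above, e.g.

     tree blk = make_node (BLOCK);        // plain zero-initialized node
     tree cst = make_node (COMPLEX_CST);  // tcc_constant: TREE_CONSTANT set
     tree typ = make_node (INTEGER_TYPE); // fresh TYPE_UID, alias set -1,
                                          // main variant and canonical = itself

   Decl and type nodes are normally created via wrappers such as build_decl
   or make_signed_type rather than by calling make_node directly.  */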
1290 /* Free tree node. */
1292 void
1293 free_node (tree node)
1295 enum tree_code code = TREE_CODE (node);
1296 if (GATHER_STATISTICS)
1298 enum tree_node_kind kind = get_stats_node_kind (code);
1300 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1301 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1302 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1304 tree_code_counts[(int) TREE_CODE (node)]--;
1305 tree_node_counts[(int) kind]--;
1306 tree_node_sizes[(int) kind] -= tree_size (node);
1308 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1309 vec_free (CONSTRUCTOR_ELTS (node));
1310 else if (code == BLOCK)
1311 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1312 else if (code == TREE_BINFO)
1313 vec_free (BINFO_BASE_ACCESSES (node));
1314 else if (code == OPTIMIZATION_NODE)
1315 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1316 else if (code == TARGET_OPTION_NODE)
1317 cl_target_option_free (TREE_TARGET_OPTION (node));
1318 ggc_free (node);
1321 /* Return a new node with the same contents as NODE except that its
1322 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1324 tree
1325 copy_node (tree node MEM_STAT_DECL)
1327 tree t;
1328 enum tree_code code = TREE_CODE (node);
1329 size_t length;
1331 gcc_assert (code != STATEMENT_LIST);
1333 length = tree_size (node);
1334 record_node_allocation_statistics (code, length);
1335 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1336 memcpy (t, node, length);
1338 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1339 TREE_CHAIN (t) = 0;
1340 TREE_ASM_WRITTEN (t) = 0;
1341 TREE_VISITED (t) = 0;
1343 if (TREE_CODE_CLASS (code) == tcc_declaration)
1345 if (code == DEBUG_EXPR_DECL)
1346 DECL_UID (t) = --next_debug_decl_uid;
1347 else
1349 DECL_UID (t) = allocate_decl_uid ();
1350 if (DECL_PT_UID_SET_P (node))
1351 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1353 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1354 && DECL_HAS_VALUE_EXPR_P (node))
1356 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1357 DECL_HAS_VALUE_EXPR_P (t) = 1;
1359 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1360 if (VAR_P (node))
1362 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1363 t->decl_with_vis.symtab_node = NULL;
1365 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1367 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1368 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1370 if (TREE_CODE (node) == FUNCTION_DECL)
1372 DECL_STRUCT_FUNCTION (t) = NULL;
1373 t->decl_with_vis.symtab_node = NULL;
1376 else if (TREE_CODE_CLASS (code) == tcc_type)
1378 TYPE_UID (t) = next_type_uid++;
1379 /* The following is so that the debug code for
1380 the copy is different from the original type.
1381 The two statements usually duplicate each other
1382 (because they clear fields of the same union),
1383 but the optimizer should catch that. */
1384 TYPE_SYMTAB_ADDRESS (t) = 0;
1385 TYPE_SYMTAB_DIE (t) = 0;
1387 /* Do not copy the values cache. */
1388 if (TYPE_CACHED_VALUES_P (t))
1390 TYPE_CACHED_VALUES_P (t) = 0;
1391 TYPE_CACHED_VALUES (t) = NULL_TREE;
1394 else if (code == TARGET_OPTION_NODE)
1396 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1397 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1398 sizeof (struct cl_target_option));
1400 else if (code == OPTIMIZATION_NODE)
1402 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1403 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1404 sizeof (struct cl_optimization));
1407 return t;
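/* Illustrative sketch (not part of the original source): copy_node makes a
   shallow copy; only the bookkeeping fields handled above are reset.

     tree v2 = copy_node (v1);   // v1 a VAR_DECL
     // TREE_CHAIN (v2) is NULL, DECL_UID (v2) is freshly allocated and the
     // symtab_node pointer is cleared, but DECL_NAME, TREE_TYPE and the
     // remaining fields still alias those of v1.  */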
1410 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1411 For example, this can copy a list made of TREE_LIST nodes. */
1413 tree
1414 copy_list (tree list)
1416 tree head;
1417 tree prev, next;
1419 if (list == 0)
1420 return 0;
1422 head = prev = copy_node (list);
1423 next = TREE_CHAIN (list);
1424 while (next)
1426 TREE_CHAIN (prev) = copy_node (next);
1427 prev = TREE_CHAIN (prev);
1428 next = TREE_CHAIN (next);
1430 return head;
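/* Illustrative sketch (not part of the original source): copy_list copies
   only the TREE_LIST spine, not the values hanging off it.

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
                            tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
     tree dup = copy_list (orig);
     // dup != orig and TREE_CHAIN (dup) != TREE_CHAIN (orig), but
     // TREE_VALUE (dup) == TREE_VALUE (orig) == integer_zero_node.  */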
1434 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1435 INTEGER_CST with value CST and type TYPE. */
1437 static unsigned int
1438 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1440 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1441 /* We need extra HWIs if CST is an unsigned integer with its
1442 upper bit set. */
1443 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1444 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1445 return cst.get_len ();
1448 /* Return a new INTEGER_CST with value CST and type TYPE. */
1450 static tree
1451 build_new_int_cst (tree type, const wide_int &cst)
1453 unsigned int len = cst.get_len ();
1454 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1455 tree nt = make_int_cst (len, ext_len);
1457 if (len < ext_len)
1459 --ext_len;
1460 TREE_INT_CST_ELT (nt, ext_len)
1461 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1462 for (unsigned int i = len; i < ext_len; ++i)
1463 TREE_INT_CST_ELT (nt, i) = -1;
1465 else if (TYPE_UNSIGNED (type)
1466 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1468 len--;
1469 TREE_INT_CST_ELT (nt, len)
1470 = zext_hwi (cst.elt (len),
1471 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1474 for (unsigned int i = 0; i < len; i++)
1475 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1476 TREE_TYPE (nt) = type;
1477 return nt;
1480 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1482 static tree
1483 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1484 CXX_MEM_STAT_INFO)
1486 size_t length = sizeof (struct tree_poly_int_cst);
1487 record_node_allocation_statistics (POLY_INT_CST, length);
1489 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1491 TREE_SET_CODE (t, POLY_INT_CST);
1492 TREE_CONSTANT (t) = 1;
1493 TREE_TYPE (t) = type;
1494 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1495 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1496 return t;
1499 /* Create a constant tree that contains CST sign-extended to TYPE. */
1501 tree
1502 build_int_cst (tree type, poly_int64 cst)
1504 /* Support legacy code. */
1505 if (!type)
1506 type = integer_type_node;
1508 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1511 /* Create a constant tree that contains CST zero-extended to TYPE. */
1513 tree
1514 build_int_cstu (tree type, poly_uint64 cst)
1516 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
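/* Illustrative sketch (not part of the original source): the only difference
   between the two entry points above is whether CST is sign- or zero-extended
   to the precision of TYPE before being interned.

     tree a = build_int_cst (unsigned_type_node, -1);
     // a is the all-ones value of unsigned int (sign-extended -1).
     tree b = build_int_cstu (unsigned_type_node, 5);
     // b is 5; for nonnegative values that fit, both forms agree.  */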
1519 /* Create a constant tree that contains CST sign-extended to TYPE. */
1521 tree
1522 build_int_cst_type (tree type, poly_int64 cst)
1524 gcc_assert (type);
1525 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1528 /* Construct a tree of type TYPE with the value given by CST. The signedness
1529 of CST is assumed to be the same as the signedness of TYPE. */
1531 tree
1532 double_int_to_tree (tree type, double_int cst)
1534 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1537 /* We force the wide_int CST to the range of the type TYPE by sign or
1538 zero extending it. OVERFLOWABLE indicates if we are interested in
1539 overflow of the value, when >0 we are only interested in signed
1540 overflow, for <0 we are interested in any overflow. OVERFLOWED
1541 indicates whether overflow has already occurred. We force
1542 CST's value to be within range of TYPE (by setting to 0 or 1 all
1543 the bits outside the type's range). We set TREE_OVERFLOW if
1544 OVERFLOWED is nonzero,
1546 or OVERFLOWABLE is >0 and signed overflow occurs
1547 or OVERFLOWABLE is <0 and any overflow occurs
1548 We return a new tree node for the extended wide_int. The node
1549 is shared if no overflow flags are set. */
1552 tree
1553 force_fit_type (tree type, const poly_wide_int_ref &cst,
1554 int overflowable, bool overflowed)
1556 signop sign = TYPE_SIGN (type);
1558 /* If we need to set overflow flags, return a new unshared node. */
1559 if (overflowed || !wi::fits_to_tree_p (cst, type))
1561 if (overflowed
1562 || overflowable < 0
1563 || (overflowable > 0 && sign == SIGNED))
1565 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1566 sign);
1567 tree t;
1568 if (tmp.is_constant ())
1569 t = build_new_int_cst (type, tmp.coeffs[0]);
1570 else
1572 tree coeffs[NUM_POLY_INT_COEFFS];
1573 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1575 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1576 TREE_OVERFLOW (coeffs[i]) = 1;
1578 t = build_new_poly_int_cst (type, coeffs);
1580 TREE_OVERFLOW (t) = 1;
1581 return t;
1585 /* Else build a shared node. */
1586 return wide_int_to_tree (type, cst);
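/* Illustrative sketch (not part of the original source), using the usual
   global type nodes: with OVERFLOWABLE < 0 a value that does not fit is
   wrapped to the type and flagged rather than shared.

     tree t = force_fit_type (signed_char_type_node,
                              wi::shwi (300, 32), -1, false);
     // t is an unshared INTEGER_CST with value 44 (300 wrapped to 8 signed
     // bits) and TREE_OVERFLOW (t) set.  */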
1589 /* These are the hash table functions for the hash table of INTEGER_CST
1590 nodes of a sizetype. */
1592 /* Return the hash code X, an INTEGER_CST. */
1594 hashval_t
1595 int_cst_hasher::hash (tree x)
1597 const_tree const t = x;
1598 hashval_t code = TYPE_UID (TREE_TYPE (t));
1599 int i;
1601 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1602 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1604 return code;
1607 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1608 is the same as that represented by Y (also an INTEGER_CST tree node). */
1610 bool
1611 int_cst_hasher::equal (tree x, tree y)
1613 const_tree const xt = x;
1614 const_tree const yt = y;
1616 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1617 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1618 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1619 return false;
1621 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1622 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1623 return false;
1625 return true;
1628 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1629 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1630 number of slots that can be cached for the type. */
1632 static inline tree
1633 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1634 int slot, int max_slots)
1636 gcc_checking_assert (slot >= 0);
1637 /* Initialize cache. */
1638 if (!TYPE_CACHED_VALUES_P (type))
1640 TYPE_CACHED_VALUES_P (type) = 1;
1641 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1643 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1644 if (!t)
1646 /* Create a new shared int. */
1647 t = build_new_int_cst (type, cst);
1648 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1650 return t;
1653 /* Create an INTEGER_CST node of TYPE and value CST.
1654 The returned node is always shared. For small integers we use a
1655 per-type vector cache, for larger ones we use a single hash table.
1656 The value is extended from its precision according to the sign of
1657 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1658 the upper bits and ensures that hashing and value equality based
1659 upon the underlying HOST_WIDE_INTs works without masking. */
1661 static tree
1662 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1664 tree t;
1665 int ix = -1;
1666 int limit = 0;
1668 gcc_assert (type);
1669 unsigned int prec = TYPE_PRECISION (type);
1670 signop sgn = TYPE_SIGN (type);
1672 /* Verify that everything is canonical. */
1673 int l = pcst.get_len ();
1674 if (l > 1)
1676 if (pcst.elt (l - 1) == 0)
1677 gcc_checking_assert (pcst.elt (l - 2) < 0);
1678 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1679 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1682 wide_int cst = wide_int::from (pcst, prec, sgn);
1683 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1685 enum tree_code code = TREE_CODE (type);
1686 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1688 /* Cache NULL pointer and zero bounds. */
1689 if (cst == 0)
1690 ix = 0;
1691 /* Cache upper bounds of pointers. */
1692 else if (cst == wi::max_value (prec, sgn))
1693 ix = 1;
1694 /* Cache 1 which is used for a non-zero range. */
1695 else if (cst == 1)
1696 ix = 2;
1698 if (ix >= 0)
1700 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1701 /* Make sure no one is clobbering the shared constant. */
1702 gcc_checking_assert (TREE_TYPE (t) == type
1703 && cst == wi::to_wide (t));
1704 return t;
1707 if (ext_len == 1)
1709 /* We just need to store a single HOST_WIDE_INT. */
1710 HOST_WIDE_INT hwi;
1711 if (TYPE_UNSIGNED (type))
1712 hwi = cst.to_uhwi ();
1713 else
1714 hwi = cst.to_shwi ();
1716 switch (code)
1718 case NULLPTR_TYPE:
1719 gcc_assert (hwi == 0);
1720 /* Fallthru. */
1722 case POINTER_TYPE:
1723 case REFERENCE_TYPE:
1724 /* Ignore pointers, as they were already handled above. */
1725 break;
1727 case BOOLEAN_TYPE:
1728 /* Cache false or true. */
1729 limit = 2;
1730 if (IN_RANGE (hwi, 0, 1))
1731 ix = hwi;
1732 break;
1734 case INTEGER_TYPE:
1735 case OFFSET_TYPE:
1736 case BITINT_TYPE:
1737 if (TYPE_SIGN (type) == UNSIGNED)
1739 /* Cache [0, N). */
1740 limit = param_integer_share_limit;
1741 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1742 ix = hwi;
1744 else
1746 /* Cache [-1, N). */
1747 limit = param_integer_share_limit + 1;
1748 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1749 ix = hwi + 1;
1751 break;
1753 case ENUMERAL_TYPE:
1754 break;
1756 default:
1757 gcc_unreachable ();
1760 if (ix >= 0)
1762 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1763 /* Make sure no one is clobbering the shared constant. */
1764 gcc_checking_assert (TREE_TYPE (t) == type
1765 && TREE_INT_CST_NUNITS (t) == 1
1766 && TREE_INT_CST_EXT_NUNITS (t) == 1
1767 && TREE_INT_CST_ELT (t, 0) == hwi);
1768 return t;
1770 else
1772 /* Use the cache of larger shared ints, using int_cst_node as
1773 a temporary. */
1775 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1776 TREE_TYPE (int_cst_node) = type;
1778 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1779 t = *slot;
1780 if (!t)
1782 /* Insert this one into the hash table. */
1783 t = int_cst_node;
1784 *slot = t;
1785 /* Make a new node for next time round. */
1786 int_cst_node = make_int_cst (1, 1);
1790 else
1792 /* The value either hashes properly or we drop it on the floor
1793 for the gc to take care of. There will not be enough of them
1794 to worry about. */
1796 tree nt = build_new_int_cst (type, cst);
1797 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1798 t = *slot;
1799 if (!t)
1801 /* Insert this one into the hash table. */
1802 t = nt;
1803 *slot = t;
1805 else
1806 ggc_free (nt);
1809 return t;
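/* Illustrative note (not part of the original source): both the per-type
   small-value cache and the hash table above return the same node for a
   given (type, value) pair, so equal integer constants are pointer-equal.

     gcc_assert (build_int_cst (integer_type_node, 7)
                 == build_int_cst (integer_type_node, 7));
     gcc_assert (build_int_cst (integer_type_node, 123456)
                 == build_int_cst (integer_type_node, 123456));  */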
1812 hashval_t
1813 poly_int_cst_hasher::hash (tree t)
1815 inchash::hash hstate;
1817 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1818 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1819 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1821 return hstate.end ();
1824 bool
1825 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1827 if (TREE_TYPE (x) != y.first)
1828 return false;
1829 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1830 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1831 return false;
1832 return true;
1835 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1836 The elements must also have type TYPE. */
1838 tree
1839 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1841 unsigned int prec = TYPE_PRECISION (type);
1842 gcc_assert (prec <= values.coeffs[0].get_precision ());
1843 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1845 inchash::hash h;
1846 h.add_int (TYPE_UID (type));
1847 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1848 h.add_wide_int (c.coeffs[i]);
1849 poly_int_cst_hasher::compare_type comp (type, &c);
1850 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1851 INSERT);
1852 if (*slot == NULL_TREE)
1854 tree coeffs[NUM_POLY_INT_COEFFS];
1855 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1856 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1857 *slot = build_new_poly_int_cst (type, coeffs);
1859 return *slot;
1862 /* Create a constant tree with value VALUE in type TYPE. */
1864 tree
1865 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1867 if (value.is_constant ())
1868 return wide_int_to_tree_1 (type, value.coeffs[0]);
1869 return build_poly_int_cst (type, value);
1872 /* Insert INTEGER_CST T into a cache of integer constants, and return
1873 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1874 is false, and T falls into the type's 'smaller values' range, there
1875 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1876 or the value is large, should an existing entry exist, it is
1877 returned (rather than inserting T). */
1879 tree
1880 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1882 tree type = TREE_TYPE (t);
1883 int ix = -1;
1884 int limit = 0;
1885 int prec = TYPE_PRECISION (type);
1887 gcc_assert (!TREE_OVERFLOW (t));
1889 /* The caching indices here must match those in
1890 wide_int_to_tree_1. */
1891 switch (TREE_CODE (type))
1893 case NULLPTR_TYPE:
1894 gcc_checking_assert (integer_zerop (t));
1895 /* Fallthru. */
1897 case POINTER_TYPE:
1898 case REFERENCE_TYPE:
1900 if (integer_zerop (t))
1901 ix = 0;
1902 else if (integer_onep (t))
1903 ix = 2;
1905 if (ix >= 0)
1906 limit = 3;
1908 break;
1910 case BOOLEAN_TYPE:
1911 /* Cache false or true. */
1912 limit = 2;
1913 if (wi::ltu_p (wi::to_wide (t), 2))
1914 ix = TREE_INT_CST_ELT (t, 0);
1915 break;
1917 case INTEGER_TYPE:
1918 case OFFSET_TYPE:
1919 case BITINT_TYPE:
1920 if (TYPE_UNSIGNED (type))
1922 /* Cache 0..N */
1923 limit = param_integer_share_limit;
1925 /* This is a little hokey, but if the prec is smaller than
1926 what is necessary to hold param_integer_share_limit, then the
1927 obvious test will not get the correct answer. */
1928 if (prec < HOST_BITS_PER_WIDE_INT)
1930 if (tree_to_uhwi (t)
1931 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1932 ix = tree_to_uhwi (t);
1934 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1935 ix = tree_to_uhwi (t);
1937 else
1939 /* Cache -1..N */
1940 limit = param_integer_share_limit + 1;
1942 if (integer_minus_onep (t))
1943 ix = 0;
1944 else if (!wi::neg_p (wi::to_wide (t)))
1946 if (prec < HOST_BITS_PER_WIDE_INT)
1948 if (tree_to_shwi (t) < param_integer_share_limit)
1949 ix = tree_to_shwi (t) + 1;
1951 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1952 ix = tree_to_shwi (t) + 1;
1955 break;
1957 case ENUMERAL_TYPE:
1958 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1959 members. */
1960 break;
1962 default:
1963 gcc_unreachable ();
1966 if (ix >= 0)
1968 /* Look for it in the type's vector of small shared ints. */
1969 if (!TYPE_CACHED_VALUES_P (type))
1971 TYPE_CACHED_VALUES_P (type) = 1;
1972 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1975 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1977 gcc_checking_assert (might_duplicate);
1978 t = r;
1980 else
1981 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1983 else
1985 /* Use the cache of larger shared ints. */
1986 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1987 if (tree r = *slot)
1989 /* If there is already an entry for the number verify it's the
1990 same value. */
1991 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1992 /* And return the cached value. */
1993 t = r;
1995 else
1996 /* Otherwise insert this one into the hash table. */
1997 *slot = t;
2000 return t;
2004 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2005 and the rest are zeros. */
2007 tree
2008 build_low_bits_mask (tree type, unsigned bits)
2010 gcc_assert (bits <= TYPE_PRECISION (type));
2012 return wide_int_to_tree (type, wi::mask (bits, false,
2013 TYPE_PRECISION (type)));
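/* Illustrative use (not part of the original source), assuming a 32-bit
   unsigned type node such as uint32_type_node:

     tree mask = build_low_bits_mask (uint32_type_node, 4);

   yields the INTEGER_CST 0xf, since wi::mask (4, false, 32) sets exactly
   the four low bits.  */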
2016 /* Checks that X is an integer constant that can be expressed in (unsigned)
2017 HOST_WIDE_INT without loss of precision. */
2019 bool
2020 cst_and_fits_in_hwi (const_tree x)
2022 return (TREE_CODE (x) == INTEGER_CST
2023 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2026 /* Build a newly constructed VECTOR_CST with the given values of
2027 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2029 tree
2030 make_vector (unsigned log2_npatterns,
2031 unsigned int nelts_per_pattern MEM_STAT_DECL)
2033 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2034 tree t;
2035 unsigned npatterns = 1 << log2_npatterns;
2036 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2037 unsigned length = (sizeof (struct tree_vector)
2038 + (encoded_nelts - 1) * sizeof (tree));
2040 record_node_allocation_statistics (VECTOR_CST, length);
2042 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2044 TREE_SET_CODE (t, VECTOR_CST);
2045 TREE_CONSTANT (t) = 1;
2046 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2047 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2049 return t;
2052 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2053 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2055 tree
2056 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2058 if (vec_safe_length (v) == 0)
2059 return build_zero_cst (type);
2061 unsigned HOST_WIDE_INT idx, nelts;
2062 tree value;
2064 /* We can't construct a VECTOR_CST for a variable number of elements. */
2065 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2066 tree_vector_builder vec (type, nelts, 1);
2067 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2069 if (TREE_CODE (value) == VECTOR_CST)
2071 /* If NELTS is constant then this must be too. */
2072 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2073 for (unsigned i = 0; i < sub_nelts; ++i)
2074 vec.quick_push (VECTOR_CST_ELT (value, i));
2076 else
2077 vec.quick_push (value);
2079 while (vec.length () < nelts)
2080 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2082 return vec.build ();
2085 /* Build a vector of type VECTYPE where all the elements are SCs. */
2086 tree
2087 build_vector_from_val (tree vectype, tree sc)
2089 unsigned HOST_WIDE_INT i, nunits;
2091 if (sc == error_mark_node)
2092 return sc;
2094 /* Verify that the vector type is suitable for SC. Note that there
2095 is some inconsistency in the type-system with respect to restrict
2096 qualifications of pointers. Vector types always have a main-variant
2097 element type and the qualification is applied to the vector-type.
2098 So TREE_TYPE (vector-type) does not return a properly qualified
2099 vector element-type. */
2100 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2101 TREE_TYPE (vectype)));
2103 if (CONSTANT_CLASS_P (sc))
2105 tree_vector_builder v (vectype, 1, 1);
2106 v.quick_push (sc);
2107 return v.build ();
2109 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2110 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2111 else
2113 vec<constructor_elt, va_gc> *v;
2114 vec_alloc (v, nunits);
2115 for (i = 0; i < nunits; ++i)
2116 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2117 return build_constructor (vectype, v);
2121 /* If TYPE is not a vector type, just return SC, otherwise return
2122 build_vector_from_val (TYPE, SC). */
2124 tree
2125 build_uniform_cst (tree type, tree sc)
2127 if (!VECTOR_TYPE_P (type))
2128 return sc;
2130 return build_vector_from_val (type, sc);
2133 /* Build a vector series of type TYPE in which element I has the value
2134 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2135 and a VEC_SERIES_EXPR otherwise. */
2137 tree
2138 build_vec_series (tree type, tree base, tree step)
2140 if (integer_zerop (step))
2141 return build_vector_from_val (type, base);
2142 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2144 tree_vector_builder builder (type, 1, 3);
2145 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (base) + wi::to_wide (step));
2147 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2148 wi::to_wide (elt1) + wi::to_wide (step));
2149 builder.quick_push (base);
2150 builder.quick_push (elt1);
2151 builder.quick_push (elt2);
2152 return builder.build ();
2154 return build2 (VEC_SERIES_EXPR, type, base, step);
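/* Worked example (illustrative): for constant BASE = 1 and STEP = 2 the
   builder above encodes a single pattern of three elements { 1, 3, 5 },
   from which the VECTOR_CST extrapolates the full linear series
   { 1, 3, 5, 7, ... } out to the number of vector elements.  */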
2157 /* Return a vector with the same number of units and number of bits
2158 as VEC_TYPE, but in which the elements are a linear series of unsigned
2159 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2161 tree
2162 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2164 tree index_vec_type = vec_type;
2165 tree index_elt_type = TREE_TYPE (vec_type);
2166 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2167 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2169 index_elt_type = build_nonstandard_integer_type
2170 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2171 index_vec_type = build_vector_type (index_elt_type, nunits);
2174 tree_vector_builder v (index_vec_type, 1, 3);
2175 for (unsigned int i = 0; i < 3; ++i)
2176 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2177 return v.build ();
2180 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2181 elements are A and the rest are B. */
2183 tree
2184 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2186 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2187 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2188 /* Optimize the constant case. */
2189 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2190 count /= 2;
2191 tree_vector_builder builder (vec_type, count, 2);
2192 for (unsigned int i = 0; i < count * 2; ++i)
2193 builder.quick_push (i < num_a ? a : b);
2194 return builder.build ();
2197 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2198 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2200 void
2201 recompute_constructor_flags (tree c)
2203 unsigned int i;
2204 tree val;
2205 bool constant_p = true;
2206 bool side_effects_p = false;
2207 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2209 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2211 /* Mostly ctors will have elts that don't have side-effects, so
2212 the usual case is to scan all the elements. Hence a single
2213 loop for both const and side effects, rather than one loop
2214 each (with early outs). */
2215 if (!TREE_CONSTANT (val))
2216 constant_p = false;
2217 if (TREE_SIDE_EFFECTS (val))
2218 side_effects_p = true;
2221 TREE_SIDE_EFFECTS (c) = side_effects_p;
2222 TREE_CONSTANT (c) = constant_p;
2225 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2226 CONSTRUCTOR C. */
2228 void
2229 verify_constructor_flags (tree c)
2231 unsigned int i;
2232 tree val;
2233 bool constant_p = TREE_CONSTANT (c);
2234 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2235 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2237 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2239 if (constant_p && !TREE_CONSTANT (val))
2240 internal_error ("non-constant element in constant CONSTRUCTOR");
2241 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2242 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2246 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2247 are in the vec pointed to by VALS. */
2248 tree
2249 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2251 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2253 TREE_TYPE (c) = type;
2254 CONSTRUCTOR_ELTS (c) = vals;
2256 recompute_constructor_flags (c);
2258 return c;
2261 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2262 INDEX and VALUE. */
2263 tree
2264 build_constructor_single (tree type, tree index, tree value)
2266 vec<constructor_elt, va_gc> *v;
2267 constructor_elt elt = {index, value};
2269 vec_alloc (v, 1);
2270 v->quick_push (elt);
2272 return build_constructor (type, v);
2276 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2277 are in a list pointed to by VALS. */
2278 tree
2279 build_constructor_from_list (tree type, tree vals)
2281 tree t;
2282 vec<constructor_elt, va_gc> *v = NULL;
2284 if (vals)
2286 vec_alloc (v, list_length (vals));
2287 for (t = vals; t; t = TREE_CHAIN (t))
2288 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2291 return build_constructor (type, v);
2294 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2295 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2296 fields in the constructor remain null. */
2298 tree
2299 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2301 vec<constructor_elt, va_gc> *v = NULL;
2303 for (tree t : vals)
2304 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2306 return build_constructor (type, v);
2309 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2310 of elements, provided as index/value pairs. */
2312 tree
2313 build_constructor_va (tree type, int nelts, ...)
2315 vec<constructor_elt, va_gc> *v = NULL;
2316 va_list p;
2318 va_start (p, nelts);
2319 vec_alloc (v, nelts);
2320 while (nelts--)
2322 tree index = va_arg (p, tree);
2323 tree value = va_arg (p, tree);
2324 CONSTRUCTOR_APPEND_ELT (v, index, value);
2326 va_end (p);
2327 return build_constructor (type, v);
2330 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2332 tree
2333 build_clobber (tree type, enum clobber_kind kind)
2335 tree clobber = build_constructor (type, NULL);
2336 TREE_THIS_VOLATILE (clobber) = true;
2337 CLOBBER_KIND (clobber) = kind;
2338 return clobber;
2341 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2343 tree
2344 build_fixed (tree type, FIXED_VALUE_TYPE f)
2346 tree v;
2347 FIXED_VALUE_TYPE *fp;
2349 v = make_node (FIXED_CST);
2350 fp = ggc_alloc<fixed_value> ();
2351 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2353 TREE_TYPE (v) = type;
2354 TREE_FIXED_CST_PTR (v) = fp;
2355 return v;
2358 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2360 tree
2361 build_real (tree type, REAL_VALUE_TYPE d)
2363 tree v;
2364 int overflow = 0;
2366 /* dconst{0,1,2,m1,half} are used in various places in
2367 the middle-end and optimizers; as an exception, allow
2368 them here even for decimal floating-point types
2369 by converting them to decimal. */
2370 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2371 && (d.cl == rvc_normal || d.cl == rvc_zero)
2372 && !d.decimal)
2374 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2375 decimal_real_from_string (&d, "1");
2376 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2377 decimal_real_from_string (&d, "2");
2378 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2379 decimal_real_from_string (&d, "-1");
2380 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2381 decimal_real_from_string (&d, "0.5");
2382 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2384 /* Make sure to give zero the minimum quantum exponent for
2385 the type (which corresponds to all bits zero). */
2386 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2387 char buf[16];
2388 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2389 decimal_real_from_string (&d, buf);
2391 else
2392 gcc_unreachable ();
2395 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2396 Consider doing it via real_convert now. */
2398 v = make_node (REAL_CST);
2399 TREE_TYPE (v) = type;
2400 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2401 TREE_OVERFLOW (v) = overflow;
2402 return v;
2405 /* Like build_real, but first truncate D to the type. */
2407 tree
2408 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2410 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2413 /* Return a new REAL_CST node whose type is TYPE
2414 and whose value is the integer value of the INTEGER_CST node I. */
2416 REAL_VALUE_TYPE
2417 real_value_from_int_cst (const_tree type, const_tree i)
2419 REAL_VALUE_TYPE d;
2421 /* Clear all bits of the real value type so that we can later do
2422 bitwise comparisons to see if two values are the same. */
2423 memset (&d, 0, sizeof d);
2425 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2426 TYPE_SIGN (TREE_TYPE (i)));
2427 return d;
2430 /* Given a tree representing an integer constant I, return a tree
2431 representing the same value as a floating-point constant of type TYPE. */
2433 tree
2434 build_real_from_int_cst (tree type, const_tree i)
2436 tree v;
2437 int overflow = TREE_OVERFLOW (i);
2439 v = build_real (type, real_value_from_int_cst (type, i));
2441 TREE_OVERFLOW (v) |= overflow;
2442 return v;
2445 /* Return a new REAL_CST node whose type is TYPE
2446 and whose value is the integer value I which has sign SGN. */
2448 tree
2449 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2451 REAL_VALUE_TYPE d;
2453 /* Clear all bits of the real value type so that we can later do
2454 bitwise comparisons to see if two values are the same. */
2455 memset (&d, 0, sizeof d);
2457 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2458 return build_real (type, d);
2461 /* Return a newly constructed STRING_CST node whose value is the LEN
2462 characters at STR when STR is nonnull, or all zeros otherwise.
2463 Note that for a C string literal, LEN should include the trailing NUL.
2464 The TREE_TYPE is not initialized. */
2466 tree
2467 build_string (unsigned len, const char *str /*= NULL */)
2469 /* Do not waste bytes provided by padding of struct tree_string. */
2470 unsigned size = len + offsetof (struct tree_string, str) + 1;
2472 record_node_allocation_statistics (STRING_CST, size);
2474 tree s = (tree) ggc_internal_alloc (size);
2476 memset (s, 0, sizeof (struct tree_typed));
2477 TREE_SET_CODE (s, STRING_CST);
2478 TREE_CONSTANT (s) = 1;
2479 TREE_STRING_LENGTH (s) = len;
2480 if (str)
2481 memcpy (s->string.str, str, len);
2482 else
2483 memset (s->string.str, 0, len);
2484 s->string.str[len] = '\0';
2486 return s;
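/* Illustrative call (not from the original source): for the C literal
   "hi" a front end would typically pass

     tree s = build_string (3, "hi");

   so that LEN counts the literal's trailing NUL.  The node then stores the
   bytes 'h', 'i', '\0' plus one extra terminating NUL beyond
   TREE_STRING_LENGTH, and TREE_TYPE (s) is left for the caller to set.  */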
2489 /* Return a newly constructed COMPLEX_CST node whose value is
2490 specified by the real and imaginary parts REAL and IMAG.
2491 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2492 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2494 tree
2495 build_complex (tree type, tree real, tree imag)
2497 gcc_assert (CONSTANT_CLASS_P (real));
2498 gcc_assert (CONSTANT_CLASS_P (imag));
2500 tree t = make_node (COMPLEX_CST);
2502 TREE_REALPART (t) = real;
2503 TREE_IMAGPART (t) = imag;
2504 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2505 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2506 return t;
2509 /* Build a complex (inf +- 0i), such as for the result of cproj.
2510 TYPE is the complex tree type of the result. If NEG is true, the
2511 imaginary zero is negative. */
2513 tree
2514 build_complex_inf (tree type, bool neg)
2516 REAL_VALUE_TYPE rzero = dconst0;
2518 rzero.sign = neg;
2519 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2520 build_real (TREE_TYPE (type), rzero));
2523 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2524 element is set to 1. In particular, this is 1 + i for complex types. */
2526 tree
2527 build_each_one_cst (tree type)
2529 if (TREE_CODE (type) == COMPLEX_TYPE)
2531 tree scalar = build_one_cst (TREE_TYPE (type));
2532 return build_complex (type, scalar, scalar);
2534 else
2535 return build_one_cst (type);
2538 /* Return a constant of arithmetic type TYPE which is the
2539 multiplicative identity of the set TYPE. */
2541 tree
2542 build_one_cst (tree type)
2544 switch (TREE_CODE (type))
2546 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2547 case POINTER_TYPE: case REFERENCE_TYPE:
2548 case OFFSET_TYPE: case BITINT_TYPE:
2549 return build_int_cst (type, 1);
2551 case REAL_TYPE:
2552 return build_real (type, dconst1);
2554 case FIXED_POINT_TYPE:
2555 /* We can only generate 1 for accum types. */
2556 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2557 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2559 case VECTOR_TYPE:
2561 tree scalar = build_one_cst (TREE_TYPE (type));
2563 return build_vector_from_val (type, scalar);
2566 case COMPLEX_TYPE:
2567 return build_complex (type,
2568 build_one_cst (TREE_TYPE (type)),
2569 build_zero_cst (TREE_TYPE (type)));
2571 default:
2572 gcc_unreachable ();
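/* Note the contrast with build_each_one_cst above (illustrative): for a
   COMPLEX_TYPE, build_one_cst returns the multiplicative identity 1 + 0i,
   whereas build_each_one_cst returns 1 + 1i.  */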
2576 /* Return an integer of type TYPE containing all 1's in as much precision as
2577 it contains, or a complex or vector whose subparts are such integers. */
2579 tree
2580 build_all_ones_cst (tree type)
2582 if (TREE_CODE (type) == COMPLEX_TYPE)
2584 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2585 return build_complex (type, scalar, scalar);
2587 else
2588 return build_minus_one_cst (type);
2591 /* Return a constant of arithmetic type TYPE which is the
2592 opposite of the multiplicative identity of the set TYPE. */
2594 tree
2595 build_minus_one_cst (tree type)
2597 switch (TREE_CODE (type))
2599 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2600 case POINTER_TYPE: case REFERENCE_TYPE:
2601 case OFFSET_TYPE: case BITINT_TYPE:
2602 return build_int_cst (type, -1);
2604 case REAL_TYPE:
2605 return build_real (type, dconstm1);
2607 case FIXED_POINT_TYPE:
2609 /* We can only generate -1 for accum types. */
2609 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2610 return build_fixed (type,
2611 fixed_from_double_int (double_int_minus_one,
2612 SCALAR_TYPE_MODE (type)));
2614 case VECTOR_TYPE:
2616 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2618 return build_vector_from_val (type, scalar);
2621 case COMPLEX_TYPE:
2622 return build_complex (type,
2623 build_minus_one_cst (TREE_TYPE (type)),
2624 build_zero_cst (TREE_TYPE (type)));
2626 default:
2627 gcc_unreachable ();
2631 /* Build 0 constant of type TYPE. This is used by constructor folding
2632 and thus the constant should be represented in memory by
2633 zero(es). */
2635 tree
2636 build_zero_cst (tree type)
2638 switch (TREE_CODE (type))
2640 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2641 case POINTER_TYPE: case REFERENCE_TYPE:
2642 case OFFSET_TYPE: case NULLPTR_TYPE: case BITINT_TYPE:
2643 return build_int_cst (type, 0);
2645 case REAL_TYPE:
2646 return build_real (type, dconst0);
2648 case FIXED_POINT_TYPE:
2649 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2651 case VECTOR_TYPE:
2653 tree scalar = build_zero_cst (TREE_TYPE (type));
2655 return build_vector_from_val (type, scalar);
2658 case COMPLEX_TYPE:
2660 tree zero = build_zero_cst (TREE_TYPE (type));
2662 return build_complex (type, zero, zero);
2665 default:
2666 if (!AGGREGATE_TYPE_P (type))
2667 return fold_convert (type, integer_zero_node);
2668 return build_constructor (type, NULL);
2672 /* Build a constant of integer type TYPE, made of VALUE's bits replicated
2673 every WIDTH bits to fit TYPE's precision. */
2675 tree
2676 build_replicated_int_cst (tree type, unsigned int width, HOST_WIDE_INT value)
2678 int n = ((TYPE_PRECISION (type) + HOST_BITS_PER_WIDE_INT - 1)
2679 / HOST_BITS_PER_WIDE_INT);
2680 unsigned HOST_WIDE_INT low, mask;
2681 HOST_WIDE_INT a[WIDE_INT_MAX_INL_ELTS];
2682 int i;
2684 gcc_assert (n && n <= WIDE_INT_MAX_INL_ELTS);
2686 if (width == HOST_BITS_PER_WIDE_INT)
2687 low = value;
2688 else
2690 mask = ((HOST_WIDE_INT)1 << width) - 1;
2691 low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask);
2694 for (i = 0; i < n; i++)
2695 a[i] = low;
2697 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
2698 return wide_int_to_tree (type, wide_int::from_array (a, n,
2699 TYPE_PRECISION (type)));
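/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT): for
   WIDTH = 8 and VALUE = 0xab, MASK is 0xff and

     low = 0xffffffffffffffff / 0xff * 0xab
         = 0x0101010101010101 * 0xab
         = 0xabababababababab;

   each array element gets that pattern, and wide_int::from_array truncates
   the result to TYPE_PRECISION (type).  */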
2702 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2703 unsigned constant in which only the sign bit is set. Return null
2704 otherwise. */
2706 tree
2707 sign_mask_for (tree type)
2709 /* Avoid having to choose between a real-only sign and a pair of signs.
2710 This could be relaxed if the choice becomes obvious later. */
2711 if (TREE_CODE (type) == COMPLEX_TYPE)
2712 return NULL_TREE;
2714 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2715 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2716 if (!bits || !pow2p_hwi (bits))
2717 return NULL_TREE;
2719 tree inttype = unsigned_type_for (type);
2720 if (!inttype)
2721 return NULL_TREE;
2723 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2724 if (VECTOR_TYPE_P (inttype))
2726 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2727 return build_vector_from_val (inttype, elt);
2729 return wide_int_to_tree (inttype, mask);
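/* Illustrative result (assuming an IEEE binary32 float, whose format has
   ieee_bits == 32): the mask is wi::set_bit_in_zero (31, 32), i.e.
   0x80000000 in the corresponding 32-bit unsigned type; for a vector of
   floats the same constant is broadcast with build_vector_from_val.  */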
2732 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2734 tree
2735 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2737 tree t;
2738 size_t length = (offsetof (struct tree_binfo, base_binfos)
2739 + vec<tree, va_gc>::embedded_size (base_binfos));
2741 record_node_allocation_statistics (TREE_BINFO, length);
2743 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2745 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2747 TREE_SET_CODE (t, TREE_BINFO);
2749 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2751 return t;
2754 /* Create a CASE_LABEL_EXPR tree node and return it. */
2756 tree
2757 build_case_label (tree low_value, tree high_value, tree label_decl)
2759 tree t = make_node (CASE_LABEL_EXPR);
2761 TREE_TYPE (t) = void_type_node;
2762 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2764 CASE_LOW (t) = low_value;
2765 CASE_HIGH (t) = high_value;
2766 CASE_LABEL (t) = label_decl;
2767 CASE_CHAIN (t) = NULL_TREE;
2769 return t;
2772 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2773 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2774 The latter determines the length of the HOST_WIDE_INT vector. */
2776 tree
2777 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2779 tree t;
2780 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2781 + sizeof (struct tree_int_cst));
2783 gcc_assert (len);
2784 record_node_allocation_statistics (INTEGER_CST, length);
2786 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2788 TREE_SET_CODE (t, INTEGER_CST);
2789 TREE_INT_CST_NUNITS (t) = len;
2790 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2791 TREE_CONSTANT (t) = 1;
2793 return t;
2796 /* Build a newly constructed TREE_VEC node of length LEN. */
2798 tree
2799 make_tree_vec (int len MEM_STAT_DECL)
2801 tree t;
2802 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2804 record_node_allocation_statistics (TREE_VEC, length);
2806 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2808 TREE_SET_CODE (t, TREE_VEC);
2809 TREE_VEC_LENGTH (t) = len;
2811 return t;
2814 /* Grow a TREE_VEC node to new length LEN. */
2816 tree
2817 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2819 gcc_assert (TREE_CODE (v) == TREE_VEC);
2821 int oldlen = TREE_VEC_LENGTH (v);
2822 gcc_assert (len > oldlen);
2824 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2825 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2827 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2829 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2831 TREE_VEC_LENGTH (v) = len;
2833 return v;
2836 /* Return true if EXPR is the constant zero, whether it is integral, float or
2837 fixed, and scalar, complex or vector. */
2839 bool
2840 zerop (const_tree expr)
2842 return (integer_zerop (expr)
2843 || real_zerop (expr)
2844 || fixed_zerop (expr));
2847 /* Return true if EXPR is the integer constant zero or a complex constant
2848 of zero, or a location wrapper for such a constant. */
2850 bool
2851 integer_zerop (const_tree expr)
2853 STRIP_ANY_LOCATION_WRAPPER (expr);
2855 switch (TREE_CODE (expr))
2857 case INTEGER_CST:
2858 return wi::to_wide (expr) == 0;
2859 case COMPLEX_CST:
2860 return (integer_zerop (TREE_REALPART (expr))
2861 && integer_zerop (TREE_IMAGPART (expr)));
2862 case VECTOR_CST:
2863 return (VECTOR_CST_NPATTERNS (expr) == 1
2864 && VECTOR_CST_DUPLICATE_P (expr)
2865 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2866 default:
2867 return false;
2871 /* Return true if EXPR is the integer constant one or the corresponding
2872 complex constant, or a location wrapper for such a constant. */
2874 bool
2875 integer_onep (const_tree expr)
2877 STRIP_ANY_LOCATION_WRAPPER (expr);
2879 switch (TREE_CODE (expr))
2881 case INTEGER_CST:
2882 return wi::eq_p (wi::to_widest (expr), 1);
2883 case COMPLEX_CST:
2884 return (integer_onep (TREE_REALPART (expr))
2885 && integer_zerop (TREE_IMAGPART (expr)));
2886 case VECTOR_CST:
2887 return (VECTOR_CST_NPATTERNS (expr) == 1
2888 && VECTOR_CST_DUPLICATE_P (expr)
2889 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2890 default:
2891 return false;
2895 /* Return true if EXPR is the integer constant one. For complex and vector,
2896 return true if every piece is the integer constant one.
2897 Also return true for location wrappers for such a constant. */
2899 bool
2900 integer_each_onep (const_tree expr)
2902 STRIP_ANY_LOCATION_WRAPPER (expr);
2904 if (TREE_CODE (expr) == COMPLEX_CST)
2905 return (integer_onep (TREE_REALPART (expr))
2906 && integer_onep (TREE_IMAGPART (expr)));
2907 else
2908 return integer_onep (expr);
2911 /* Return true if EXPR is an integer containing all 1's in as much precision
2912 as it contains, or a complex or vector whose subparts are such integers,
2913 or a location wrapper for such a constant. */
2915 bool
2916 integer_all_onesp (const_tree expr)
2918 STRIP_ANY_LOCATION_WRAPPER (expr);
2920 if (TREE_CODE (expr) == COMPLEX_CST
2921 && integer_all_onesp (TREE_REALPART (expr))
2922 && integer_all_onesp (TREE_IMAGPART (expr)))
2923 return true;
2925 else if (TREE_CODE (expr) == VECTOR_CST)
2926 return (VECTOR_CST_NPATTERNS (expr) == 1
2927 && VECTOR_CST_DUPLICATE_P (expr)
2928 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2930 else if (TREE_CODE (expr) != INTEGER_CST)
2931 return false;
2933 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2934 == wi::to_wide (expr));
2937 /* Return true if EXPR is the integer constant minus one, or a location
2938 wrapper for such a constant. */
2940 bool
2941 integer_minus_onep (const_tree expr)
2943 STRIP_ANY_LOCATION_WRAPPER (expr);
2945 if (TREE_CODE (expr) == COMPLEX_CST)
2946 return (integer_all_onesp (TREE_REALPART (expr))
2947 && integer_zerop (TREE_IMAGPART (expr)));
2948 else
2949 return integer_all_onesp (expr);
2952 /* Return true if EXPR is an integer constant that is a power of 2 (i.e., has
2953 only one bit on), or a location wrapper for such a constant. */
2955 bool
2956 integer_pow2p (const_tree expr)
2958 STRIP_ANY_LOCATION_WRAPPER (expr);
2960 if (TREE_CODE (expr) == COMPLEX_CST
2961 && integer_pow2p (TREE_REALPART (expr))
2962 && integer_zerop (TREE_IMAGPART (expr)))
2963 return true;
2965 if (TREE_CODE (expr) != INTEGER_CST)
2966 return false;
2968 return wi::popcount (wi::to_wide (expr)) == 1;
2971 /* Return true if EXPR is an integer constant other than zero or a
2972 complex constant other than zero, or a location wrapper for such a
2973 constant. */
2975 bool
2976 integer_nonzerop (const_tree expr)
2978 STRIP_ANY_LOCATION_WRAPPER (expr);
2980 return ((TREE_CODE (expr) == INTEGER_CST
2981 && wi::to_wide (expr) != 0)
2982 || (TREE_CODE (expr) == COMPLEX_CST
2983 && (integer_nonzerop (TREE_REALPART (expr))
2984 || integer_nonzerop (TREE_IMAGPART (expr)))));
2987 /* Return true if EXPR is the integer constant one. For vector,
2988 return true if every piece is the integer constant minus one
2989 (representing the value TRUE).
2990 Also return true for location wrappers for such a constant. */
2992 bool
2993 integer_truep (const_tree expr)
2995 STRIP_ANY_LOCATION_WRAPPER (expr);
2997 if (TREE_CODE (expr) == VECTOR_CST)
2998 return integer_all_onesp (expr);
2999 return integer_onep (expr);
3002 /* Return true if EXPR is the fixed-point constant zero, or a location wrapper
3003 for such a constant. */
3005 bool
3006 fixed_zerop (const_tree expr)
3008 STRIP_ANY_LOCATION_WRAPPER (expr);
3010 return (TREE_CODE (expr) == FIXED_CST
3011 && TREE_FIXED_CST (expr).data.is_zero ());
3014 /* Return the base-2 logarithm of a tree node known to be a
3015 power of two. */
3017 int
3018 tree_log2 (const_tree expr)
3020 if (TREE_CODE (expr) == COMPLEX_CST)
3021 return tree_log2 (TREE_REALPART (expr));
3023 return wi::exact_log2 (wi::to_wide (expr));
3026 /* Similar, but return the largest integer Y such that 2 ** Y is less
3027 than or equal to EXPR. */
3029 int
3030 tree_floor_log2 (const_tree expr)
3032 if (TREE_CODE (expr) == COMPLEX_CST)
3033 return tree_log2 (TREE_REALPART (expr));
3035 return wi::floor_log2 (wi::to_wide (expr));
3038 /* Return number of known trailing zero bits in EXPR, or, if the value of
3039 EXPR is known to be zero, the precision of its type. */
3041 unsigned int
3042 tree_ctz (const_tree expr)
3044 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3045 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3046 return 0;
3048 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3049 switch (TREE_CODE (expr))
3051 case INTEGER_CST:
3052 ret1 = wi::ctz (wi::to_wide (expr));
3053 return MIN (ret1, prec);
3054 case SSA_NAME:
3055 ret1 = wi::ctz (get_nonzero_bits (expr));
3056 return MIN (ret1, prec);
3057 case PLUS_EXPR:
3058 case MINUS_EXPR:
3059 case BIT_IOR_EXPR:
3060 case BIT_XOR_EXPR:
3061 case MIN_EXPR:
3062 case MAX_EXPR:
3063 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3064 if (ret1 == 0)
3065 return ret1;
3066 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3067 return MIN (ret1, ret2);
3068 case POINTER_PLUS_EXPR:
3069 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3070 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3071 /* The second operand is sizetype, which could in theory be
3072 wider than the pointer's precision. Make sure we never
3073 return more than prec. */
3074 ret2 = MIN (ret2, prec);
3075 return MIN (ret1, ret2);
3076 case BIT_AND_EXPR:
3077 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3078 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3079 return MAX (ret1, ret2);
3080 case MULT_EXPR:
3081 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3082 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3083 return MIN (ret1 + ret2, prec);
3084 case LSHIFT_EXPR:
3085 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3086 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3087 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3089 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3090 return MIN (ret1 + ret2, prec);
3092 return ret1;
3093 case RSHIFT_EXPR:
3094 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3095 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3097 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3098 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3099 if (ret1 > ret2)
3100 return ret1 - ret2;
3102 return 0;
3103 case TRUNC_DIV_EXPR:
3104 case CEIL_DIV_EXPR:
3105 case FLOOR_DIV_EXPR:
3106 case ROUND_DIV_EXPR:
3107 case EXACT_DIV_EXPR:
3108 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3109 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3111 int l = tree_log2 (TREE_OPERAND (expr, 1));
3112 if (l >= 0)
3114 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3115 ret2 = l;
3116 if (ret1 > ret2)
3117 return ret1 - ret2;
3120 return 0;
3121 CASE_CONVERT:
3122 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3123 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3124 ret1 = prec;
3125 return MIN (ret1, prec);
3126 case SAVE_EXPR:
3127 return tree_ctz (TREE_OPERAND (expr, 0));
3128 case COND_EXPR:
3129 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3130 if (ret1 == 0)
3131 return 0;
3132 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3133 return MIN (ret1, ret2);
3134 case COMPOUND_EXPR:
3135 return tree_ctz (TREE_OPERAND (expr, 1));
3136 case ADDR_EXPR:
3137 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3138 if (ret1 > BITS_PER_UNIT)
3140 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3141 return MIN (ret1, prec);
3143 return 0;
3144 default:
3145 return 0;
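/* Worked example (illustrative): for MULT_EXPR the trailing-zero counts
   add, so if one operand is known to end in 2 zero bits and the other is
   the constant 8 (3 trailing zeros), the product has at least
   MIN (2 + 3, prec) = 5 trailing zeros; BIT_AND_EXPR instead takes the
   MAX of the two counts.  */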
3149 /* Return true if EXPR is the real constant zero. Trailing zeroes matter for
3150 decimal float constants, so don't return true for them.
3151 Also return true for location wrappers around such a constant. */
3153 bool
3154 real_zerop (const_tree expr)
3156 STRIP_ANY_LOCATION_WRAPPER (expr);
3158 switch (TREE_CODE (expr))
3160 case REAL_CST:
3161 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3162 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3163 case COMPLEX_CST:
3164 return real_zerop (TREE_REALPART (expr))
3165 && real_zerop (TREE_IMAGPART (expr));
3166 case VECTOR_CST:
3168 /* Don't simply check for a duplicate because the predicate
3169 accepts both +0.0 and -0.0. */
3170 unsigned count = vector_cst_encoded_nelts (expr);
3171 for (unsigned int i = 0; i < count; ++i)
3172 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3173 return false;
3174 return true;
3176 default:
3177 return false;
3181 /* Return true if EXPR is the real constant one in real or complex form.
3182 Trailing zeroes matter for decimal float constants, so don't return
3183 true for them.
3184 Also return true for location wrappers around such a constant. */
3186 bool
3187 real_onep (const_tree expr)
3189 STRIP_ANY_LOCATION_WRAPPER (expr);
3191 switch (TREE_CODE (expr))
3193 case REAL_CST:
3194 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3195 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3196 case COMPLEX_CST:
3197 return real_onep (TREE_REALPART (expr))
3198 && real_zerop (TREE_IMAGPART (expr));
3199 case VECTOR_CST:
3200 return (VECTOR_CST_NPATTERNS (expr) == 1
3201 && VECTOR_CST_DUPLICATE_P (expr)
3202 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3203 default:
3204 return false;
3208 /* Return true if EXPR is the real constant minus one. Trailing zeroes
3209 matter for decimal float constants, so don't return true for them.
3210 Also return true for location wrappers around such a constant. */
3212 bool
3213 real_minus_onep (const_tree expr)
3215 STRIP_ANY_LOCATION_WRAPPER (expr);
3217 switch (TREE_CODE (expr))
3219 case REAL_CST:
3220 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3221 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3222 case COMPLEX_CST:
3223 return real_minus_onep (TREE_REALPART (expr))
3224 && real_zerop (TREE_IMAGPART (expr));
3225 case VECTOR_CST:
3226 return (VECTOR_CST_NPATTERNS (expr) == 1
3227 && VECTOR_CST_DUPLICATE_P (expr)
3228 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3229 default:
3230 return false;
3234 /* Return true if T could be a floating point zero. */
3236 bool
3237 real_maybe_zerop (const_tree expr)
3239 switch (TREE_CODE (expr))
3241 case REAL_CST:
3242 /* Can't use real_zerop here, as it always returns false for decimal
3243 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3244 either, as decimal zeros are rvc_normal. */
3245 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3246 case COMPLEX_CST:
3247 return (real_maybe_zerop (TREE_REALPART (expr))
3248 || real_maybe_zerop (TREE_IMAGPART (expr)));
3249 case VECTOR_CST:
3251 unsigned count = vector_cst_encoded_nelts (expr);
3252 for (unsigned int i = 0; i < count; ++i)
3253 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3254 return true;
3255 return false;
3257 default:
3258 /* Perhaps for SSA_NAMEs we could query frange. */
3259 return true;
3263 /* True if EXP is a constant or a cast of a constant. */
3265 bool
3266 really_constant_p (const_tree exp)
3268 /* This is not quite the same as STRIP_NOPS. It does more. */
3269 while (CONVERT_EXPR_P (exp)
3270 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3271 exp = TREE_OPERAND (exp, 0);
3272 return TREE_CONSTANT (exp);
3275 /* Return true if T holds a polynomial pointer difference, storing it in
3276 *VALUE if so. A true return means that T's precision is no greater
3277 than 64 bits, which is the largest address space we support, so *VALUE
3278 never loses precision. However, the signedness of the result does
3279 not necessarily match the signedness of T: sometimes an unsigned type
3280 like sizetype is used to encode a value that is actually negative. */
3282 bool
3283 ptrdiff_tree_p (const_tree t, poly_int64 *value)
3285 if (!t)
3286 return false;
3287 if (TREE_CODE (t) == INTEGER_CST)
3289 if (!cst_and_fits_in_hwi (t))
3290 return false;
3291 *value = int_cst_value (t);
3292 return true;
3294 if (POLY_INT_CST_P (t))
3296 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3297 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3298 return false;
3299 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3300 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3301 return true;
3303 return false;
3306 poly_int64
3307 tree_to_poly_int64 (const_tree t)
3309 gcc_assert (tree_fits_poly_int64_p (t));
3310 if (POLY_INT_CST_P (t))
3311 return poly_int_cst_value (t).force_shwi ();
3312 return TREE_INT_CST_LOW (t);
3315 poly_uint64
3316 tree_to_poly_uint64 (const_tree t)
3318 gcc_assert (tree_fits_poly_uint64_p (t));
3319 if (POLY_INT_CST_P (t))
3320 return poly_int_cst_value (t).force_uhwi ();
3321 return TREE_INT_CST_LOW (t);
3324 /* Return first list element whose TREE_VALUE is ELEM.
3325 Return 0 if ELEM is not in LIST. */
3327 tree
3328 value_member (tree elem, tree list)
3330 while (list)
3332 if (elem == TREE_VALUE (list))
3333 return list;
3334 list = TREE_CHAIN (list);
3336 return NULL_TREE;
3339 /* Return first list element whose TREE_PURPOSE is ELEM.
3340 Return 0 if ELEM is not in LIST. */
3342 tree
3343 purpose_member (const_tree elem, tree list)
3345 while (list)
3347 if (elem == TREE_PURPOSE (list))
3348 return list;
3349 list = TREE_CHAIN (list);
3351 return NULL_TREE;
3354 /* Return true if ELEM is in V. */
3356 bool
3357 vec_member (const_tree elem, vec<tree, va_gc> *v)
3359 unsigned ix;
3360 tree t;
3361 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3362 if (elem == t)
3363 return true;
3364 return false;
3367 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3368 NULL_TREE. */
3370 tree
3371 chain_index (int idx, tree chain)
3373 for (; chain && idx > 0; --idx)
3374 chain = TREE_CHAIN (chain);
3375 return chain;
3378 /* Return true if ELEM is part of the chain CHAIN. */
3380 bool
3381 chain_member (const_tree elem, const_tree chain)
3383 while (chain)
3385 if (elem == chain)
3386 return true;
3387 chain = DECL_CHAIN (chain);
3390 return false;
3393 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3394 We expect a null pointer to mark the end of the chain.
3395 This is the Lisp primitive `length'. */
3397 int
3398 list_length (const_tree t)
3400 const_tree p = t;
3401 #ifdef ENABLE_TREE_CHECKING
3402 const_tree q = t;
3403 #endif
3404 int len = 0;
3406 while (p)
3408 p = TREE_CHAIN (p);
3409 #ifdef ENABLE_TREE_CHECKING
3410 if (len % 2)
3411 q = TREE_CHAIN (q);
3412 gcc_assert (p != q);
3413 #endif
3414 len++;
3417 return len;
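/* The ENABLE_TREE_CHECKING code above is a tortoise-and-hare cycle check:
   Q advances one link for every two links P advances, so if the chain is
   circular P eventually meets Q and the gcc_assert fires instead of the
   loop running forever.  */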
3420 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3421 UNION_TYPE TYPE, or NULL_TREE if none. */
3423 tree
3424 first_field (const_tree type)
3426 tree t = TYPE_FIELDS (type);
3427 while (t && TREE_CODE (t) != FIELD_DECL)
3428 t = TREE_CHAIN (t);
3429 return t;
3432 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3433 UNION_TYPE TYPE, or NULL_TREE if none. */
3435 tree
3436 last_field (const_tree type)
3438 tree last = NULL_TREE;
3440 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3442 if (TREE_CODE (fld) != FIELD_DECL)
3443 continue;
3445 last = fld;
3448 return last;
3451 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3452 by modifying the last node in chain 1 to point to chain 2.
3453 This is the Lisp primitive `nconc'. */
3455 tree
3456 chainon (tree op1, tree op2)
3458 tree t1;
3460 if (!op1)
3461 return op2;
3462 if (!op2)
3463 return op1;
3465 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3466 continue;
3467 TREE_CHAIN (t1) = op2;
3469 #ifdef ENABLE_TREE_CHECKING
3471 tree t2;
3472 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3473 gcc_assert (t2 != t1);
3475 #endif
3477 return op1;
3480 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3482 tree
3483 tree_last (tree chain)
3485 tree next;
3486 if (chain)
3487 while ((next = TREE_CHAIN (chain)))
3488 chain = next;
3489 return chain;
3492 /* Reverse the order of elements in the chain T,
3493 and return the new head of the chain (old last element). */
3495 tree
3496 nreverse (tree t)
3498 tree prev = 0, decl, next;
3499 for (decl = t; decl; decl = next)
3501 /* We shouldn't be using this function to reverse BLOCK chains; we
3502 have blocks_nreverse for that. */
3503 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3504 next = TREE_CHAIN (decl);
3505 TREE_CHAIN (decl) = prev;
3506 prev = decl;
3508 return prev;
3511 /* Return a newly created TREE_LIST node whose
3512 purpose and value fields are PARM and VALUE. */
3514 tree
3515 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3517 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3518 TREE_PURPOSE (t) = parm;
3519 TREE_VALUE (t) = value;
3520 return t;
3523 /* Build a chain of TREE_LIST nodes from a vector. */
3525 tree
3526 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3528 tree ret = NULL_TREE;
3529 tree *pp = &ret;
3530 unsigned int i;
3531 tree t;
3532 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3534 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3535 pp = &TREE_CHAIN (*pp);
3537 return ret;
3540 /* Return a newly created TREE_LIST node whose
3541 purpose and value fields are PURPOSE and VALUE
3542 and whose TREE_CHAIN is CHAIN. */
3544 tree
3545 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3547 tree node;
3549 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3550 memset (node, 0, sizeof (struct tree_common));
3552 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3554 TREE_SET_CODE (node, TREE_LIST);
3555 TREE_CHAIN (node) = chain;
3556 TREE_PURPOSE (node) = purpose;
3557 TREE_VALUE (node) = value;
3558 return node;
3561 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3562 trees. */
3564 vec<tree, va_gc> *
3565 ctor_to_vec (tree ctor)
3567 vec<tree, va_gc> *vec;
3568 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3569 unsigned int ix;
3570 tree val;
3572 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3573 vec->quick_push (val);
3575 return vec;
3578 /* Return the size nominally occupied by an object of type TYPE
3579 when it resides in memory. The value is measured in units of bytes,
3580 and its data type is that normally used for type sizes
3581 (which is the first type created by make_signed_type or
3582 make_unsigned_type). */
3584 tree
3585 size_in_bytes_loc (location_t loc, const_tree type)
3587 tree t;
3589 if (type == error_mark_node)
3590 return integer_zero_node;
3592 type = TYPE_MAIN_VARIANT (type);
3593 t = TYPE_SIZE_UNIT (type);
3595 if (t == 0)
3597 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3598 return size_zero_node;
3601 return t;
3604 /* Return the size of TYPE (in bytes) as a wide integer
3605 or return -1 if the size can vary or is larger than an integer. */
3607 HOST_WIDE_INT
3608 int_size_in_bytes (const_tree type)
3610 tree t;
3612 if (type == error_mark_node)
3613 return 0;
3615 type = TYPE_MAIN_VARIANT (type);
3616 t = TYPE_SIZE_UNIT (type);
3618 if (t && tree_fits_uhwi_p (t))
3619 return TREE_INT_CST_LOW (t);
3620 else
3621 return -1;
3624 /* Return the maximum size of TYPE (in bytes) as a wide integer
3625 or return -1 if the size can vary or is larger than an integer. */
3627 HOST_WIDE_INT
3628 max_int_size_in_bytes (const_tree type)
3630 HOST_WIDE_INT size = -1;
3631 tree size_tree;
3633 /* If this is an array type, check for a possible MAX_SIZE attached. */
3635 if (TREE_CODE (type) == ARRAY_TYPE)
3637 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3639 if (size_tree && tree_fits_uhwi_p (size_tree))
3640 size = tree_to_uhwi (size_tree);
3643 /* If we still haven't been able to get a size, see if the language
3644 can compute a maximum size. */
3646 if (size == -1)
3648 size_tree = lang_hooks.types.max_size (type);
3650 if (size_tree && tree_fits_uhwi_p (size_tree))
3651 size = tree_to_uhwi (size_tree);
3654 return size;
3657 /* Return the bit position of FIELD, in bits from the start of the record.
3658 This is a tree of type bitsizetype. */
3660 tree
3661 bit_position (const_tree field)
3663 return bit_from_pos (DECL_FIELD_OFFSET (field),
3664 DECL_FIELD_BIT_OFFSET (field));
3667 /* Return the byte position of FIELD, in bytes from the start of the record.
3668 This is a tree of type sizetype. */
3670 tree
3671 byte_position (const_tree field)
3673 return byte_from_pos (DECL_FIELD_OFFSET (field),
3674 DECL_FIELD_BIT_OFFSET (field));
3677 /* Likewise, but return as an integer. It must be representable in
3678 that way (since it could be a signed value, we don't have the
3679 option of returning -1 like int_size_in_bytes can). */
3681 HOST_WIDE_INT
3682 int_byte_position (const_tree field)
3684 return tree_to_shwi (byte_position (field));
3687 /* Return, as a tree node, the number of elements for TYPE (which is an
3688 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3690 tree
3691 array_type_nelts (const_tree type)
3693 tree index_type, min, max;
3695 /* If they did it with unspecified bounds, then we should have already
3696 given an error about it before we got here. */
3697 if (! TYPE_DOMAIN (type))
3698 return error_mark_node;
3700 index_type = TYPE_DOMAIN (type);
3701 min = TYPE_MIN_VALUE (index_type);
3702 max = TYPE_MAX_VALUE (index_type);
3704 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3705 if (!max)
3707 /* Zero-sized arrays are represented by the C FE as complete types with
3708 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3709 them as min 0, max -1. */
3710 if (COMPLETE_TYPE_P (type)
3711 && integer_zerop (TYPE_SIZE (type))
3712 && integer_zerop (min))
3713 return build_int_cst (TREE_TYPE (min), -1);
3715 return error_mark_node;
3718 return (integer_zerop (min)
3719 ? max
3720 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
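/* Illustrative example: for the C type int[10], TYPE_DOMAIN has bounds
   0 and 9, so array_type_nelts returns the INTEGER_CST 9 (MIN is zero, so
   MAX is returned directly); a zero-sized array yields -1 as described
   above.  */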
3723 /* If arg is static -- a reference to an object in static storage -- then
3724 return the object. This is not the same as the C meaning of `static'.
3725 If arg isn't static, return NULL. */
3727 tree
3728 staticp (tree arg)
3730 switch (TREE_CODE (arg))
3732 case FUNCTION_DECL:
3733 /* Nested functions are static, even though taking their address will
3734 involve a trampoline as we unnest the nested function and create
3735 the trampoline on the tree level. */
3736 return arg;
3738 case VAR_DECL:
3739 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3740 && ! DECL_THREAD_LOCAL_P (arg)
3741 && ! DECL_DLLIMPORT_P (arg)
3742 ? arg : NULL);
3744 case CONST_DECL:
3745 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3746 ? arg : NULL);
3748 case CONSTRUCTOR:
3749 return TREE_STATIC (arg) ? arg : NULL;
3751 case LABEL_DECL:
3752 case STRING_CST:
3753 return arg;
3755 case COMPONENT_REF:
3756 /* If the thing being referenced is not a field, then it is
3757 something language specific. */
3758 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3760 /* If we are referencing a bitfield, we can't evaluate an
3761 ADDR_EXPR at compile time and so it isn't a constant. */
3762 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3763 return NULL;
3765 return staticp (TREE_OPERAND (arg, 0));
3767 case BIT_FIELD_REF:
3768 return NULL;
3770 case INDIRECT_REF:
3771 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3773 case ARRAY_REF:
3774 case ARRAY_RANGE_REF:
3775 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3776 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3777 return staticp (TREE_OPERAND (arg, 0));
3778 else
3779 return NULL;
3781 case COMPOUND_LITERAL_EXPR:
3782 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3784 default:
3785 return NULL;
3792 /* Return whether OP is a DECL whose address is function-invariant. */
3794 bool
3795 decl_address_invariant_p (const_tree op)
3797 /* The conditions below are slightly less strict than the one in
3798 staticp. */
3800 switch (TREE_CODE (op))
3802 case PARM_DECL:
3803 case RESULT_DECL:
3804 case LABEL_DECL:
3805 case FUNCTION_DECL:
3806 return true;
3808 case VAR_DECL:
3809 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3810 || DECL_THREAD_LOCAL_P (op)
3811 || DECL_CONTEXT (op) == current_function_decl
3812 || decl_function_context (op) == current_function_decl)
3813 return true;
3814 break;
3816 case CONST_DECL:
3817 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3818 || decl_function_context (op) == current_function_decl)
3819 return true;
3820 break;
3822 default:
3823 break;
3826 return false;
3829 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3831 bool
3832 decl_address_ip_invariant_p (const_tree op)
3834 /* The conditions below are slightly less strict than the one in
3835 staticp. */
3837 switch (TREE_CODE (op))
3839 case LABEL_DECL:
3840 case FUNCTION_DECL:
3841 case STRING_CST:
3842 return true;
3844 case VAR_DECL:
3845 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3846 && !DECL_DLLIMPORT_P (op))
3847 || DECL_THREAD_LOCAL_P (op))
3848 return true;
3849 break;
3851 case CONST_DECL:
3852 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3853 return true;
3854 break;
3856 default:
3857 break;
3860 return false;
3864 /* Return true if T is function-invariant (internal function, does
3865 not handle arithmetic; that's handled in skip_simple_arithmetic and
3866 tree_invariant_p). */
3868 static bool
3869 tree_invariant_p_1 (tree t)
3871 tree op;
3873 if (TREE_CONSTANT (t)
3874 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3875 return true;
3877 switch (TREE_CODE (t))
3879 case SAVE_EXPR:
3880 return true;
3882 case ADDR_EXPR:
3883 op = TREE_OPERAND (t, 0);
3884 while (handled_component_p (op))
3886 switch (TREE_CODE (op))
3888 case ARRAY_REF:
3889 case ARRAY_RANGE_REF:
3890 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3891 || TREE_OPERAND (op, 2) != NULL_TREE
3892 || TREE_OPERAND (op, 3) != NULL_TREE)
3893 return false;
3894 break;
3896 case COMPONENT_REF:
3897 if (TREE_OPERAND (op, 2) != NULL_TREE)
3898 return false;
3899 break;
3901 default:;
3903 op = TREE_OPERAND (op, 0);
3906 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3908 default:
3909 break;
3912 return false;
3915 /* Return true if T is function-invariant. */
3917 bool
3918 tree_invariant_p (tree t)
3920 tree inner = skip_simple_arithmetic (t);
3921 return tree_invariant_p_1 (inner);
3924 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3925 Do this to any expression which may be used in more than one place,
3926 but must be evaluated only once.
3928 Normally, expand_expr would reevaluate the expression each time.
3929 Calling save_expr produces something that is evaluated and recorded
3930 the first time expand_expr is called on it. Subsequent calls to
3931 expand_expr just reuse the recorded value.
3933 The call to expand_expr that generates code that actually computes
3934 the value is the first call *at compile time*. Subsequent calls
3935 *at compile time* generate code to use the saved value.
3936 This produces the correct result provided that *at run time* control
3937 always flows through the insns made by the first expand_expr
3938 before reaching the other places where the save_expr was evaluated.
3939 You, the caller of save_expr, must make sure this is so.
3941 Constants, and certain read-only nodes, are returned with no
3942 SAVE_EXPR because that is safe. Expressions containing placeholders
3943 are not touched; see tree.def for an explanation of what these
3944 are used for. */
3946 tree
3947 save_expr (tree expr)
3949 tree inner;
3951 /* If the tree evaluates to a constant, then we don't want to hide that
3952 fact (i.e. this allows further folding, and direct checks for constants).
3953 However, a read-only object that has side effects cannot be bypassed.
3954 Since it is no problem to reevaluate literals, we just return the
3955 literal node. */
3956 inner = skip_simple_arithmetic (expr);
3957 if (TREE_CODE (inner) == ERROR_MARK)
3958 return inner;
3960 if (tree_invariant_p_1 (inner))
3961 return expr;
3963 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3964 it means that the size or offset of some field of an object depends on
3965 the value within another field.
3967 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3968 and some variable since it would then need to be both evaluated once and
3969 evaluated more than once. Front-ends must assure this case cannot
3970 happen by surrounding any such subexpressions in their own SAVE_EXPR
3971 and forcing evaluation at the proper time. */
3972 if (contains_placeholder_p (inner))
3973 return expr;
3975 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3977 /* This expression might be placed ahead of a jump to ensure that the
3978 value was computed on both sides of the jump. So make sure it isn't
3979 eliminated as dead. */
3980 TREE_SIDE_EFFECTS (expr) = 1;
3981 return expr;
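/* Typical use (illustrative sketch, not from the original source): to
   reference an expression X twice while evaluating it only once, a caller
   might write

     x = save_expr (x);
     tree sq = fold_build2 (MULT_EXPR, TREE_TYPE (x), x, x);

   so that both operands share the single SAVE_EXPR.  */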
3984 /* Look inside EXPR into any simple arithmetic operations. Return the
3985 outermost non-arithmetic or non-invariant node. */
3987 tree
3988 skip_simple_arithmetic (tree expr)
3990 /* We don't care about whether this can be used as an lvalue in this
3991 context. */
3992 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3993 expr = TREE_OPERAND (expr, 0);
3995 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3996 a constant, it will be more efficient to not make another SAVE_EXPR since
3997 it will allow better simplification and GCSE will be able to merge the
3998 computations if they actually occur. */
3999 while (true)
4001 if (UNARY_CLASS_P (expr))
4002 expr = TREE_OPERAND (expr, 0);
4003 else if (BINARY_CLASS_P (expr))
4005 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
4006 expr = TREE_OPERAND (expr, 0);
4007 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
4008 expr = TREE_OPERAND (expr, 1);
4009 else
4010 break;
4012 else
4013 break;
4016 return expr;
4019 /* Look inside EXPR into simple arithmetic operations involving constants.
4020 Return the outermost non-arithmetic or non-constant node. */
4022 tree
4023 skip_simple_constant_arithmetic (tree expr)
4025 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4026 expr = TREE_OPERAND (expr, 0);
4028 while (true)
4030 if (UNARY_CLASS_P (expr))
4031 expr = TREE_OPERAND (expr, 0);
4032 else if (BINARY_CLASS_P (expr))
4034 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4035 expr = TREE_OPERAND (expr, 0);
4036 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4037 expr = TREE_OPERAND (expr, 1);
4038 else
4039 break;
4041 else
4042 break;
4045 return expr;
4048 /* Return which tree structure is used by T. */
4050 enum tree_node_structure_enum
4051 tree_node_structure (const_tree t)
4053 const enum tree_code code = TREE_CODE (t);
4054 return tree_node_structure_for_code (code);
4057 /* Set various status flags when building a CALL_EXPR object T. */
4059 static void
4060 process_call_operands (tree t)
4062 bool side_effects = TREE_SIDE_EFFECTS (t);
4063 bool read_only = false;
4064 int i = call_expr_flags (t);
4066 /* Calls have side-effects, except those to const or pure functions. */
4067 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4068 side_effects = true;
4069 /* Propagate TREE_READONLY of arguments for const functions. */
4070 if (i & ECF_CONST)
4071 read_only = true;
4073 if (!side_effects || read_only)
4074 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4076 tree op = TREE_OPERAND (t, i);
4077 if (op && TREE_SIDE_EFFECTS (op))
4078 side_effects = true;
4079 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4080 read_only = false;
4083 TREE_SIDE_EFFECTS (t) = side_effects;
4084 TREE_READONLY (t) = read_only;
4087 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4088 size or offset that depends on a field within a record. */
4090 bool
4091 contains_placeholder_p (const_tree exp)
4093 enum tree_code code;
4095 if (!exp)
4096 return false;
4098 code = TREE_CODE (exp);
4099 if (code == PLACEHOLDER_EXPR)
4100 return true;
4102 switch (TREE_CODE_CLASS (code))
4104 case tcc_reference:
4105 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4106 position computations since they will be converted into a
4107 WITH_RECORD_EXPR involving the reference, which we assume
4108 here will be valid. */
4109 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4111 case tcc_exceptional:
4112 if (code == TREE_LIST)
4113 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4114 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4115 break;
4117 case tcc_unary:
4118 case tcc_binary:
4119 case tcc_comparison:
4120 case tcc_expression:
4121 switch (code)
4123 case COMPOUND_EXPR:
4124 /* Ignoring the first operand isn't quite right, but works best. */
4125 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4127 case COND_EXPR:
4128 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4129 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4130 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4132 case SAVE_EXPR:
4133 /* The save_expr function never wraps anything containing
4134 a PLACEHOLDER_EXPR. */
4135 return false;
4137 default:
4138 break;
4141 switch (TREE_CODE_LENGTH (code))
4143 case 1:
4144 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4145 case 2:
4146 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4147 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4148 default:
4149 return false;
4152 case tcc_vl_exp:
4153 switch (code)
4155 case CALL_EXPR:
4157 const_tree arg;
4158 const_call_expr_arg_iterator iter;
4159 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4160 if (CONTAINS_PLACEHOLDER_P (arg))
4161 return true;
4162 return false;
4164 default:
4165 return false;
4168 default:
4169 return false;
4171 return false;
4174 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4175 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4176 field positions. */
4178 static bool
4179 type_contains_placeholder_1 (const_tree type)
4181 /* If the size contains a placeholder or the parent type (component type in
4182 the case of arrays) involves a placeholder, this type does too. */
4183 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4184 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4185 || (!POINTER_TYPE_P (type)
4186 && TREE_TYPE (type)
4187 && type_contains_placeholder_p (TREE_TYPE (type))))
4188 return true;
4190 /* Now do type-specific checks. Note that the last part of the check above
4191 greatly limits what we have to do below. */
4192 switch (TREE_CODE (type))
4194 case VOID_TYPE:
4195 case OPAQUE_TYPE:
4196 case COMPLEX_TYPE:
4197 case ENUMERAL_TYPE:
4198 case BOOLEAN_TYPE:
4199 case POINTER_TYPE:
4200 case OFFSET_TYPE:
4201 case REFERENCE_TYPE:
4202 case METHOD_TYPE:
4203 case FUNCTION_TYPE:
4204 case VECTOR_TYPE:
4205 case NULLPTR_TYPE:
4206 return false;
4208 case INTEGER_TYPE:
4209 case REAL_TYPE:
4210 case FIXED_POINT_TYPE:
4211 /* Here we just check the bounds. */
4212 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4213 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4215 case ARRAY_TYPE:
4216 /* We have already checked the component type above, so just check
4217 the domain type. Flexible array members have a null domain. */
4218 return TYPE_DOMAIN (type) ?
4219 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4221 case RECORD_TYPE:
4222 case UNION_TYPE:
4223 case QUAL_UNION_TYPE:
4225 tree field;
4227 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4228 if (TREE_CODE (field) == FIELD_DECL
4229 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4230 || (TREE_CODE (type) == QUAL_UNION_TYPE
4231 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4232 || type_contains_placeholder_p (TREE_TYPE (field))))
4233 return true;
4235 return false;
4238 default:
4239 gcc_unreachable ();
4243 /* Wrapper around above function used to cache its result. */
4245 bool
4246 type_contains_placeholder_p (tree type)
4248 bool result;
4250 /* If the contains_placeholder_bits field has been initialized,
4251 then we know the answer. */
4252 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4253 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4255 /* Indicate that we've seen this type node, and the answer is false.
4256 This is what we want to return if we run into recursion via fields. */
4257 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4259 /* Compute the real value. */
4260 result = type_contains_placeholder_1 (type);
4262 /* Store the real value. */
4263 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4265 return result;
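/* The cached field uses a small encoding: 0 means "not computed yet",
   1 means "no" and 2 means "yes", which is why the wrapper returns the
   stored value minus one.  A hedged usage sketch:

     if (type_contains_placeholder_p (TREE_TYPE (field)))
       ...   the type's size or field positions mention a PLACEHOLDER_EXPR

   Recursion through self-referential record fields terminates because the
   type is pre-marked as "no" before the real answer is computed.  */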
4268 /* Push tree EXP onto vector QUEUE if it is not already present. */
4270 static void
4271 push_without_duplicates (tree exp, vec<tree> *queue)
4273 unsigned int i;
4274 tree iter;
4276 FOR_EACH_VEC_ELT (*queue, i, iter)
4277 if (simple_cst_equal (iter, exp) == 1)
4278 break;
4280 if (!iter)
4281 queue->safe_push (exp);
4284 /* Given a tree EXP, find all occurrences of references to fields
4285 in a PLACEHOLDER_EXPR and place them in vector REFS without
4286 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4287 we assume here that EXP contains only arithmetic expressions
4288 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4289 argument list. */
4291 void
4292 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4294 enum tree_code code = TREE_CODE (exp);
4295 tree inner;
4296 int i;
4298 /* We handle TREE_LIST and COMPONENT_REF separately. */
4299 if (code == TREE_LIST)
4301 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4302 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4304 else if (code == COMPONENT_REF)
4306 for (inner = TREE_OPERAND (exp, 0);
4307 REFERENCE_CLASS_P (inner);
4308 inner = TREE_OPERAND (inner, 0))
4311 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4312 push_without_duplicates (exp, refs);
4313 else
4314 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4316 else
4317 switch (TREE_CODE_CLASS (code))
4319 case tcc_constant:
4320 break;
4322 case tcc_declaration:
4323 /* Variables allocated to static storage can stay. */
4324 if (!TREE_STATIC (exp))
4325 push_without_duplicates (exp, refs);
4326 break;
4328 case tcc_expression:
4329 /* This is the pattern built in ada/make_aligning_type. */
4330 if (code == ADDR_EXPR
4331 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4333 push_without_duplicates (exp, refs);
4334 break;
4337 /* Fall through. */
4339 case tcc_exceptional:
4340 case tcc_unary:
4341 case tcc_binary:
4342 case tcc_comparison:
4343 case tcc_reference:
4344 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4345 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4346 break;
4348 case tcc_vl_exp:
4349 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4350 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4351 break;
4353 default:
4354 gcc_unreachable ();
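/* Hedged usage sketch (hypothetical names): to discover which
   placeholder-based references a self-referential size expression depends
   on, one can collect them into a vector:

     auto_vec<tree> refs;
     find_placeholder_in_expr (TYPE_SIZE (type), &refs);

     unsigned i;
     tree r;
     FOR_EACH_VEC_ELT (refs, i, r)
       ...   each R is a reference into a PLACEHOLDER_EXPR, a non-static
             VAR_DECL or a CONST_DECL, with no duplicates

   FIND_PLACEHOLDER_IN_EXPR is the macro wrapper that skips null or
   constant expressions.  */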
4358 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4359 return a tree with all occurrences of references to F in a
4360 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4361 CONST_DECLs. Note that we assume here that EXP contains only
4362 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4363 occurring only in their argument list. */
4365 tree
4366 substitute_in_expr (tree exp, tree f, tree r)
4368 enum tree_code code = TREE_CODE (exp);
4369 tree op0, op1, op2, op3;
4370 tree new_tree;
4372 /* We handle TREE_LIST and COMPONENT_REF separately. */
4373 if (code == TREE_LIST)
4375 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4376 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4377 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4378 return exp;
4380 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4382 else if (code == COMPONENT_REF)
4384 tree inner;
4386 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4387 and it is the right field, replace it with R. */
4388 for (inner = TREE_OPERAND (exp, 0);
4389 REFERENCE_CLASS_P (inner);
4390 inner = TREE_OPERAND (inner, 0))
4393 /* The field. */
4394 op1 = TREE_OPERAND (exp, 1);
4396 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4397 return r;
4399 /* If this expression hasn't been completed yet, leave it alone. */
4400 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4401 return exp;
4403 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4404 if (op0 == TREE_OPERAND (exp, 0))
4405 return exp;
4407 new_tree
4408 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4410 else
4411 switch (TREE_CODE_CLASS (code))
4413 case tcc_constant:
4414 return exp;
4416 case tcc_declaration:
4417 if (exp == f)
4418 return r;
4419 else
4420 return exp;
4422 case tcc_expression:
4423 if (exp == f)
4424 return r;
4426 /* Fall through. */
4428 case tcc_exceptional:
4429 case tcc_unary:
4430 case tcc_binary:
4431 case tcc_comparison:
4432 case tcc_reference:
4433 switch (TREE_CODE_LENGTH (code))
4435 case 0:
4436 return exp;
4438 case 1:
4439 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4440 if (op0 == TREE_OPERAND (exp, 0))
4441 return exp;
4443 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4444 break;
4446 case 2:
4447 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4448 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4450 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4451 return exp;
4453 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4454 break;
4456 case 3:
4457 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4458 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4459 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4461 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4462 && op2 == TREE_OPERAND (exp, 2))
4463 return exp;
4465 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4466 break;
4468 case 4:
4469 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4470 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4471 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4472 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4474 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4475 && op2 == TREE_OPERAND (exp, 2)
4476 && op3 == TREE_OPERAND (exp, 3))
4477 return exp;
4479 new_tree
4480 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4481 break;
4483 default:
4484 gcc_unreachable ();
4486 break;
4488 case tcc_vl_exp:
4490 int i;
4492 new_tree = NULL_TREE;
4494 /* If we are trying to replace F with a constant or with another
4495 instance of one of the arguments of the call, inline back
4496 functions that do nothing but compute a value from
4497 the arguments they are passed. This makes it possible to
4498 partially or entirely fold the replacement expression. */
4499 if (code == CALL_EXPR)
4501 bool maybe_inline = false;
4502 if (CONSTANT_CLASS_P (r))
4503 maybe_inline = true;
4504 else
4505 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4506 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4508 maybe_inline = true;
4509 break;
4511 if (maybe_inline)
4513 tree t = maybe_inline_call_in_expr (exp);
4514 if (t)
4515 return SUBSTITUTE_IN_EXPR (t, f, r);
4519 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4521 tree op = TREE_OPERAND (exp, i);
4522 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4523 if (new_op != op)
4525 if (!new_tree)
4526 new_tree = copy_node (exp);
4527 TREE_OPERAND (new_tree, i) = new_op;
4531 if (new_tree)
4533 new_tree = fold (new_tree);
4534 if (TREE_CODE (new_tree) == CALL_EXPR)
4535 process_call_operands (new_tree);
4537 else
4538 return exp;
4540 break;
4542 default:
4543 gcc_unreachable ();
4546 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4548 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4549 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4551 return new_tree;
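/* Illustrative sketch (hypothetical trees): for a self-referential size
   such as "PLACEHOLDER_EXPR.len * 4", substituting the FIELD_DECL for
   'len' with a constant lets the whole size fold:

     tree new_size = substitute_in_expr (size, len_field, size_int (10));

   Unchanged subtrees are shared with the input, as the early "return exp"
   paths above show, so this is cheap when nothing matches.  */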
4554 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4555 for it within OBJ, a tree that is an object or a chain of references. */
4557 tree
4558 substitute_placeholder_in_expr (tree exp, tree obj)
4560 enum tree_code code = TREE_CODE (exp);
4561 tree op0, op1, op2, op3;
4562 tree new_tree;
4564 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4565 in the chain of OBJ. */
4566 if (code == PLACEHOLDER_EXPR)
4568 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4569 tree elt;
4571 for (elt = obj; elt != 0;
4572 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4573 || TREE_CODE (elt) == COND_EXPR)
4574 ? TREE_OPERAND (elt, 1)
4575 : (REFERENCE_CLASS_P (elt)
4576 || UNARY_CLASS_P (elt)
4577 || BINARY_CLASS_P (elt)
4578 || VL_EXP_CLASS_P (elt)
4579 || EXPRESSION_CLASS_P (elt))
4580 ? TREE_OPERAND (elt, 0) : 0))
4581 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4582 return elt;
4584 for (elt = obj; elt != 0;
4585 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4586 || TREE_CODE (elt) == COND_EXPR)
4587 ? TREE_OPERAND (elt, 1)
4588 : (REFERENCE_CLASS_P (elt)
4589 || UNARY_CLASS_P (elt)
4590 || BINARY_CLASS_P (elt)
4591 || VL_EXP_CLASS_P (elt)
4592 || EXPRESSION_CLASS_P (elt))
4593 ? TREE_OPERAND (elt, 0) : 0))
4594 if (POINTER_TYPE_P (TREE_TYPE (elt))
4595 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4596 == need_type))
4597 return fold_build1 (INDIRECT_REF, need_type, elt);
4599 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4600 survives until RTL generation, there will be an error. */
4601 return exp;
4604 /* TREE_LIST is special because we need to look at TREE_VALUE
4605 and TREE_CHAIN, not TREE_OPERANDS. */
4606 else if (code == TREE_LIST)
4608 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4609 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4610 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4611 return exp;
4613 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4615 else
4616 switch (TREE_CODE_CLASS (code))
4618 case tcc_constant:
4619 case tcc_declaration:
4620 return exp;
4622 case tcc_exceptional:
4623 case tcc_unary:
4624 case tcc_binary:
4625 case tcc_comparison:
4626 case tcc_expression:
4627 case tcc_reference:
4628 case tcc_statement:
4629 switch (TREE_CODE_LENGTH (code))
4631 case 0:
4632 return exp;
4634 case 1:
4635 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4636 if (op0 == TREE_OPERAND (exp, 0))
4637 return exp;
4639 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4640 break;
4642 case 2:
4643 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4644 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4646 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4647 return exp;
4649 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4650 break;
4652 case 3:
4653 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4654 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4655 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4657 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4658 && op2 == TREE_OPERAND (exp, 2))
4659 return exp;
4661 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4662 break;
4664 case 4:
4665 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4666 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4667 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4668 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4670 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4671 && op2 == TREE_OPERAND (exp, 2)
4672 && op3 == TREE_OPERAND (exp, 3))
4673 return exp;
4675 new_tree
4676 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4677 break;
4679 default:
4680 gcc_unreachable ();
4682 break;
4684 case tcc_vl_exp:
4686 int i;
4688 new_tree = NULL_TREE;
4690 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4692 tree op = TREE_OPERAND (exp, i);
4693 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4694 if (new_op != op)
4696 if (!new_tree)
4697 new_tree = copy_node (exp);
4698 TREE_OPERAND (new_tree, i) = new_op;
4702 if (new_tree)
4704 new_tree = fold (new_tree);
4705 if (TREE_CODE (new_tree) == CALL_EXPR)
4706 process_call_operands (new_tree);
4708 else
4709 return exp;
4711 break;
4713 default:
4714 gcc_unreachable ();
4717 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4719 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4720 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4722 return new_tree;
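/* Hedged usage sketch: once the object carrying a self-referential type is
   known, its placeholder-based sizes can be resolved against it, e.g.

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);

   where OBJ is an object or chain of references of (a variant of) TYPE.
   The convenience macro skips the call for null or already-constant
   expressions; the names used here are illustrative.  */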
4726 /* Subroutine of stabilize_reference; this is called for subtrees of
4727 references. Any expression with side-effects must be put in a SAVE_EXPR
4728 to ensure that it is only evaluated once.
4730 We don't put SAVE_EXPR nodes around everything, because assigning very
4731 simple expressions to temporaries causes us to miss good opportunities
4732 for optimizations. Among other things, the opportunity to fold in the
4733 addition of a constant into an addressing mode often gets lost, e.g.
4734 "y[i+1] += x;". In general, we take the approach that we should not make
4735 an assignment unless we are forced into it - i.e., that any non-side effect
4736 operator should be allowed, and that cse should take care of coalescing
4737 multiple utterances of the same expression should that prove fruitful. */
4739 static tree
4740 stabilize_reference_1 (tree e)
4742 tree result;
4743 enum tree_code code = TREE_CODE (e);
4745 /* We cannot ignore const expressions because it might be a reference
4746 to a const array whose index contains side-effects. But we can
4747 ignore things that are actually constant or that have already been
4748 handled by this function. */
4750 if (tree_invariant_p (e))
4751 return e;
4753 switch (TREE_CODE_CLASS (code))
4755 case tcc_exceptional:
4756 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4757 have side-effects. */
4758 if (code == STATEMENT_LIST)
4759 return save_expr (e);
4760 /* FALLTHRU */
4761 case tcc_type:
4762 case tcc_declaration:
4763 case tcc_comparison:
4764 case tcc_statement:
4765 case tcc_expression:
4766 case tcc_reference:
4767 case tcc_vl_exp:
4768 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4769 so that it will only be evaluated once. */
4770 /* The reference (r) and comparison (<) classes could be handled as
4771 below, but it is generally faster to only evaluate them once. */
4772 if (TREE_SIDE_EFFECTS (e))
4773 return save_expr (e);
4774 return e;
4776 case tcc_constant:
4777 /* Constants need no processing. In fact, we should never reach
4778 here. */
4779 return e;
4781 case tcc_binary:
4782 /* Division is slow and tends to be compiled with jumps,
4783 especially the division by powers of 2 that is often
4784 found inside of an array reference. So do it just once. */
4785 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4786 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4787 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4788 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4789 return save_expr (e);
4790 /* Recursively stabilize each operand. */
4791 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4792 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4793 break;
4795 case tcc_unary:
4796 /* Recursively stabilize each operand. */
4797 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4798 break;
4800 default:
4801 gcc_unreachable ();
4804 TREE_TYPE (result) = TREE_TYPE (e);
4805 TREE_READONLY (result) = TREE_READONLY (e);
4806 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4807 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4809 return result;
4812 /* Stabilize a reference so that we can use it any number of times
4813 without causing its operands to be evaluated more than once.
4814 Returns the stabilized reference. This works by means of save_expr,
4815 so see the caveats in the comments about save_expr.
4817 Also allows conversion expressions whose operands are references.
4818 Any other kind of expression is returned unchanged. */
4820 tree
4821 stabilize_reference (tree ref)
4823 tree result;
4824 enum tree_code code = TREE_CODE (ref);
4826 switch (code)
4828 case VAR_DECL:
4829 case PARM_DECL:
4830 case RESULT_DECL:
4831 /* No action is needed in this case. */
4832 return ref;
4834 CASE_CONVERT:
4835 case FLOAT_EXPR:
4836 case FIX_TRUNC_EXPR:
4837 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4838 break;
4840 case INDIRECT_REF:
4841 result = build_nt (INDIRECT_REF,
4842 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4843 break;
4845 case COMPONENT_REF:
4846 result = build_nt (COMPONENT_REF,
4847 stabilize_reference (TREE_OPERAND (ref, 0)),
4848 TREE_OPERAND (ref, 1), NULL_TREE);
4849 break;
4851 case BIT_FIELD_REF:
4852 result = build_nt (BIT_FIELD_REF,
4853 stabilize_reference (TREE_OPERAND (ref, 0)),
4854 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4855 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4856 break;
4858 case ARRAY_REF:
4859 result = build_nt (ARRAY_REF,
4860 stabilize_reference (TREE_OPERAND (ref, 0)),
4861 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4862 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4863 break;
4865 case ARRAY_RANGE_REF:
4866 result = build_nt (ARRAY_RANGE_REF,
4867 stabilize_reference (TREE_OPERAND (ref, 0)),
4868 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4869 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4870 break;
4872 case COMPOUND_EXPR:
4873 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4874 it wouldn't be ignored. This matters when dealing with
4875 volatiles. */
4876 return stabilize_reference_1 (ref);
4878 /* If arg isn't a kind of lvalue we recognize, make no change.
4879 Caller should recognize the error for an invalid lvalue. */
4880 default:
4881 return ref;
4883 case ERROR_MARK:
4884 return error_mark_node;
4887 TREE_TYPE (result) = TREE_TYPE (ref);
4888 TREE_READONLY (result) = TREE_READONLY (ref);
4889 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4890 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4891 protected_set_expr_location (result, EXPR_LOCATION (ref));
4893 return result;
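/* Sketch of intended use (hypothetical names): a front end lowering
   "ref op= value" needs to evaluate the lvalue's operands only once even
   though the lvalue is used twice:

     tree lhs = stabilize_reference (ref);
     tree rhs = build2 (PLUS_EXPR, type, lhs, value);
     tree stmt = build2 (MODIFY_EXPR, type, lhs, rhs);

   For REF like a[i++], the index ends up wrapped in a SAVE_EXPR by
   stabilize_reference_1, so the increment happens exactly once.  */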
4896 /* Low-level constructors for expressions. */
4898 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4899 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4901 void
4902 recompute_tree_invariant_for_addr_expr (tree t)
4904 tree node;
4905 bool tc = true, se = false;
4907 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4909 /* We started out assuming this address is both invariant and constant and
4910 has no side effects. Now go down any handled components and see if
4911 any of them involve offsets that are either non-constant or non-invariant.
4912 Also check for side-effects.
4914 ??? Note that this code makes no attempt to deal with the case where
4915 taking the address of something causes a copy due to misalignment. */
4917 #define UPDATE_FLAGS(NODE) \
4918 do { tree _node = (NODE); \
4919 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4920 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4922 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4923 node = TREE_OPERAND (node, 0))
4925 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4926 array reference (probably made temporarily by the G++ front end),
4927 so ignore all the operands. */
4928 if ((TREE_CODE (node) == ARRAY_REF
4929 || TREE_CODE (node) == ARRAY_RANGE_REF)
4930 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4932 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4933 if (TREE_OPERAND (node, 2))
4934 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4935 if (TREE_OPERAND (node, 3))
4936 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4938 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4939 FIELD_DECL, apparently. The G++ front end can put something else
4940 there, at least temporarily. */
4941 else if (TREE_CODE (node) == COMPONENT_REF
4942 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4944 if (TREE_OPERAND (node, 2))
4945 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4949 node = lang_hooks.expr_to_decl (node, &tc, &se);
4951 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4952 the address, since &(*a)->b is a form of addition. If it's a constant, the
4953 address is constant too. If it's a decl, its address is constant if the
4954 decl is static. Everything else is not constant and, furthermore,
4955 taking the address of a volatile variable is not volatile. */
4956 if (INDIRECT_REF_P (node)
4957 || TREE_CODE (node) == MEM_REF)
4958 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4959 else if (CONSTANT_CLASS_P (node))
4961 else if (DECL_P (node))
4962 tc &= (staticp (node) != NULL_TREE);
4963 else
4965 tc = false;
4966 se |= TREE_SIDE_EFFECTS (node);
4970 TREE_CONSTANT (t) = tc;
4971 TREE_SIDE_EFFECTS (t) = se;
4972 #undef UPDATE_FLAGS
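/* Illustrative sketch: after rewriting the operand of an ADDR_EXPR in
   place, callers re-derive the flags instead of guessing:

     TREE_OPERAND (addr, 0) = new_ref;
     recompute_tree_invariant_for_addr_expr (addr);

   For example, the address of a static variable's field is TREE_CONSTANT,
   while &local_array[i] is not unless I is constant.  ADDR and NEW_REF are
   hypothetical names.  */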
4975 /* Build an expression of code CODE, data type TYPE, and operands as
4976 specified. Expressions and reference nodes can be created this way.
4977 Constants, decls, types and misc nodes cannot be.
4979 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4980 enough for all extant tree codes. */
4982 tree
4983 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4985 tree t;
4987 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4989 t = make_node (code PASS_MEM_STAT);
4990 TREE_TYPE (t) = tt;
4992 return t;
4995 tree
4996 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4998 int length = sizeof (struct tree_exp);
4999 tree t;
5001 record_node_allocation_statistics (code, length);
5003 gcc_assert (TREE_CODE_LENGTH (code) == 1);
5005 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
5007 memset (t, 0, sizeof (struct tree_common));
5009 TREE_SET_CODE (t, code);
5011 TREE_TYPE (t) = type;
5012 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
5013 TREE_OPERAND (t, 0) = node;
5014 if (node && !TYPE_P (node))
5016 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
5017 TREE_READONLY (t) = TREE_READONLY (node);
5020 if (TREE_CODE_CLASS (code) == tcc_statement)
5022 if (code != DEBUG_BEGIN_STMT)
5023 TREE_SIDE_EFFECTS (t) = 1;
5025 else switch (code)
5027 case VA_ARG_EXPR:
5028 /* All of these have side-effects, no matter what their
5029 operands are. */
5030 TREE_SIDE_EFFECTS (t) = 1;
5031 TREE_READONLY (t) = 0;
5032 break;
5034 case INDIRECT_REF:
5035 /* Whether a dereference is readonly has nothing to do with whether
5036 its operand is readonly. */
5037 TREE_READONLY (t) = 0;
5038 break;
5040 case ADDR_EXPR:
5041 if (node)
5042 recompute_tree_invariant_for_addr_expr (t);
5043 break;
5045 default:
5046 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5047 && node && !TYPE_P (node)
5048 && TREE_CONSTANT (node))
5049 TREE_CONSTANT (t) = 1;
5050 if (TREE_CODE_CLASS (code) == tcc_reference
5051 && node && TREE_THIS_VOLATILE (node))
5052 TREE_THIS_VOLATILE (t) = 1;
5053 break;
5056 return t;
5059 #define PROCESS_ARG(N) \
5060 do { \
5061 TREE_OPERAND (t, N) = arg##N; \
5062 if (arg##N && !TYPE_P (arg##N)) \
5064 if (TREE_SIDE_EFFECTS (arg##N)) \
5065 side_effects = 1; \
5066 if (!TREE_READONLY (arg##N) \
5067 && !CONSTANT_CLASS_P (arg##N)) \
5068 (void) (read_only = 0); \
5069 if (!TREE_CONSTANT (arg##N)) \
5070 (void) (constant = 0); \
5072 } while (0)
5074 tree
5075 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5077 bool constant, read_only, side_effects, div_by_zero;
5078 tree t;
5080 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5082 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5083 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5084 /* When sizetype precision doesn't match that of pointers
5085 we need to be able to build explicit extensions or truncations
5086 of the offset argument. */
5087 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5088 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5089 && TREE_CODE (arg1) == INTEGER_CST);
5091 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5092 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5093 && ptrofftype_p (TREE_TYPE (arg1)));
5095 t = make_node (code PASS_MEM_STAT);
5096 TREE_TYPE (t) = tt;
5098 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5099 result based on those same flags for the arguments. But if the
5100 arguments aren't really even `tree' expressions, we shouldn't be trying
5101 to do this. */
5103 /* Expressions without side effects may be constant if their
5104 arguments are as well. */
5105 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5106 || TREE_CODE_CLASS (code) == tcc_binary);
5107 read_only = 1;
5108 side_effects = TREE_SIDE_EFFECTS (t);
5110 switch (code)
5112 case TRUNC_DIV_EXPR:
5113 case CEIL_DIV_EXPR:
5114 case FLOOR_DIV_EXPR:
5115 case ROUND_DIV_EXPR:
5116 case EXACT_DIV_EXPR:
5117 case CEIL_MOD_EXPR:
5118 case FLOOR_MOD_EXPR:
5119 case ROUND_MOD_EXPR:
5120 case TRUNC_MOD_EXPR:
5121 div_by_zero = integer_zerop (arg1);
5122 break;
5123 default:
5124 div_by_zero = false;
5127 PROCESS_ARG (0);
5128 PROCESS_ARG (1);
5130 TREE_SIDE_EFFECTS (t) = side_effects;
5131 if (code == MEM_REF)
5133 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5135 tree o = TREE_OPERAND (arg0, 0);
5136 TREE_READONLY (t) = TREE_READONLY (o);
5137 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5140 else
5142 TREE_READONLY (t) = read_only;
5143 /* Don't mark X / 0 as constant. */
5144 TREE_CONSTANT (t) = constant && !div_by_zero;
5145 TREE_THIS_VOLATILE (t)
5146 = (TREE_CODE_CLASS (code) == tcc_reference
5147 && arg0 && TREE_THIS_VOLATILE (arg0));
5150 return t;
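/* Assumed example of the divide-by-zero special case above: building an
   explicit 1 / 0 keeps TREE_CONSTANT clear, so later folding does not try
   to treat it as a constant:

     tree t = build2 (TRUNC_DIV_EXPR, integer_type_node,
                      integer_one_node, integer_zero_node);

   Here TREE_CONSTANT (t) is 0 even though both operands are constants.  */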
5154 tree
5155 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5156 tree arg2 MEM_STAT_DECL)
5158 bool constant, read_only, side_effects;
5159 tree t;
5161 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5162 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5164 t = make_node (code PASS_MEM_STAT);
5165 TREE_TYPE (t) = tt;
5167 read_only = 1;
5169 /* As a special exception, if COND_EXPR has NULL branches, we
5170 assume that it is a gimple statement and always consider
5171 it to have side effects. */
5172 if (code == COND_EXPR
5173 && tt == void_type_node
5174 && arg1 == NULL_TREE
5175 && arg2 == NULL_TREE)
5176 side_effects = true;
5177 else
5178 side_effects = TREE_SIDE_EFFECTS (t);
5180 PROCESS_ARG (0);
5181 PROCESS_ARG (1);
5182 PROCESS_ARG (2);
5184 if (code == COND_EXPR)
5185 TREE_READONLY (t) = read_only;
5187 TREE_SIDE_EFFECTS (t) = side_effects;
5188 TREE_THIS_VOLATILE (t)
5189 = (TREE_CODE_CLASS (code) == tcc_reference
5190 && arg0 && TREE_THIS_VOLATILE (arg0));
5192 return t;
5195 tree
5196 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5197 tree arg2, tree arg3 MEM_STAT_DECL)
5199 bool constant, read_only, side_effects;
5200 tree t;
5202 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5204 t = make_node (code PASS_MEM_STAT);
5205 TREE_TYPE (t) = tt;
5207 side_effects = TREE_SIDE_EFFECTS (t);
5209 PROCESS_ARG (0);
5210 PROCESS_ARG (1);
5211 PROCESS_ARG (2);
5212 PROCESS_ARG (3);
5214 TREE_SIDE_EFFECTS (t) = side_effects;
5215 TREE_THIS_VOLATILE (t)
5216 = (TREE_CODE_CLASS (code) == tcc_reference
5217 && arg0 && TREE_THIS_VOLATILE (arg0));
5219 return t;
5222 tree
5223 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5224 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5226 bool constant, read_only, side_effects;
5227 tree t;
5229 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5231 t = make_node (code PASS_MEM_STAT);
5232 TREE_TYPE (t) = tt;
5234 side_effects = TREE_SIDE_EFFECTS (t);
5236 PROCESS_ARG (0);
5237 PROCESS_ARG (1);
5238 PROCESS_ARG (2);
5239 PROCESS_ARG (3);
5240 PROCESS_ARG (4);
5242 TREE_SIDE_EFFECTS (t) = side_effects;
5243 if (code == TARGET_MEM_REF)
5245 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5247 tree o = TREE_OPERAND (arg0, 0);
5248 TREE_READONLY (t) = TREE_READONLY (o);
5249 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5252 else
5253 TREE_THIS_VOLATILE (t)
5254 = (TREE_CODE_CLASS (code) == tcc_reference
5255 && arg0 && TREE_THIS_VOLATILE (arg0));
5257 return t;
5260 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5261 on the pointer PTR. */
5263 tree
5264 build_simple_mem_ref_loc (location_t loc, tree ptr)
5266 poly_int64 offset = 0;
5267 tree ptype = TREE_TYPE (ptr);
5268 tree tem;
5269 /* For convenience allow addresses that collapse to a simple base
5270 and offset. */
5271 if (TREE_CODE (ptr) == ADDR_EXPR
5272 && (handled_component_p (TREE_OPERAND (ptr, 0))
5273 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5275 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5276 gcc_assert (ptr);
5277 if (TREE_CODE (ptr) == MEM_REF)
5279 offset += mem_ref_offset (ptr).force_shwi ();
5280 ptr = TREE_OPERAND (ptr, 0);
5282 else
5283 ptr = build_fold_addr_expr (ptr);
5284 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5286 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5287 ptr, build_int_cst (ptype, offset));
5288 SET_EXPR_LOCATION (tem, loc);
5289 return tem;
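/* Hedged usage sketch: dereference a pointer with plain INDIRECT_REF
   semantics, typically via the location-less wrapper macro:

     tree deref = build_simple_mem_ref (ptr);

   When PTR is something like '&s.f', the address is collapsed and the
   MEM_REF is built on '&s' with the accumulated byte offset, as the code
   above shows.  PTR here is a hypothetical gimple-register pointer.  */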
5292 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5294 poly_offset_int
5295 mem_ref_offset (const_tree t)
5297 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5298 SIGNED);
5301 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5302 offsetted by OFFSET units. */
5304 tree
5305 build_invariant_address (tree type, tree base, poly_int64 offset)
5307 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5308 build_fold_addr_expr (base),
5309 build_int_cst (ptr_type_node, offset));
5310 tree addr = build1 (ADDR_EXPR, type, ref);
5311 recompute_tree_invariant_for_addr_expr (addr);
5312 return addr;
5315 /* Similar except don't specify the TREE_TYPE
5316 and leave the TREE_SIDE_EFFECTS as 0.
5317 It is permissible for arguments to be null,
5318 or even garbage if their values do not matter. */
5320 tree
5321 build_nt (enum tree_code code, ...)
5323 tree t;
5324 int length;
5325 int i;
5326 va_list p;
5328 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5330 va_start (p, code);
5332 t = make_node (code);
5333 length = TREE_CODE_LENGTH (code);
5335 for (i = 0; i < length; i++)
5336 TREE_OPERAND (t, i) = va_arg (p, tree);
5338 va_end (p);
5339 return t;
5342 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5343 tree vec. */
5345 tree
5346 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5348 tree ret, t;
5349 unsigned int ix;
5351 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5352 CALL_EXPR_FN (ret) = fn;
5353 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5354 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5355 CALL_EXPR_ARG (ret, ix) = t;
5356 return ret;
5359 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5360 and data type TYPE.
5361 We do NOT enter this node in any sort of symbol table.
5363 LOC is the location of the decl.
5365 layout_decl is used to set up the decl's storage layout.
5366 Other slots are initialized to 0 or null pointers. */
5368 tree
5369 build_decl (location_t loc, enum tree_code code, tree name,
5370 tree type MEM_STAT_DECL)
5372 tree t;
5374 t = make_node (code PASS_MEM_STAT);
5375 DECL_SOURCE_LOCATION (t) = loc;
5377 /* if (type == error_mark_node)
5378 type = integer_type_node; */
5379 /* That is not done, deliberately, so that having error_mark_node
5380 as the type can suppress useless errors in the use of this variable. */
5382 DECL_NAME (t) = name;
5383 TREE_TYPE (t) = type;
5385 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5386 layout_decl (t, 0);
5388 return t;
5391 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5393 tree
5394 build_debug_expr_decl (tree type)
5396 tree vexpr = make_node (DEBUG_EXPR_DECL);
5397 DECL_ARTIFICIAL (vexpr) = 1;
5398 TREE_TYPE (vexpr) = type;
5399 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5400 return vexpr;
5403 /* Builds and returns function declaration with NAME and TYPE. */
5405 tree
5406 build_fn_decl (const char *name, tree type)
5408 tree id = get_identifier (name);
5409 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5411 DECL_EXTERNAL (decl) = 1;
5412 TREE_PUBLIC (decl) = 1;
5413 DECL_ARTIFICIAL (decl) = 1;
5414 TREE_NOTHROW (decl) = 1;
5416 return decl;
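/* Sketch (hypothetical names) of declaring a runtime helper from the
   middle end:

     tree fntype = build_function_type_list (void_type_node,
                                             ptr_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__my_runtime_helper", fntype);

   The decl comes back public, external, artificial and nothrow, as set
   above; a caller would clear TREE_NOTHROW if the helper can throw.  */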
5419 vec<tree, va_gc> *all_translation_units;
5421 /* Builds a new translation-unit decl with name NAME, queues it in the
5422 global list of translation-unit decls and returns it. */
5424 tree
5425 build_translation_unit_decl (tree name)
5427 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5428 name, NULL_TREE);
5429 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5430 vec_safe_push (all_translation_units, tu);
5431 return tu;
5435 /* BLOCK nodes are used to represent the structure of binding contours
5436 and declarations, once those contours have been exited and their contents
5437 compiled. This information is used for outputting debugging info. */
5439 tree
5440 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5442 tree block = make_node (BLOCK);
5444 BLOCK_VARS (block) = vars;
5445 BLOCK_SUBBLOCKS (block) = subblocks;
5446 BLOCK_SUPERCONTEXT (block) = supercontext;
5447 BLOCK_CHAIN (block) = chain;
5448 return block;
5452 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5454 LOC is the location to use in tree T. */
5456 void
5457 protected_set_expr_location (tree t, location_t loc)
5459 if (CAN_HAVE_LOCATION_P (t))
5460 SET_EXPR_LOCATION (t, loc);
5461 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5463 t = expr_single (t);
5464 if (t && CAN_HAVE_LOCATION_P (t))
5465 SET_EXPR_LOCATION (t, loc);
5469 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5470 UNKNOWN_LOCATION. */
5472 void
5473 protected_set_expr_location_if_unset (tree t, location_t loc)
5475 t = expr_single (t);
5476 if (t && !EXPR_HAS_LOCATION (t))
5477 protected_set_expr_location (t, loc);
5480 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5481 of the various TYPE_QUAL values. */
5483 static void
5484 set_type_quals (tree type, int type_quals)
5486 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5487 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5488 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5489 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5490 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5493 /* Returns true iff CAND and BASE have equivalent language-specific
5494 qualifiers. */
5496 bool
5497 check_lang_type (const_tree cand, const_tree base)
5499 if (lang_hooks.types.type_hash_eq == NULL)
5500 return true;
5501 /* type_hash_eq currently only applies to these types. */
5502 if (TREE_CODE (cand) != FUNCTION_TYPE
5503 && TREE_CODE (cand) != METHOD_TYPE)
5504 return true;
5505 return lang_hooks.types.type_hash_eq (cand, base);
5508 /* This function checks to see if TYPE matches the size of one of the built-in
5509 atomic types, and returns that core atomic type. */
5511 static tree
5512 find_atomic_core_type (const_tree type)
5514 tree base_atomic_type;
5516 /* Only handle complete types. */
5517 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5518 return NULL_TREE;
5520 switch (tree_to_uhwi (TYPE_SIZE (type)))
5522 case 8:
5523 base_atomic_type = atomicQI_type_node;
5524 break;
5526 case 16:
5527 base_atomic_type = atomicHI_type_node;
5528 break;
5530 case 32:
5531 base_atomic_type = atomicSI_type_node;
5532 break;
5534 case 64:
5535 base_atomic_type = atomicDI_type_node;
5536 break;
5538 case 128:
5539 base_atomic_type = atomicTI_type_node;
5540 break;
5542 default:
5543 base_atomic_type = NULL_TREE;
5546 return base_atomic_type;
5549 /* Returns true iff unqualified CAND and BASE are equivalent. */
5551 bool
5552 check_base_type (const_tree cand, const_tree base)
5554 if (TYPE_NAME (cand) != TYPE_NAME (base)
5555 /* Apparently this is needed for Objective-C. */
5556 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5557 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5558 TYPE_ATTRIBUTES (base)))
5559 return false;
5560 /* Check alignment. */
5561 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5562 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5563 return true;
5564 /* Atomic types increase minimal alignment. We must do so as well
5565 or we get duplicated canonical types. See PR88686. */
5566 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5568 /* See if this object can map to a basic atomic type. */
5569 tree atomic_type = find_atomic_core_type (cand);
5570 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5571 return true;
5573 return false;
5576 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5578 bool
5579 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5581 return (TYPE_QUALS (cand) == type_quals
5582 && check_base_type (cand, base)
5583 && check_lang_type (cand, base));
5586 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5588 static bool
5589 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5591 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5592 && TYPE_NAME (cand) == TYPE_NAME (base)
5593 /* Apparently this is needed for Objective-C. */
5594 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5595 /* Check alignment. */
5596 && TYPE_ALIGN (cand) == align
5597 /* Check this is a user-aligned type as build_aligned_type
5598 would create. */
5599 && TYPE_USER_ALIGN (cand)
5600 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5601 TYPE_ATTRIBUTES (base))
5602 && check_lang_type (cand, base));
5605 /* Return a version of the TYPE, qualified as indicated by the
5606 TYPE_QUALS, if one exists. If no qualified version exists yet,
5607 return NULL_TREE. */
5609 tree
5610 get_qualified_type (tree type, int type_quals)
5612 if (TYPE_QUALS (type) == type_quals)
5613 return type;
5615 tree mv = TYPE_MAIN_VARIANT (type);
5616 if (check_qualified_type (mv, type, type_quals))
5617 return mv;
5619 /* Search the chain of variants to see if there is already one there just
5620 like the one we need to have. If so, use that existing one. We must
5621 preserve the TYPE_NAME, since there is code that depends on this. */
5622 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5623 if (check_qualified_type (*tp, type, type_quals))
5625 /* Put the found variant at the head of the variant list so
5626 frequently searched variants get found faster. The C++ FE
5627 benefits greatly from this. */
5628 tree t = *tp;
5629 *tp = TYPE_NEXT_VARIANT (t);
5630 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5631 TYPE_NEXT_VARIANT (mv) = t;
5632 return t;
5635 return NULL_TREE;
5638 /* Like get_qualified_type, but creates the type if it does not
5639 exist. This function never returns NULL_TREE. */
5641 tree
5642 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5644 tree t;
5646 /* See if we already have the appropriate qualified variant. */
5647 t = get_qualified_type (type, type_quals);
5649 /* If not, build it. */
5650 if (!t)
5652 t = build_variant_type_copy (type PASS_MEM_STAT);
5653 set_type_quals (t, type_quals);
5655 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5657 /* See if this object can map to a basic atomic type. */
5658 tree atomic_type = find_atomic_core_type (type);
5659 if (atomic_type)
5661 /* Ensure the alignment of this type is compatible with
5662 the required alignment of the atomic type. */
5663 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5664 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5668 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5669 /* Propagate structural equality. */
5670 SET_TYPE_STRUCTURAL_EQUALITY (t);
5671 else if (TYPE_CANONICAL (type) != type)
5672 /* Build the underlying canonical type, since it is different
5673 from TYPE. */
5675 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5676 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5678 else
5679 /* T is its own canonical type. */
5680 TYPE_CANONICAL (t) = t;
5684 return t;
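/* Usage sketch: qualified variants are shared through the variant chain,
   so requesting the same combination twice yields the same node:

     int quals = TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE;
     tree cv1 = build_qualified_type (type, quals);
     tree cv2 = build_qualified_type (type, quals);

   Here CV1 == CV2; TYPE is any existing type node and the example is
   illustrative only.  */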
5687 /* Create a variant of type T with alignment ALIGN. */
5689 tree
5690 build_aligned_type (tree type, unsigned int align)
5692 tree t;
5694 if (TYPE_PACKED (type)
5695 || TYPE_ALIGN (type) == align)
5696 return type;
5698 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5699 if (check_aligned_type (t, type, align))
5700 return t;
5702 t = build_variant_type_copy (type);
5703 SET_TYPE_ALIGN (t, align);
5704 TYPE_USER_ALIGN (t) = 1;
5706 return t;
5709 /* Create a new distinct copy of TYPE. The new type is made its own
5710 MAIN_VARIANT. If TYPE requires structural equality checks, the
5711 resulting type requires structural equality checks; otherwise, its
5712 TYPE_CANONICAL points to itself. */
5714 tree
5715 build_distinct_type_copy (tree type MEM_STAT_DECL)
5717 tree t = copy_node (type PASS_MEM_STAT);
5719 TYPE_POINTER_TO (t) = 0;
5720 TYPE_REFERENCE_TO (t) = 0;
5722 /* Set the canonical type either to a new equivalence class, or
5723 propagate the need for structural equality checks. */
5724 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5725 SET_TYPE_STRUCTURAL_EQUALITY (t);
5726 else
5727 TYPE_CANONICAL (t) = t;
5729 /* Make it its own variant. */
5730 TYPE_MAIN_VARIANT (t) = t;
5731 TYPE_NEXT_VARIANT (t) = 0;
5733 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5734 whose TREE_TYPE is not t. This can also happen in the Ada
5735 frontend when using subtypes. */
5737 return t;
5740 /* Create a new variant of TYPE, equivalent but distinct. This is so
5741 the caller can modify it. TYPE_CANONICAL for the return type will
5742 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5743 are considered equal by the language itself (or that both types
5744 require structural equality checks). */
5746 tree
5747 build_variant_type_copy (tree type MEM_STAT_DECL)
5749 tree t, m = TYPE_MAIN_VARIANT (type);
5751 t = build_distinct_type_copy (type PASS_MEM_STAT);
5753 /* Since we're building a variant, assume that it is a non-semantic
5754 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5755 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5756 /* Type variants have no alias set defined. */
5757 TYPE_ALIAS_SET (t) = -1;
5759 /* Add the new type to the chain of variants of TYPE. */
5760 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5761 TYPE_NEXT_VARIANT (m) = t;
5762 TYPE_MAIN_VARIANT (t) = m;
5764 return t;
5767 /* Return true if the from trees in both tree maps are equal. */
5770 tree_map_base_eq (const void *va, const void *vb)
5772 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5773 *const b = (const struct tree_map_base *) vb;
5774 return (a->from == b->from);
5777 /* Hash a from tree in a tree_map_base. */
5779 unsigned int
5780 tree_map_base_hash (const void *item)
5782 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5785 /* Return true if this tree map structure is marked for garbage collection
5786 purposes. We simply return true if the from tree is marked, so that this
5787 structure goes away when the from tree goes away. */
5789 bool
5790 tree_map_base_marked_p (const void *p)
5792 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5795 /* Hash a from tree in a tree_map. */
5797 unsigned int
5798 tree_map_hash (const void *item)
5800 return (((const struct tree_map *) item)->hash);
5803 /* Hash a from tree in a tree_decl_map. */
5805 unsigned int
5806 tree_decl_map_hash (const void *item)
5808 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5811 /* Return the initialization priority for DECL. */
5813 priority_type
5814 decl_init_priority_lookup (tree decl)
5816 symtab_node *snode = symtab_node::get (decl);
5818 if (!snode)
5819 return DEFAULT_INIT_PRIORITY;
5820 return
5821 snode->get_init_priority ();
5824 /* Return the finalization priority for DECL. */
5826 priority_type
5827 decl_fini_priority_lookup (tree decl)
5829 cgraph_node *node = cgraph_node::get (decl);
5831 if (!node)
5832 return DEFAULT_INIT_PRIORITY;
5833 return
5834 node->get_fini_priority ();
5837 /* Set the initialization priority for DECL to PRIORITY. */
5839 void
5840 decl_init_priority_insert (tree decl, priority_type priority)
5842 struct symtab_node *snode;
5844 if (priority == DEFAULT_INIT_PRIORITY)
5846 snode = symtab_node::get (decl);
5847 if (!snode)
5848 return;
5850 else if (VAR_P (decl))
5851 snode = varpool_node::get_create (decl);
5852 else
5853 snode = cgraph_node::get_create (decl);
5854 snode->set_init_priority (priority);
5857 /* Set the finalization priority for DECL to PRIORITY. */
5859 void
5860 decl_fini_priority_insert (tree decl, priority_type priority)
5862 struct cgraph_node *node;
5864 if (priority == DEFAULT_INIT_PRIORITY)
5866 node = cgraph_node::get (decl);
5867 if (!node)
5868 return;
5870 else
5871 node = cgraph_node::get_create (decl);
5872 node->set_fini_priority (priority);
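/* Hedged example: a front end handling __attribute__ ((constructor (200)))
   or init_priority records the non-default priority on the symbol:

     decl_init_priority_insert (fndecl, 200);

   As the code above shows, the default priority does not force creation of
   a symtab node, while a non-default one does.  FNDECL is hypothetical.  */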
5875 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5877 static void
5878 print_debug_expr_statistics (void)
5880 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5881 (long) debug_expr_for_decl->size (),
5882 (long) debug_expr_for_decl->elements (),
5883 debug_expr_for_decl->collisions ());
5886 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5888 static void
5889 print_value_expr_statistics (void)
5891 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5892 (long) value_expr_for_decl->size (),
5893 (long) value_expr_for_decl->elements (),
5894 value_expr_for_decl->collisions ());
5897 /* Lookup a debug expression for FROM, and return it if we find one. */
5899 tree
5900 decl_debug_expr_lookup (tree from)
5902 struct tree_decl_map *h, in;
5903 in.base.from = from;
5905 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5906 if (h)
5907 return h->to;
5908 return NULL_TREE;
5911 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5913 void
5914 decl_debug_expr_insert (tree from, tree to)
5916 struct tree_decl_map *h;
5918 h = ggc_alloc<tree_decl_map> ();
5919 h->base.from = from;
5920 h->to = to;
5921 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5924 /* Lookup a value expression for FROM, and return it if we find one. */
5926 tree
5927 decl_value_expr_lookup (tree from)
5929 struct tree_decl_map *h, in;
5930 in.base.from = from;
5932 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5933 if (h)
5934 return h->to;
5935 return NULL_TREE;
5938 /* Insert a mapping FROM->TO in the value expression hashtable. */
5940 void
5941 decl_value_expr_insert (tree from, tree to)
5943 struct tree_decl_map *h;
5945 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5946 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5948 h = ggc_alloc<tree_decl_map> ();
5949 h->base.from = from;
5950 h->to = to;
5951 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5954 /* Lookup a vector of debug arguments for FROM, and return it if we
5955 find one. */
5957 vec<tree, va_gc> **
5958 decl_debug_args_lookup (tree from)
5960 struct tree_vec_map *h, in;
5962 if (!DECL_HAS_DEBUG_ARGS_P (from))
5963 return NULL;
5964 gcc_checking_assert (debug_args_for_decl != NULL);
5965 in.base.from = from;
5966 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5967 if (h)
5968 return &h->to;
5969 return NULL;
5972 /* Insert a mapping FROM->empty vector of debug arguments in the debug
5973 arguments hashtable. */
5975 vec<tree, va_gc> **
5976 decl_debug_args_insert (tree from)
5978 struct tree_vec_map *h;
5979 tree_vec_map **loc;
5981 if (DECL_HAS_DEBUG_ARGS_P (from))
5982 return decl_debug_args_lookup (from);
5983 if (debug_args_for_decl == NULL)
5984 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5985 h = ggc_alloc<tree_vec_map> ();
5986 h->base.from = from;
5987 h->to = NULL;
5988 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5989 *loc = h;
5990 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5991 return &h->to;
5994 /* Hashing of types so that we don't make duplicates.
5995 The entry point is `type_hash_canon'. */
5997 /* Generate the default hash code for TYPE. This is designed for
5998 speed, rather than maximum entropy. */
6000 hashval_t
6001 type_hash_canon_hash (tree type)
6003 inchash::hash hstate;
6005 hstate.add_int (TREE_CODE (type));
6007 if (TREE_TYPE (type))
6008 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6010 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6011 /* Just the identifier is adequate to distinguish. */
6012 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6014 switch (TREE_CODE (type))
6016 case METHOD_TYPE:
6017 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6018 /* FALLTHROUGH. */
6019 case FUNCTION_TYPE:
6020 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6021 if (TREE_VALUE (t) != error_mark_node)
6022 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6023 break;
6025 case OFFSET_TYPE:
6026 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6027 break;
6029 case ARRAY_TYPE:
6031 if (TYPE_DOMAIN (type))
6032 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6033 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6035 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6036 hstate.add_object (typeless);
6039 break;
6041 case INTEGER_TYPE:
6043 tree t = TYPE_MAX_VALUE (type);
6044 if (!t)
6045 t = TYPE_MIN_VALUE (type);
6046 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6047 hstate.add_object (TREE_INT_CST_ELT (t, i));
6048 break;
6051 case BITINT_TYPE:
6053 unsigned prec = TYPE_PRECISION (type);
6054 unsigned uns = TYPE_UNSIGNED (type);
6055 hstate.add_object (prec);
6056 hstate.add_int (uns);
6057 break;
6060 case REAL_TYPE:
6061 case FIXED_POINT_TYPE:
6063 unsigned prec = TYPE_PRECISION (type);
6064 hstate.add_object (prec);
6065 break;
6068 case VECTOR_TYPE:
6069 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6070 break;
6072 default:
6073 break;
6076 return hstate.end ();
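/* Assumed usage pattern (as in the shared-type constructors later in this
   file, e.g. build_array_type_1): a freshly built candidate type is hashed
   and then canonicalized so that structurally identical types collapse to
   a single node:

     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an equivalent type already exists, the duplicate is discarded and the
   existing node is returned instead.  */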
6079 /* These are the Hashtable callback functions. */
6081 /* Returns true iff the types are equivalent. */
6083 bool
6084 type_cache_hasher::equal (type_hash *a, type_hash *b)
6086 /* First test the things that are the same for all types. */
6087 if (a->hash != b->hash
6088 || TREE_CODE (a->type) != TREE_CODE (b->type)
6089 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6090 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6091 TYPE_ATTRIBUTES (b->type))
6092 || (TREE_CODE (a->type) != COMPLEX_TYPE
6093 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6094 return false;
6096 /* Be careful about comparing arrays before and after the element type
6097 has been completed; don't compare TYPE_ALIGN unless both types are
6098 complete. */
6099 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6100 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6101 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6102 return false;
6104 switch (TREE_CODE (a->type))
6106 case VOID_TYPE:
6107 case OPAQUE_TYPE:
6108 case COMPLEX_TYPE:
6109 case POINTER_TYPE:
6110 case REFERENCE_TYPE:
6111 case NULLPTR_TYPE:
6112 return true;
6114 case VECTOR_TYPE:
6115 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6116 TYPE_VECTOR_SUBPARTS (b->type));
6118 case ENUMERAL_TYPE:
6119 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6120 && !(TYPE_VALUES (a->type)
6121 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6122 && TYPE_VALUES (b->type)
6123 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6124 && type_list_equal (TYPE_VALUES (a->type),
6125 TYPE_VALUES (b->type))))
6126 return false;
6128 /* fall through */
6130 case INTEGER_TYPE:
6131 case REAL_TYPE:
6132 case BOOLEAN_TYPE:
6133 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6134 return false;
6135 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6136 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6137 TYPE_MAX_VALUE (b->type)))
6138 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6139 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6140 TYPE_MIN_VALUE (b->type))));
6142 case BITINT_TYPE:
6143 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6144 return false;
6145 return TYPE_UNSIGNED (a->type) == TYPE_UNSIGNED (b->type);
6147 case FIXED_POINT_TYPE:
6148 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6150 case OFFSET_TYPE:
6151 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6153 case METHOD_TYPE:
6154 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6155 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6156 || (TYPE_ARG_TYPES (a->type)
6157 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6158 && TYPE_ARG_TYPES (b->type)
6159 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6160 && type_list_equal (TYPE_ARG_TYPES (a->type),
6161 TYPE_ARG_TYPES (b->type)))))
6162 break;
6163 return false;
6164 case ARRAY_TYPE:
6165 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6166 where the flag should be inherited from the element type
6167 and can change after ARRAY_TYPEs are created; on non-aggregates
6168 compare and hash it, since scalars never have that flag set
6169 and we need to differentiate between arrays created by different
6170 front ends and middle-end-created arrays. */
6171 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6172 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6173 || (TYPE_TYPELESS_STORAGE (a->type)
6174 == TYPE_TYPELESS_STORAGE (b->type))));
6176 case RECORD_TYPE:
6177 case UNION_TYPE:
6178 case QUAL_UNION_TYPE:
6179 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6180 || (TYPE_FIELDS (a->type)
6181 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6182 && TYPE_FIELDS (b->type)
6183 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6184 && type_list_equal (TYPE_FIELDS (a->type),
6185 TYPE_FIELDS (b->type))));
6187 case FUNCTION_TYPE:
6188 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6189 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6190 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6191 || (TYPE_ARG_TYPES (a->type)
6192 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6193 && TYPE_ARG_TYPES (b->type)
6194 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6195 && type_list_equal (TYPE_ARG_TYPES (a->type),
6196 TYPE_ARG_TYPES (b->type))))
6197 break;
6198 return false;
6200 default:
6201 return false;
6204 if (lang_hooks.types.type_hash_eq != NULL)
6205 return lang_hooks.types.type_hash_eq (a->type, b->type);
6207 return true;
6210 /* Given TYPE, and HASHCODE its hash code, return the canonical
6211 object for an identical type if one already exists.
6212 Otherwise, return TYPE, and record it as the canonical object.
6214 To use this function, first create a type of the sort you want.
6215 Then compute its hash code from the fields of the type that
6216 make it different from other similar types.
6217 Then call this function and use the value. */
6219 tree
6220 type_hash_canon (unsigned int hashcode, tree type)
6222 type_hash in;
6223 type_hash **loc;
6225 /* The hash table only contains main variants, so ensure that's what we're
6226 being passed. */
6227 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6229 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6230 must call that routine before comparing TYPE_ALIGNs. */
6231 layout_type (type);
6233 in.hash = hashcode;
6234 in.type = type;
6236 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6237 if (*loc)
6239 tree t1 = ((type_hash *) *loc)->type;
6240 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6241 && t1 != type);
6242 if (TYPE_UID (type) + 1 == next_type_uid)
6243 --next_type_uid;
6244 /* Also free the min/max values and the cache for integer
6245 types. This can't be done in free_node, as LTO frees
6246 those on its own. */
6247 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == BITINT_TYPE)
6249 if (TYPE_MIN_VALUE (type)
6250 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6252 /* Zero is always in TYPE_CACHED_VALUES. */
6253 if (! TYPE_UNSIGNED (type))
6254 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6255 ggc_free (TYPE_MIN_VALUE (type));
6257 if (TYPE_MAX_VALUE (type)
6258 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6260 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6261 ggc_free (TYPE_MAX_VALUE (type));
6263 if (TYPE_CACHED_VALUES_P (type))
6264 ggc_free (TYPE_CACHED_VALUES (type));
6266 free_node (type);
6267 return t1;
6269 else
6271 struct type_hash *h;
6273 h = ggc_alloc<type_hash> ();
6274 h->hash = hashcode;
6275 h->type = type;
6276 *loc = h;
6278 return type;
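/* Illustrative sketch, not part of tree.cc: the recipe described in the
   comment above, essentially as build_nonstandard_integer_type below
   applies it.  The helper name is hypothetical.  */

static tree
sketch_shared_int_type (unsigned prec, bool unsignedp)
{
  /* First create a type of the sort we want.  */
  tree t = make_node (INTEGER_TYPE);
  TYPE_PRECISION (t) = prec;
  if (unsignedp)
    fixup_unsigned_type (t);
  else
    fixup_signed_type (t);

  /* Then hash the fields that distinguish it from similar types...  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (t), hstate);

  /* ...and let type_hash_canon either record it or hand back the
     previously recorded identical type.  */
  return type_hash_canon (hstate.end (), t);
}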
6282 static void
6283 print_type_hash_statistics (void)
6285 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6286 (long) type_hash_table->size (),
6287 (long) type_hash_table->elements (),
6288 type_hash_table->collisions ());
6291 /* Given two lists of types
6292 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6293 return 1 if the lists contain the same types in the same order.
6294 Also, the TREE_PURPOSEs must match. */
6296 bool
6297 type_list_equal (const_tree l1, const_tree l2)
6299 const_tree t1, t2;
6301 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6302 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6303 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6304 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6305 && (TREE_TYPE (TREE_PURPOSE (t1))
6306 == TREE_TYPE (TREE_PURPOSE (t2))))))
6307 return false;
6309 return t1 == t2;
6312 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6313 given by FNTYPE. If the argument list accepts variable arguments,
6314 then this function counts only the ordinary arguments. */
6316 int
6317 type_num_arguments (const_tree fntype)
6319 int i = 0;
6321 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6322 /* If the function does not take a variable number of arguments,
6323 the last element in the list will have type `void'. */
6324 if (VOID_TYPE_P (TREE_VALUE (t)))
6325 break;
6326 else
6327 ++i;
6329 return i;
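/* Worked example (illustration only; the helper name is hypothetical):
   for the C prototype "int f (int, double)" the TYPE_ARG_TYPES chain is
   int -> double -> void, so type_num_arguments returns 2; for a varargs
   prototype the trailing void is absent and only the named arguments
   are counted.  */

static int
sketch_count_named_args (void)
{
  tree fntype = build_function_type_list (integer_type_node,
                                          integer_type_node,
                                          double_type_node,
                                          NULL_TREE);
  return type_num_arguments (fntype);  /* 2 */
}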
6332 /* Return the type of the function FNTYPE's argument ARGNO if known.
6333 For vararg functions, where ARGNO refers to one of the variadic
6334 arguments, return null. Otherwise, return void_type_node for an
6335 out-of-bounds ARGNO. */
6337 tree
6338 type_argument_type (const_tree fntype, unsigned argno)
6340 /* Treat zero the same as an out-of-bounds argument number. */
6341 if (!argno)
6342 return void_type_node;
6344 function_args_iterator iter;
6346 tree argtype;
6347 unsigned i = 1;
6348 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6350 /* A vararg function's argument list ends in a null. Otherwise,
6351 an ordinary function's argument list ends with void. Return
6352 null if ARGNO refers to a vararg argument, void_type_node if
6353 it's out of bounds, and the formal argument type otherwise. */
6354 if (!argtype)
6355 break;
6357 if (i == argno || VOID_TYPE_P (argtype))
6358 return argtype;
6360 ++i;
6363 return NULL_TREE;
6366 /* True if integer constants T1 and T2
6367 represent the same constant value. */
6369 bool
6370 tree_int_cst_equal (const_tree t1, const_tree t2)
6372 if (t1 == t2)
6373 return true;
6375 if (t1 == 0 || t2 == 0)
6376 return false;
6378 STRIP_ANY_LOCATION_WRAPPER (t1);
6379 STRIP_ANY_LOCATION_WRAPPER (t2);
6381 if (TREE_CODE (t1) == INTEGER_CST
6382 && TREE_CODE (t2) == INTEGER_CST
6383 && wi::to_widest (t1) == wi::to_widest (t2))
6384 return true;
6386 return false;
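/* Illustration (hypothetical helper): the comparison is purely by value,
   so a 7 of type int and a 7 of type long compare equal here even though
   the two INTEGER_CST nodes are distinct.  */

static bool
sketch_int_cst_value_equality (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (long_integer_type_node, 7);
  return tree_int_cst_equal (a, b);  /* true */
}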
6389 /* Return true if T is an INTEGER_CST whose numerical value (extended
6390 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6392 bool
6393 tree_fits_shwi_p (const_tree t)
6395 return (t != NULL_TREE
6396 && TREE_CODE (t) == INTEGER_CST
6397 && wi::fits_shwi_p (wi::to_widest (t)));
6400 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6401 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6403 bool
6404 tree_fits_poly_int64_p (const_tree t)
6406 if (t == NULL_TREE)
6407 return false;
6408 if (POLY_INT_CST_P (t))
6410 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6411 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6412 return false;
6413 return true;
6415 return (TREE_CODE (t) == INTEGER_CST
6416 && wi::fits_shwi_p (wi::to_widest (t)));
6419 /* Return true if T is an INTEGER_CST whose numerical value (extended
6420 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6422 bool
6423 tree_fits_uhwi_p (const_tree t)
6425 return (t != NULL_TREE
6426 && TREE_CODE (t) == INTEGER_CST
6427 && wi::fits_uhwi_p (wi::to_widest (t)));
6430 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6431 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6433 bool
6434 tree_fits_poly_uint64_p (const_tree t)
6436 if (t == NULL_TREE)
6437 return false;
6438 if (POLY_INT_CST_P (t))
6440 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6441 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6442 return false;
6443 return true;
6445 return (TREE_CODE (t) == INTEGER_CST
6446 && wi::fits_uhwi_p (wi::to_widest (t)));
6449 /* T is an INTEGER_CST whose numerical value (extended according to
6450 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6451 HOST_WIDE_INT. */
6453 HOST_WIDE_INT
6454 tree_to_shwi (const_tree t)
6456 gcc_assert (tree_fits_shwi_p (t));
6457 return TREE_INT_CST_LOW (t);
6460 /* T is an INTEGER_CST whose numerical value (extended according to
6461 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6462 HOST_WIDE_INT. */
6464 unsigned HOST_WIDE_INT
6465 tree_to_uhwi (const_tree t)
6467 gcc_assert (tree_fits_uhwi_p (t));
6468 return TREE_INT_CST_LOW (t);
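/* Typical guarded use (sketch only, hypothetical helper): the tree_to_*
   routines assert that the corresponding tree_fits_*_p predicate holds,
   so callers test it first and fall back otherwise.  */

static HOST_WIDE_INT
sketch_size_or_minus_one (tree size)
{
  if (size && tree_fits_shwi_p (size))
    return tree_to_shwi (size);
  return -1;  /* unknown, non-constant or too large for a HOST_WIDE_INT */
}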
6471 /* Return the most significant (sign) bit of T. */
6473 int
6474 tree_int_cst_sign_bit (const_tree t)
6476 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6478 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6481 /* Return an indication of the sign of the integer constant T.
6482 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6483 Note that -1 will never be returned if T's type is unsigned. */
6485 int
6486 tree_int_cst_sgn (const_tree t)
6488 if (wi::to_wide (t) == 0)
6489 return 0;
6490 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6491 return 1;
6492 else if (wi::neg_p (wi::to_wide (t)))
6493 return -1;
6494 else
6495 return 1;
6498 /* Return the minimum number of bits needed to represent VALUE in a
6499 signed or unsigned type; SGN says which. */
6501 unsigned int
6502 tree_int_cst_min_precision (tree value, signop sgn)
6504 /* If the value is negative, compute its negative minus 1. The latter
6505 adjustment is because the absolute value of the largest negative value
6506 is one larger than the largest positive value. This is equivalent to
6507 a bit-wise negation, so use that operation instead. */
6509 if (tree_int_cst_sgn (value) < 0)
6510 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6512 /* Return the number of bits needed, taking into account the fact
6513 that we need one more bit for a signed than unsigned type.
6514 If value is 0 or -1, the minimum precision is 1 no matter
6515 whether SGN is SIGNED or UNSIGNED. */
6517 if (integer_zerop (value))
6518 return 1;
6519 else
6520 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
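/* Worked example (illustration): with SGN == UNSIGNED, 5 (binary 101)
   needs tree_floor_log2 (5) + 1 = 3 bits; with SGN == SIGNED it needs
   one more, i.e. 4 bits.  For -3 the bit-wise negation yields 2, so the
   result is tree_floor_log2 (2) + 1 + 1 = 3 signed bits (binary 101,
   covering -4..3).  */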
6523 /* Compare whether T1 has the same tree structure as T2.
6524 Return 1 if they are the same.
6525 Return 0 if they are understood to be different.
6526 Return -1 if either contains tree structure not understood by
6527 this function. */
6529 int
6530 simple_cst_equal (const_tree t1, const_tree t2)
6532 enum tree_code code1, code2;
6533 int cmp;
6534 int i;
6536 if (t1 == t2)
6537 return 1;
6538 if (t1 == 0 || t2 == 0)
6539 return 0;
6541 /* For location wrappers to be the same, they must be at the same
6542 source location (and wrap the same thing). */
6543 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6545 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6546 return 0;
6547 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6550 code1 = TREE_CODE (t1);
6551 code2 = TREE_CODE (t2);
6553 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6555 if (CONVERT_EXPR_CODE_P (code2)
6556 || code2 == NON_LVALUE_EXPR)
6557 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6558 else
6559 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6562 else if (CONVERT_EXPR_CODE_P (code2)
6563 || code2 == NON_LVALUE_EXPR)
6564 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6566 if (code1 != code2)
6567 return 0;
6569 switch (code1)
6571 case INTEGER_CST:
6572 return wi::to_widest (t1) == wi::to_widest (t2);
6574 case REAL_CST:
6575 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6577 case FIXED_CST:
6578 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6580 case STRING_CST:
6581 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6582 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6583 TREE_STRING_LENGTH (t1)));
6585 case CONSTRUCTOR:
6587 unsigned HOST_WIDE_INT idx;
6588 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6589 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6591 if (vec_safe_length (v1) != vec_safe_length (v2))
6592 return false;
6594 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6595 /* ??? Should we handle also fields here? */
6596 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6597 return false;
6598 return true;
6601 case SAVE_EXPR:
6602 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6604 case CALL_EXPR:
6605 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6606 if (cmp <= 0)
6607 return cmp;
6608 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6609 return 0;
6611 const_tree arg1, arg2;
6612 const_call_expr_arg_iterator iter1, iter2;
6613 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6614 arg2 = first_const_call_expr_arg (t2, &iter2);
6615 arg1 && arg2;
6616 arg1 = next_const_call_expr_arg (&iter1),
6617 arg2 = next_const_call_expr_arg (&iter2))
6619 cmp = simple_cst_equal (arg1, arg2);
6620 if (cmp <= 0)
6621 return cmp;
6623 return arg1 == arg2;
6626 case TARGET_EXPR:
6627 /* Special case: if either target is an unallocated VAR_DECL,
6628 it means that it's going to be unified with whatever the
6629 TARGET_EXPR is really supposed to initialize, so treat it
6630 as being equivalent to anything. */
6631 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6632 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6633 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6634 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6635 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6636 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6637 cmp = 1;
6638 else
6639 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6641 if (cmp <= 0)
6642 return cmp;
6644 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6646 case WITH_CLEANUP_EXPR:
6647 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6648 if (cmp <= 0)
6649 return cmp;
6651 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6653 case COMPONENT_REF:
6654 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6655 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6657 return 0;
6659 case VAR_DECL:
6660 case PARM_DECL:
6661 case CONST_DECL:
6662 case FUNCTION_DECL:
6663 return 0;
6665 default:
6666 if (POLY_INT_CST_P (t1))
6667 /* A false return means maybe_ne rather than known_ne. */
6668 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6669 TYPE_SIGN (TREE_TYPE (t1))),
6670 poly_widest_int::from (poly_int_cst_value (t2),
6671 TYPE_SIGN (TREE_TYPE (t2))));
6672 break;
6675 /* This general rule works for most tree codes. All exceptions should be
6676 handled above. If this is a language-specific tree code, we can't
6677 trust what might be in the operand, so say we don't know
6678 the situation. */
6679 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6680 return -1;
6682 switch (TREE_CODE_CLASS (code1))
6684 case tcc_unary:
6685 case tcc_binary:
6686 case tcc_comparison:
6687 case tcc_expression:
6688 case tcc_reference:
6689 case tcc_statement:
6690 cmp = 1;
6691 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6693 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6694 if (cmp <= 0)
6695 return cmp;
6698 return cmp;
6700 default:
6701 return -1;
6705 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6706 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6707 than U, respectively. */
6709 int
6710 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6712 if (tree_int_cst_sgn (t) < 0)
6713 return -1;
6714 else if (!tree_fits_uhwi_p (t))
6715 return 1;
6716 else if (TREE_INT_CST_LOW (t) == u)
6717 return 0;
6718 else if (TREE_INT_CST_LOW (t) < u)
6719 return -1;
6720 else
6721 return 1;
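/* Typical use (sketch only, hypothetical helper): bound-check an
   INTEGER_CST against a host constant without first converting it,
   which avoids overflow concerns for values that don't fit a
   HOST_WIDE_INT.  */

static bool
sketch_length_at_most_four (tree len)
{
  return compare_tree_int (len, 4) <= 0;
}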
6724 /* Return true if SIZE represents a constant size that is in bounds of
6725 what the middle-end and the backend accept (covering not more than
6726 half of the address-space).
6727 When PERR is non-null, set *PERR on failure to the description of
6728 why SIZE is not valid. */
6730 bool
6731 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6733 if (POLY_INT_CST_P (size))
6735 if (TREE_OVERFLOW (size))
6736 return false;
6737 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6738 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6739 return false;
6740 return true;
6743 cst_size_error error;
6744 if (!perr)
6745 perr = &error;
6747 if (TREE_CODE (size) != INTEGER_CST)
6749 *perr = cst_size_not_constant;
6750 return false;
6753 if (TREE_OVERFLOW_P (size))
6755 *perr = cst_size_overflow;
6756 return false;
6759 if (tree_int_cst_sgn (size) < 0)
6761 *perr = cst_size_negative;
6762 return false;
6764 if (!tree_fits_uhwi_p (size)
6765 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6766 < wi::to_widest (size) * 2))
6768 *perr = cst_size_too_big;
6769 return false;
6772 return true;
6775 /* Return the precision of the type, or for a complex or vector type the
6776 precision of the type of its elements. */
6778 unsigned int
6779 element_precision (const_tree type)
6781 if (!TYPE_P (type))
6782 type = TREE_TYPE (type);
6783 enum tree_code code = TREE_CODE (type);
6784 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6785 type = TREE_TYPE (type);
6787 return TYPE_PRECISION (type);
6790 /* Return true if CODE represents an associative tree code. Otherwise
6791 return false. */
6792 bool
6793 associative_tree_code (enum tree_code code)
6795 switch (code)
6797 case BIT_IOR_EXPR:
6798 case BIT_AND_EXPR:
6799 case BIT_XOR_EXPR:
6800 case PLUS_EXPR:
6801 case MULT_EXPR:
6802 case MIN_EXPR:
6803 case MAX_EXPR:
6804 return true;
6806 default:
6807 break;
6809 return false;
6812 /* Return true if CODE represents a commutative tree code. Otherwise
6813 return false. */
6814 bool
6815 commutative_tree_code (enum tree_code code)
6817 switch (code)
6819 case PLUS_EXPR:
6820 case MULT_EXPR:
6821 case MULT_HIGHPART_EXPR:
6822 case MIN_EXPR:
6823 case MAX_EXPR:
6824 case BIT_IOR_EXPR:
6825 case BIT_XOR_EXPR:
6826 case BIT_AND_EXPR:
6827 case NE_EXPR:
6828 case EQ_EXPR:
6829 case UNORDERED_EXPR:
6830 case ORDERED_EXPR:
6831 case UNEQ_EXPR:
6832 case LTGT_EXPR:
6833 case TRUTH_AND_EXPR:
6834 case TRUTH_XOR_EXPR:
6835 case TRUTH_OR_EXPR:
6836 case WIDEN_MULT_EXPR:
6837 case VEC_WIDEN_MULT_HI_EXPR:
6838 case VEC_WIDEN_MULT_LO_EXPR:
6839 case VEC_WIDEN_MULT_EVEN_EXPR:
6840 case VEC_WIDEN_MULT_ODD_EXPR:
6841 return true;
6843 default:
6844 break;
6846 return false;
6849 /* Return true if CODE represents a ternary tree code for which the
6850 first two operands are commutative. Otherwise return false. */
6851 bool
6852 commutative_ternary_tree_code (enum tree_code code)
6854 switch (code)
6856 case WIDEN_MULT_PLUS_EXPR:
6857 case WIDEN_MULT_MINUS_EXPR:
6858 case DOT_PROD_EXPR:
6859 return true;
6861 default:
6862 break;
6864 return false;
6867 /* Returns true if CODE can overflow. */
6869 bool
6870 operation_can_overflow (enum tree_code code)
6872 switch (code)
6874 case PLUS_EXPR:
6875 case MINUS_EXPR:
6876 case MULT_EXPR:
6877 case LSHIFT_EXPR:
6878 /* Can overflow in various ways. */
6879 return true;
6880 case TRUNC_DIV_EXPR:
6881 case EXACT_DIV_EXPR:
6882 case FLOOR_DIV_EXPR:
6883 case CEIL_DIV_EXPR:
6884 /* For INT_MIN / -1. */
6885 return true;
6886 case NEGATE_EXPR:
6887 case ABS_EXPR:
6888 /* For -INT_MIN. */
6889 return true;
6890 default:
6891 /* These operators cannot overflow. */
6892 return false;
6896 /* Returns true if CODE operating on operands of type TYPE doesn't overflow,
6897 or if -ftrapv doesn't generate trapping insns for CODE. */
6899 bool
6900 operation_no_trapping_overflow (tree type, enum tree_code code)
6902 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6904 /* We don't generate instructions that trap on overflow for complex or vector
6905 types. */
6906 if (!INTEGRAL_TYPE_P (type))
6907 return true;
6909 if (!TYPE_OVERFLOW_TRAPS (type))
6910 return true;
6912 switch (code)
6914 case PLUS_EXPR:
6915 case MINUS_EXPR:
6916 case MULT_EXPR:
6917 case NEGATE_EXPR:
6918 case ABS_EXPR:
6919 /* These operators can overflow, and -ftrapv generates trapping code for
6920 these. */
6921 return false;
6922 case TRUNC_DIV_EXPR:
6923 case EXACT_DIV_EXPR:
6924 case FLOOR_DIV_EXPR:
6925 case CEIL_DIV_EXPR:
6926 case LSHIFT_EXPR:
6927 /* These operators can overflow, but -ftrapv does not generate trapping
6928 code for these. */
6929 return true;
6930 default:
6931 /* These operators cannot overflow. */
6932 return true;
6936 /* Constructors for pointer, array and function types.
6937 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6938 constructed by language-dependent code, not here.) */
6940 /* Construct, lay out and return the type of pointers to TO_TYPE with
6941 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6942 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6943 indicate this type can reference all of memory. If such a type has
6944 already been constructed, reuse it. */
6946 tree
6947 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6948 bool can_alias_all)
6950 tree t;
6951 bool could_alias = can_alias_all;
6953 if (to_type == error_mark_node)
6954 return error_mark_node;
6956 if (mode == VOIDmode)
6958 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6959 mode = targetm.addr_space.pointer_mode (as);
6962 /* If the pointed-to type has the may_alias attribute set, force
6963 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6964 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6965 can_alias_all = true;
6967 /* In some cases, languages will have things that aren't a POINTER_TYPE
6968 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6969 In that case, return that type without regard to the rest of our
6970 operands.
6972 ??? This is a kludge, but consistent with the way this function has
6973 always operated and there doesn't seem to be a good way to avoid this
6974 at the moment. */
6975 if (TYPE_POINTER_TO (to_type) != 0
6976 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6977 return TYPE_POINTER_TO (to_type);
6979 /* First, if we already have a type for pointers to TO_TYPE and it's
6980 the proper mode, use it. */
6981 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6982 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6983 return t;
6985 t = make_node (POINTER_TYPE);
6987 TREE_TYPE (t) = to_type;
6988 SET_TYPE_MODE (t, mode);
6989 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6990 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6991 TYPE_POINTER_TO (to_type) = t;
6993 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6994 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6995 SET_TYPE_STRUCTURAL_EQUALITY (t);
6996 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6997 TYPE_CANONICAL (t)
6998 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6999 mode, false);
7001 /* Lay out the type. This function has many callers that are concerned
7002 with expression-construction, and this simplifies them all. */
7003 layout_type (t);
7005 return t;
7008 /* By default build pointers in ptr_mode. */
7010 tree
7011 build_pointer_type (tree to_type)
7013 return build_pointer_type_for_mode (to_type, VOIDmode, false);
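/* Illustration (hypothetical helper): repeated requests for a pointer to
   the same pointee come back as the node already recorded on the
   TYPE_POINTER_TO chain, so "int *" is built only once.  */

static bool
sketch_pointer_type_reuse (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  return p1 == p2;  /* true: the existing node is reused */
}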
7016 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7018 tree
7019 build_reference_type_for_mode (tree to_type, machine_mode mode,
7020 bool can_alias_all)
7022 tree t;
7023 bool could_alias = can_alias_all;
7025 if (to_type == error_mark_node)
7026 return error_mark_node;
7028 if (mode == VOIDmode)
7030 addr_space_t as = TYPE_ADDR_SPACE (to_type);
7031 mode = targetm.addr_space.pointer_mode (as);
7034 /* If the pointed-to type has the may_alias attribute set, force
7035 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7036 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7037 can_alias_all = true;
7039 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7040 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7041 In that case, return that type without regard to the rest of our
7042 operands.
7044 ??? This is a kludge, but consistent with the way this function has
7045 always operated and there doesn't seem to be a good way to avoid this
7046 at the moment. */
7047 if (TYPE_REFERENCE_TO (to_type) != 0
7048 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7049 return TYPE_REFERENCE_TO (to_type);
7051 /* First, if we already have a type for references to TO_TYPE and it's
7052 the proper mode, use it. */
7053 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7054 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7055 return t;
7057 t = make_node (REFERENCE_TYPE);
7059 TREE_TYPE (t) = to_type;
7060 SET_TYPE_MODE (t, mode);
7061 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7062 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7063 TYPE_REFERENCE_TO (to_type) = t;
7065 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7066 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7067 SET_TYPE_STRUCTURAL_EQUALITY (t);
7068 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7069 TYPE_CANONICAL (t)
7070 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7071 mode, false);
7073 layout_type (t);
7075 return t;
7079 /* Build the node for the type of references-to-TO_TYPE by default
7080 in ptr_mode. */
7082 tree
7083 build_reference_type (tree to_type)
7085 return build_reference_type_for_mode (to_type, VOIDmode, false);
7088 #define MAX_INT_CACHED_PREC \
7089 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7090 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7092 static void
7093 clear_nonstandard_integer_type_cache (void)
7095 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7097 nonstandard_integer_type_cache[i] = NULL;
7101 /* Builds a signed or unsigned integer type of precision PRECISION.
7102 Used for C bitfields whose precision does not match that of
7103 built-in target types. */
7104 tree
7105 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7106 int unsignedp)
7108 tree itype, ret;
7110 if (unsignedp)
7111 unsignedp = MAX_INT_CACHED_PREC + 1;
7113 if (precision <= MAX_INT_CACHED_PREC)
7115 itype = nonstandard_integer_type_cache[precision + unsignedp];
7116 if (itype)
7117 return itype;
7120 itype = make_node (INTEGER_TYPE);
7121 TYPE_PRECISION (itype) = precision;
7123 if (unsignedp)
7124 fixup_unsigned_type (itype);
7125 else
7126 fixup_signed_type (itype);
7128 inchash::hash hstate;
7129 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7130 ret = type_hash_canon (hstate.end (), itype);
7131 if (precision <= MAX_INT_CACHED_PREC)
7132 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7134 return ret;
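/* Illustration (hypothetical helper): the 24-bit unsigned type a front
   end might use for an "unsigned int x : 24" bit-field; precisions up to
   MAX_INT_CACHED_PREC are served from the cache on repeat calls.  */

static bool
sketch_nonstandard_int_type_is_cached (void)
{
  tree t1 = build_nonstandard_integer_type (24, 1 /* unsignedp */);
  tree t2 = build_nonstandard_integer_type (24, 1 /* unsignedp */);
  return t1 == t2;  /* true for cached precisions */
}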
7137 #define MAX_BOOL_CACHED_PREC \
7138 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7139 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7141 /* Builds a boolean type of precision PRECISION.
7142 Used for boolean vectors to choose proper vector element size. */
7143 tree
7144 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7146 tree type;
7148 if (precision <= MAX_BOOL_CACHED_PREC)
7150 type = nonstandard_boolean_type_cache[precision];
7151 if (type)
7152 return type;
7155 type = make_node (BOOLEAN_TYPE);
7156 TYPE_PRECISION (type) = precision;
7157 fixup_signed_type (type);
7159 if (precision <= MAX_BOOL_CACHED_PREC)
7160 nonstandard_boolean_type_cache[precision] = type;
7162 return type;
7165 static GTY(()) vec<tree, va_gc> *bitint_type_cache;
7167 /* Builds a signed or unsigned _BitInt(PRECISION) type. */
7168 tree
7169 build_bitint_type (unsigned HOST_WIDE_INT precision, int unsignedp)
7171 tree itype, ret;
7173 gcc_checking_assert (precision >= 1 + !unsignedp);
7175 if (unsignedp)
7176 unsignedp = MAX_INT_CACHED_PREC + 1;
7178 if (bitint_type_cache == NULL)
7179 vec_safe_grow_cleared (bitint_type_cache, 2 * MAX_INT_CACHED_PREC + 2);
7181 if (precision <= MAX_INT_CACHED_PREC)
7183 itype = (*bitint_type_cache)[precision + unsignedp];
7184 if (itype)
7185 return itype;
7188 itype = make_node (BITINT_TYPE);
7189 TYPE_PRECISION (itype) = precision;
7191 if (unsignedp)
7192 fixup_unsigned_type (itype);
7193 else
7194 fixup_signed_type (itype);
7196 inchash::hash hstate;
7197 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7198 ret = type_hash_canon (hstate.end (), itype);
7199 if (precision <= MAX_INT_CACHED_PREC)
7200 (*bitint_type_cache)[precision + unsignedp] = ret;
7202 return ret;
7205 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7206 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7207 is true, reuse such a type that has already been constructed. */
7209 static tree
7210 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7212 tree itype = make_node (INTEGER_TYPE);
7214 TREE_TYPE (itype) = type;
7216 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7217 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7219 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7220 SET_TYPE_MODE (itype, TYPE_MODE (type));
7221 TYPE_SIZE (itype) = TYPE_SIZE (type);
7222 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7223 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7224 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7225 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7227 if (!shared)
7228 return itype;
7230 if ((TYPE_MIN_VALUE (itype)
7231 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7232 || (TYPE_MAX_VALUE (itype)
7233 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7235 /* Since we cannot reliably merge this type, we need to compare it using
7236 structural equality checks. */
7237 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7238 return itype;
7241 hashval_t hash = type_hash_canon_hash (itype);
7242 itype = type_hash_canon (hash, itype);
7244 return itype;
7247 /* Wrapper around build_range_type_1 with SHARED set to true. */
7249 tree
7250 build_range_type (tree type, tree lowval, tree highval)
7252 return build_range_type_1 (type, lowval, highval, true);
7255 /* Wrapper around build_range_type_1 with SHARED set to false. */
7257 tree
7258 build_nonshared_range_type (tree type, tree lowval, tree highval)
7260 return build_range_type_1 (type, lowval, highval, false);
7263 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7264 MAXVAL should be the maximum value in the domain
7265 (one less than the length of the array).
7267 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7268 We don't enforce this limit; that is up to the caller (e.g. a language front end).
7269 The limit exists because the result is a signed type and we don't handle
7270 sizes that use more than one HOST_WIDE_INT. */
7272 tree
7273 build_index_type (tree maxval)
7275 return build_range_type (sizetype, size_zero_node, maxval);
7278 /* Return true if the debug information for TYPE, a subtype, should be emitted
7279 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7280 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7281 debug info and doesn't reflect the source code. */
7283 bool
7284 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7286 tree base_type = TREE_TYPE (type), low, high;
7288 /* Subrange types have a base type which is an integral type. */
7289 if (!INTEGRAL_TYPE_P (base_type))
7290 return false;
7292 /* Get the real bounds of the subtype. */
7293 if (lang_hooks.types.get_subrange_bounds)
7294 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7295 else
7297 low = TYPE_MIN_VALUE (type);
7298 high = TYPE_MAX_VALUE (type);
7301 /* If the type and its base type have the same representation and the same
7302 name, then the type is not a subrange but a copy of the base type. */
7303 if ((TREE_CODE (base_type) == INTEGER_TYPE
7304 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7305 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7306 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7307 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7308 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7309 return false;
7311 if (lowval)
7312 *lowval = low;
7313 if (highval)
7314 *highval = high;
7315 return true;
7318 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7319 and number of elements specified by the range of values of INDEX_TYPE.
7320 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7321 If SHARED is true, reuse such a type that has already been constructed.
7322 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7324 tree
7325 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7326 bool shared, bool set_canonical)
7328 tree t;
7330 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7332 error ("arrays of functions are not meaningful");
7333 elt_type = integer_type_node;
7336 t = make_node (ARRAY_TYPE);
7337 TREE_TYPE (t) = elt_type;
7338 TYPE_DOMAIN (t) = index_type;
7339 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7340 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7341 layout_type (t);
7343 if (shared)
7345 hashval_t hash = type_hash_canon_hash (t);
7346 t = type_hash_canon (hash, t);
7349 if (TYPE_CANONICAL (t) == t && set_canonical)
7351 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7352 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7353 || in_lto_p)
7354 SET_TYPE_STRUCTURAL_EQUALITY (t);
7355 else if (TYPE_CANONICAL (elt_type) != elt_type
7356 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7357 TYPE_CANONICAL (t)
7358 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7359 index_type
7360 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7361 typeless_storage, shared, set_canonical);
7364 return t;
7367 /* Wrapper around build_array_type_1 with SHARED set to true. */
7369 tree
7370 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7372 return
7373 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7376 /* Wrapper around build_array_type_1 with SHARED set to false. */
7378 tree
7379 build_nonshared_array_type (tree elt_type, tree index_type)
7381 return build_array_type_1 (elt_type, index_type, false, false, true);
7384 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7385 sizetype. */
7387 tree
7388 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7390 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
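/* Illustration (hypothetical helper): building "int[10]".  The index
   type's domain runs 0..9, and since shared array types are
   hash-canonicalized, build_array_type_nelts (integer_type_node, 10)
   would yield the same node.  */

static tree
sketch_int_array_of_ten (void)
{
  return build_array_type (integer_type_node,
                           build_index_type (size_int (9)));
}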
7393 /* Computes the canonical argument types from the argument type list
7394 ARGTYPES.
7396 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7397 on entry to this function, or if any of the ARGTYPES are
7398 structural.
7400 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7401 true on entry to this function, or if any of the ARGTYPES are
7402 non-canonical.
7404 Returns a canonical argument list, which may be ARGTYPES when the
7405 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7406 true) or would not differ from ARGTYPES. */
7408 static tree
7409 maybe_canonicalize_argtypes (tree argtypes,
7410 bool *any_structural_p,
7411 bool *any_noncanonical_p)
7413 tree arg;
7414 bool any_noncanonical_argtypes_p = false;
7416 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7418 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7419 /* Fail gracefully by stating that the type is structural. */
7420 *any_structural_p = true;
7421 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7422 *any_structural_p = true;
7423 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7424 || TREE_PURPOSE (arg))
7425 /* If the argument has a default argument, we consider it
7426 non-canonical even though the type itself is canonical.
7427 That way, different variants of function and method types
7428 with default arguments will all point to the variant with
7429 no defaults as their canonical type. */
7430 any_noncanonical_argtypes_p = true;
7433 if (*any_structural_p)
7434 return argtypes;
7436 if (any_noncanonical_argtypes_p)
7438 /* Build the canonical list of argument types. */
7439 tree canon_argtypes = NULL_TREE;
7440 bool is_void = false;
7442 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7444 if (arg == void_list_node)
7445 is_void = true;
7446 else
7447 canon_argtypes = tree_cons (NULL_TREE,
7448 TYPE_CANONICAL (TREE_VALUE (arg)),
7449 canon_argtypes);
7452 canon_argtypes = nreverse (canon_argtypes);
7453 if (is_void)
7454 canon_argtypes = chainon (canon_argtypes, void_list_node);
7456 /* There is a non-canonical type. */
7457 *any_noncanonical_p = true;
7458 return canon_argtypes;
7461 /* The canonical argument types are the same as ARGTYPES. */
7462 return argtypes;
7465 /* Construct, lay out and return
7466 the type of functions returning type VALUE_TYPE
7467 given arguments of types ARG_TYPES.
7468 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7469 are data type nodes for the arguments of the function.
7470 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7471 variable-arguments function with (...) prototype (no named arguments).
7472 If such a type has already been constructed, reuse it. */
7474 tree
7475 build_function_type (tree value_type, tree arg_types,
7476 bool no_named_args_stdarg_p)
7478 tree t;
7479 inchash::hash hstate;
7480 bool any_structural_p, any_noncanonical_p;
7481 tree canon_argtypes;
7483 gcc_assert (arg_types != error_mark_node);
7485 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7487 error ("function return type cannot be function");
7488 value_type = integer_type_node;
7491 /* Make a node of the sort we want. */
7492 t = make_node (FUNCTION_TYPE);
7493 TREE_TYPE (t) = value_type;
7494 TYPE_ARG_TYPES (t) = arg_types;
7495 if (no_named_args_stdarg_p)
7497 gcc_assert (arg_types == NULL_TREE);
7498 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7501 /* If we already have such a type, use the old one. */
7502 hashval_t hash = type_hash_canon_hash (t);
7503 t = type_hash_canon (hash, t);
7505 /* Set up the canonical type. */
7506 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7507 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7508 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7509 &any_structural_p,
7510 &any_noncanonical_p);
7511 if (any_structural_p)
7512 SET_TYPE_STRUCTURAL_EQUALITY (t);
7513 else if (any_noncanonical_p)
7514 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7515 canon_argtypes);
7517 if (!COMPLETE_TYPE_P (t))
7518 layout_type (t);
7519 return t;
7522 /* Build a function type. The RETURN_TYPE is the type returned by the
7523 function. If VAARGS is set, no void_type_node is appended to the
7524 list. ARGP must always be terminated by a NULL_TREE. */
7526 static tree
7527 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7529 tree t, args, last;
7531 t = va_arg (argp, tree);
7532 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7533 args = tree_cons (NULL_TREE, t, args);
7535 if (vaargs)
7537 last = args;
7538 if (args != NULL_TREE)
7539 args = nreverse (args);
7540 gcc_assert (last != void_list_node);
7542 else if (args == NULL_TREE)
7543 args = void_list_node;
7544 else
7546 last = args;
7547 args = nreverse (args);
7548 TREE_CHAIN (last) = void_list_node;
7550 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7552 return args;
7555 /* Build a function type. The RETURN_TYPE is the type returned by the
7556 function. If additional arguments are provided, they are
7557 additional argument types. The list of argument types must always
7558 be terminated by NULL_TREE. */
7560 tree
7561 build_function_type_list (tree return_type, ...)
7563 tree args;
7564 va_list p;
7566 va_start (p, return_type);
7567 args = build_function_type_list_1 (false, return_type, p);
7568 va_end (p);
7569 return args;
7572 /* Build a variable argument function type. The RETURN_TYPE is the
7573 type returned by the function. If additional arguments are provided,
7574 they are additional argument types. The list of argument types must
7575 always be terminated by NULL_TREE. */
7577 tree
7578 build_varargs_function_type_list (tree return_type, ...)
7580 tree args;
7581 va_list p;
7583 va_start (p, return_type);
7584 args = build_function_type_list_1 (true, return_type, p);
7585 va_end (p);
7587 return args;
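/* Illustrations (hypothetical helpers): building "double (double, double)"
   with the fixed-argument variant and "int (const char *, ...)" with the
   varargs variant; both argument lists are terminated by NULL_TREE.  */

static tree
sketch_pow_like_type (void)
{
  return build_function_type_list (double_type_node,
                                   double_type_node, double_type_node,
                                   NULL_TREE);
}

static tree
sketch_printf_like_type (void)
{
  tree const_char_ptr
    = build_pointer_type (build_qualified_type (char_type_node,
                                                TYPE_QUAL_CONST));
  return build_varargs_function_type_list (integer_type_node,
                                           const_char_ptr, NULL_TREE);
}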
7590 /* Build a function type. RETURN_TYPE is the type returned by the
7591 function; VAARGS indicates whether the function takes varargs. The
7592 function takes N named arguments, the types of which are provided in
7593 ARG_TYPES. */
7595 static tree
7596 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7597 tree *arg_types)
7599 int i;
7600 tree t = vaargs ? NULL_TREE : void_list_node;
7602 for (i = n - 1; i >= 0; i--)
7603 t = tree_cons (NULL_TREE, arg_types[i], t);
7605 return build_function_type (return_type, t, vaargs && n == 0);
7608 /* Build a function type. RETURN_TYPE is the type returned by the
7609 function. The function takes N named arguments, the types of which
7610 are provided in ARG_TYPES. */
7612 tree
7613 build_function_type_array (tree return_type, int n, tree *arg_types)
7615 return build_function_type_array_1 (false, return_type, n, arg_types);
7618 /* Build a variable argument function type. RETURN_TYPE is the type
7619 returned by the function. The function takes N named arguments, the
7620 types of which are provided in ARG_TYPES. */
7622 tree
7623 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7625 return build_function_type_array_1 (true, return_type, n, arg_types);
7628 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7629 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7630 for the method. An implicit additional parameter (of type
7631 pointer-to-BASETYPE) is added to the ARGTYPES. */
7633 tree
7634 build_method_type_directly (tree basetype,
7635 tree rettype,
7636 tree argtypes)
7638 tree t;
7639 tree ptype;
7640 bool any_structural_p, any_noncanonical_p;
7641 tree canon_argtypes;
7643 /* Make a node of the sort we want. */
7644 t = make_node (METHOD_TYPE);
7646 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7647 TREE_TYPE (t) = rettype;
7648 ptype = build_pointer_type (basetype);
7650 /* The actual arglist for this function includes a "hidden" argument
7651 which is "this". Put it into the list of argument types. */
7652 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7653 TYPE_ARG_TYPES (t) = argtypes;
7655 /* If we already have such a type, use the old one. */
7656 hashval_t hash = type_hash_canon_hash (t);
7657 t = type_hash_canon (hash, t);
7659 /* Set up the canonical type. */
7660 any_structural_p
7661 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7662 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7663 any_noncanonical_p
7664 = (TYPE_CANONICAL (basetype) != basetype
7665 || TYPE_CANONICAL (rettype) != rettype);
7666 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7667 &any_structural_p,
7668 &any_noncanonical_p);
7669 if (any_structural_p)
7670 SET_TYPE_STRUCTURAL_EQUALITY (t);
7671 else if (any_noncanonical_p)
7672 TYPE_CANONICAL (t)
7673 = build_method_type_directly (TYPE_CANONICAL (basetype),
7674 TYPE_CANONICAL (rettype),
7675 canon_argtypes);
7676 if (!COMPLETE_TYPE_P (t))
7677 layout_type (t);
7679 return t;
7682 /* Construct, lay out and return the type of methods belonging to class
7683 BASETYPE and whose arguments and values are described by TYPE.
7684 If that type exists already, reuse it.
7685 TYPE must be a FUNCTION_TYPE node. */
7687 tree
7688 build_method_type (tree basetype, tree type)
7690 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7692 return build_method_type_directly (basetype,
7693 TREE_TYPE (type),
7694 TYPE_ARG_TYPES (type));
7697 /* Construct, lay out and return the type of offsets to a value
7698 of type TYPE, within an object of type BASETYPE.
7699 If a suitable offset type exists already, reuse it. */
7701 tree
7702 build_offset_type (tree basetype, tree type)
7704 tree t;
7706 /* Make a node of the sort we want. */
7707 t = make_node (OFFSET_TYPE);
7709 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7710 TREE_TYPE (t) = type;
7712 /* If we already have such a type, use the old one. */
7713 hashval_t hash = type_hash_canon_hash (t);
7714 t = type_hash_canon (hash, t);
7716 if (!COMPLETE_TYPE_P (t))
7717 layout_type (t);
7719 if (TYPE_CANONICAL (t) == t)
7721 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7722 || TYPE_STRUCTURAL_EQUALITY_P (type))
7723 SET_TYPE_STRUCTURAL_EQUALITY (t);
7724 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7725 || TYPE_CANONICAL (type) != type)
7726 TYPE_CANONICAL (t)
7727 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7728 TYPE_CANONICAL (type));
7731 return t;
7734 /* Create a complex type whose components are COMPONENT_TYPE.
7736 If NAMED is true, the type is given a TYPE_NAME. We do not always
7737 do so because this creates a DECL node and thus makes the DECL_UIDs
7738 dependent on the type canonicalization hashtable, which is GC-ed,
7739 so the DECL_UIDs would not be stable wrt garbage collection. */
7741 tree
7742 build_complex_type (tree component_type, bool named)
7744 gcc_assert (INTEGRAL_TYPE_P (component_type)
7745 || SCALAR_FLOAT_TYPE_P (component_type)
7746 || FIXED_POINT_TYPE_P (component_type));
7748 /* Make a node of the sort we want. */
7749 tree probe = make_node (COMPLEX_TYPE);
7751 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7753 /* If we already have such a type, use the old one. */
7754 hashval_t hash = type_hash_canon_hash (probe);
7755 tree t = type_hash_canon (hash, probe);
7757 if (t == probe)
7759 /* We created a new type. The hash insertion will have laid
7760 out the type. We need to check the canonicalization and
7761 maybe set the name. */
7762 gcc_checking_assert (COMPLETE_TYPE_P (t)
7763 && !TYPE_NAME (t)
7764 && TYPE_CANONICAL (t) == t);
7766 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7767 SET_TYPE_STRUCTURAL_EQUALITY (t);
7768 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7769 TYPE_CANONICAL (t)
7770 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7772 /* We need to create a name, since complex is a fundamental type. */
7773 if (named)
7775 const char *name = NULL;
7777 if (TREE_TYPE (t) == char_type_node)
7778 name = "complex char";
7779 else if (TREE_TYPE (t) == signed_char_type_node)
7780 name = "complex signed char";
7781 else if (TREE_TYPE (t) == unsigned_char_type_node)
7782 name = "complex unsigned char";
7783 else if (TREE_TYPE (t) == short_integer_type_node)
7784 name = "complex short int";
7785 else if (TREE_TYPE (t) == short_unsigned_type_node)
7786 name = "complex short unsigned int";
7787 else if (TREE_TYPE (t) == integer_type_node)
7788 name = "complex int";
7789 else if (TREE_TYPE (t) == unsigned_type_node)
7790 name = "complex unsigned int";
7791 else if (TREE_TYPE (t) == long_integer_type_node)
7792 name = "complex long int";
7793 else if (TREE_TYPE (t) == long_unsigned_type_node)
7794 name = "complex long unsigned int";
7795 else if (TREE_TYPE (t) == long_long_integer_type_node)
7796 name = "complex long long int";
7797 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7798 name = "complex long long unsigned int";
7800 if (name != NULL)
7801 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7802 get_identifier (name), t);
7806 return build_qualified_type (t, TYPE_QUALS (component_type));
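/* Illustration (hypothetical helper): qualifiers on the component type
   propagate to the complex type through the build_qualified_type call
   above, so a const double component yields "const _Complex double".  */

static tree
sketch_const_complex_double (void)
{
  tree const_double
    = build_qualified_type (double_type_node, TYPE_QUAL_CONST);
  return build_complex_type (const_double);
}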
7809 /* If TYPE is a real or complex floating-point type and the target
7810 does not directly support arithmetic on TYPE then return the wider
7811 type to be used for arithmetic on TYPE. Otherwise, return
7812 NULL_TREE. */
7814 tree
7815 excess_precision_type (tree type)
7817 /* The target can give two different responses to the question of
7818 which excess precision mode it would like depending on whether we
7819 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7821 enum excess_precision_type requested_type
7822 = (flag_excess_precision == EXCESS_PRECISION_FAST
7823 ? EXCESS_PRECISION_TYPE_FAST
7824 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7825 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7827 enum flt_eval_method target_flt_eval_method
7828 = targetm.c.excess_precision (requested_type);
7830 /* The target should not ask for unpredictable float evaluation (though
7831 it might implicitly advertise that the evaluation is unpredictable,
7832 but we don't care about that here; it will have been reported
7833 elsewhere). If it does ask for unpredictable evaluation, we have
7834 nothing to do here. */
7835 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7837 /* Nothing to do. The target has asked for all types we know about
7838 to be computed with their native precision and range. */
7839 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7840 return NULL_TREE;
7842 /* The target will promote this type in a target-dependent way, so excess
7843 precision ought to leave it alone. */
7844 if (targetm.promoted_type (type) != NULL_TREE)
7845 return NULL_TREE;
7847 machine_mode float16_type_mode = (float16_type_node
7848 ? TYPE_MODE (float16_type_node)
7849 : VOIDmode);
7850 machine_mode bfloat16_type_mode = (bfloat16_type_node
7851 ? TYPE_MODE (bfloat16_type_node)
7852 : VOIDmode);
7853 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7854 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7856 switch (TREE_CODE (type))
7858 case REAL_TYPE:
7860 machine_mode type_mode = TYPE_MODE (type);
7861 switch (target_flt_eval_method)
7863 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7864 if (type_mode == float16_type_mode
7865 || type_mode == bfloat16_type_mode)
7866 return float_type_node;
7867 break;
7868 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7869 if (type_mode == float16_type_mode
7870 || type_mode == bfloat16_type_mode
7871 || type_mode == float_type_mode)
7872 return double_type_node;
7873 break;
7874 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7875 if (type_mode == float16_type_mode
7876 || type_mode == bfloat16_type_mode
7877 || type_mode == float_type_mode
7878 || type_mode == double_type_mode)
7879 return long_double_type_node;
7880 break;
7881 default:
7882 gcc_unreachable ();
7884 break;
7886 case COMPLEX_TYPE:
7888 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7889 return NULL_TREE;
7890 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7891 switch (target_flt_eval_method)
7893 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7894 if (type_mode == float16_type_mode
7895 || type_mode == bfloat16_type_mode)
7896 return complex_float_type_node;
7897 break;
7898 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7899 if (type_mode == float16_type_mode
7900 || type_mode == bfloat16_type_mode
7901 || type_mode == float_type_mode)
7902 return complex_double_type_node;
7903 break;
7904 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7905 if (type_mode == float16_type_mode
7906 || type_mode == bfloat16_type_mode
7907 || type_mode == float_type_mode
7908 || type_mode == double_type_mode)
7909 return complex_long_double_type_node;
7910 break;
7911 default:
7912 gcc_unreachable ();
7914 break;
7916 default:
7917 break;
7920 return NULL_TREE;
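/* Worked example (illustration only): on a target whose
   targetm.c.excess_precision hook answers
   FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE (e.g. x87 with
   -fexcess-precision=standard), and assuming the type is not otherwise
   promoted, excess_precision_type (float_type_node) returns
   long_double_type_node and excess_precision_type
   (complex_double_type_node) returns complex_long_double_type_node;
   with FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16 it returns NULL_TREE.  */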
7923 /* Return OP, stripped of any conversions to wider types as much as is safe.
7924 Converting the value back to OP's type makes a value equivalent to OP.
7926 If FOR_TYPE is nonzero, we return a value which, if converted to
7927 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7929 OP must have integer, real or enumeral type. Pointers are not allowed!
7931 There are some cases where the obvious value we could return
7932 would regenerate to OP if converted to OP's type,
7933 but would not extend like OP to wider types.
7934 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7935 For example, if OP is (unsigned short)(signed char)-1,
7936 we avoid returning (signed char)-1 if FOR_TYPE is int,
7937 even though extending that to an unsigned short would regenerate OP,
7938 since the result of extending (signed char)-1 to (int)
7939 is different from (int) OP. */
7941 tree
7942 get_unwidened (tree op, tree for_type)
7944 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7945 tree type = TREE_TYPE (op);
7946 unsigned final_prec
7947 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7948 int uns
7949 = (for_type != 0 && for_type != type
7950 && final_prec > TYPE_PRECISION (type)
7951 && TYPE_UNSIGNED (type));
7952 tree win = op;
7954 while (CONVERT_EXPR_P (op))
7956 int bitschange;
7958 /* TYPE_PRECISION on vector types has different meaning
7959 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7960 so avoid them here. */
7961 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7962 break;
7964 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7965 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7967 /* Truncations are many-to-one and so cannot be removed,
7968 unless we are later going to truncate down even farther. */
7969 if (bitschange < 0
7970 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7971 break;
7973 /* See what's inside this conversion. If we decide to strip it,
7974 we will set WIN. */
7975 op = TREE_OPERAND (op, 0);
7977 /* If we have not stripped any zero-extensions (uns is 0),
7978 we can strip any kind of extension.
7979 If we have previously stripped a zero-extension,
7980 only zero-extensions can safely be stripped.
7981 Any extension can be stripped if the bits it would produce
7982 are all going to be discarded later by truncating to FOR_TYPE. */
7984 if (bitschange > 0)
7986 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7987 win = op;
7988 /* TYPE_UNSIGNED says whether this is a zero-extension.
7989 Let's avoid computing it if it does not affect WIN
7990 and if UNS will not be needed again. */
7991 if ((uns
7992 || CONVERT_EXPR_P (op))
7993 && TYPE_UNSIGNED (TREE_TYPE (op)))
7995 uns = 1;
7996 win = op;
8001 /* If we finally reach a constant, see if it fits in something
8002 smaller and in that case convert it. */
8003 if (TREE_CODE (win) == INTEGER_CST)
8005 tree wtype = TREE_TYPE (win);
8006 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8007 if (for_type)
8008 prec = MAX (prec, final_prec);
8009 if (prec < TYPE_PRECISION (wtype))
8011 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8012 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8013 win = fold_convert (t, win);
8017 return win;
8020 /* Return OP or a simpler expression for a narrower value
8021 which can be sign-extended or zero-extended to give back OP.
8022 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8023 or 0 if the value should be sign-extended. */
8025 tree
8026 get_narrower (tree op, int *unsignedp_ptr)
8028 int uns = 0;
8029 bool first = true;
8030 tree win = op;
8031 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8033 if (TREE_CODE (op) == COMPOUND_EXPR)
8036 op = TREE_OPERAND (op, 1);
8037 while (TREE_CODE (op) == COMPOUND_EXPR);
8038 tree ret = get_narrower (op, unsignedp_ptr);
8039 if (ret == op)
8040 return win;
8041 auto_vec <tree, 16> v;
8042 unsigned int i;
8043 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8044 op = TREE_OPERAND (op, 1))
8045 v.safe_push (op);
8046 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8047 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8048 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8049 ret);
8050 return ret;
8052 while (TREE_CODE (op) == NOP_EXPR)
8054 int bitschange
8055 = (TYPE_PRECISION (TREE_TYPE (op))
8056 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8058 /* Truncations are many-one so cannot be removed. */
8059 if (bitschange < 0)
8060 break;
8062 /* See what's inside this conversion. If we decide to strip it,
8063 we will set WIN. */
8065 if (bitschange > 0)
8067 op = TREE_OPERAND (op, 0);
8068 /* An extension: the outermost one can be stripped,
8069 but remember whether it is zero or sign extension. */
8070 if (first)
8071 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8072 /* Otherwise, if a sign extension has been stripped,
8073 only sign extensions can now be stripped;
8074 if a zero extension has been stripped, only zero-extensions. */
8075 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8076 break;
8077 first = false;
8079 else /* bitschange == 0 */
8081 /* A change in nominal type can always be stripped, but we must
8082 preserve the unsignedness. */
8083 if (first)
8084 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8085 first = false;
8086 op = TREE_OPERAND (op, 0);
8087 /* Keep trying to narrow, but don't assign op to win if it
8088 would turn an integral type into something else. */
8089 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8090 continue;
8093 win = op;
8096 if (TREE_CODE (op) == COMPONENT_REF
8097 /* Since type_for_size always gives an integer type. */
8098 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8099 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8100 /* Ensure field is laid out already. */
8101 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8102 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8104 unsigned HOST_WIDE_INT innerprec
8105 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8106 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8107 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8108 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8110 /* We can get this structure field in a narrower type that fits it,
8111 but the resulting extension to its nominal type (a fullword type)
8112 must satisfy the same conditions as for other extensions.
8114 Do this only for fields that are aligned (not bit-fields),
8115 because when bit-field insns will be used there is no
8116 advantage in doing this. */
8118 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8119 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8120 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8121 && type != 0)
8123 if (first)
8124 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8125 win = fold_convert (type, op);
8129 *unsignedp_ptr = uns;
8130 return win;
8133 /* Return true if integer constant C has a value that is permissible
8134 for TYPE, an integral type. */
8136 bool
8137 int_fits_type_p (const_tree c, const_tree type)
8139 tree type_low_bound, type_high_bound;
8140 bool ok_for_low_bound, ok_for_high_bound;
8141 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8143 /* Non-standard boolean types can have arbitrary precision but various
8144 transformations assume that they can only take values 0 and +/-1. */
8145 if (TREE_CODE (type) == BOOLEAN_TYPE)
8146 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8148 retry:
8149 type_low_bound = TYPE_MIN_VALUE (type);
8150 type_high_bound = TYPE_MAX_VALUE (type);
8152 /* If at least one bound of the type is a constant integer, we can check
8153 ourselves and maybe make a decision. If no such decision is possible, but
8154 this type is a subtype, try checking against that. Otherwise, use
8155 fits_to_tree_p, which checks against the precision.
8157 Compute the status for each possibly constant bound, returning false as
8158 soon as we see that one of them is violated. Use ok_for_xxx_bound to
8159 record whether the corresponding bound is a constant that C is known to
8160 satisfy; it remains false when that bound is not a constant. */
8162 /* Check if c >= type_low_bound. */
8163 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8165 if (tree_int_cst_lt (c, type_low_bound))
8166 return false;
8167 ok_for_low_bound = true;
8169 else
8170 ok_for_low_bound = false;
8172 /* Check if c <= type_high_bound. */
8173 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8175 if (tree_int_cst_lt (type_high_bound, c))
8176 return false;
8177 ok_for_high_bound = true;
8179 else
8180 ok_for_high_bound = false;
8182 /* If the constant fits both bounds, the result is known. */
8183 if (ok_for_low_bound && ok_for_high_bound)
8184 return true;
8186 /* Perform some generic filtering which may allow making a decision
8187 even if the bounds are not constant. First, negative integers
8188 never fit in unsigned types. */
8189 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8190 return false;
8192 /* Second, narrower types always fit in wider ones. */
8193 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8194 return true;
8196 /* Third, unsigned integers with top bit set never fit signed types. */
8197 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8199 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8200 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8202 /* When a tree_cst is converted to a wide-int, the precision
8203 is taken from the type. However, if the precision of the
8204 mode underneath the type is smaller than that, it is
8205 possible that the value will not fit. The test below
8206 fails if any bit is set between the sign bit of the
8207 underlying mode and the top bit of the type. */
8208 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8209 return false;
8211 else if (wi::neg_p (wi::to_wide (c)))
8212 return false;
8215 /* If we haven't been able to decide at this point, there is nothing more we
8216 can check ourselves here. Look at the base type if we have one and it
8217 has the same precision. */
8218 if (TREE_CODE (type) == INTEGER_TYPE
8219 && TREE_TYPE (type) != 0
8220 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8222 type = TREE_TYPE (type);
8223 goto retry;
8226 /* Or to fits_to_tree_p, if nothing else. */
8227 return wi::fits_to_tree_p (wi::to_wide (c), type);
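A brief usage sketch of the predicate above (illustrative only; it relies just on the common type nodes built later in this file, and assumes the usual 16-bit short and 8-bit char):

    tree c = build_int_cst (integer_type_node, 300);
    /* 300 needs nine value bits, so it fits a short but not an
       unsigned char.  */
    bool fits_short = int_fits_type_p (c, short_integer_type_node); /* true  */
    bool fits_uchar = int_fits_type_p (c, unsigned_char_type_node); /* false */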
8230 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8231 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8232 represented (assuming two's-complement arithmetic) within the bit
8233 precision of the type are returned instead. */
8235 void
8236 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8238 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8239 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8240 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8241 else
8243 if (TYPE_UNSIGNED (type))
8244 mpz_set_ui (min, 0);
8245 else
8247 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8248 wi::to_mpz (mn, min, SIGNED);
8252 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8253 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8254 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8255 else
8257 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8258 wi::to_mpz (mn, max, TYPE_SIGN (type));
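A sketch of the two cases described above (hypothetical caller code; GMP's mpz_init/mpz_clear handle the buffers):

    mpz_t lo, hi;
    mpz_init (lo);
    mpz_init (hi);
    /* Constant bounds are used directly: on the usual 8-bit-char targets
       this yields lo == 0 and hi == 255.  */
    get_type_static_bounds (unsigned_char_type_node, lo, hi);
    /* For a POINTER_TYPE the constant bounds are not consulted; the full
       range of its precision is used, i.e. [0, 2^POINTER_SIZE - 1].  */
    get_type_static_bounds (ptr_type_node, lo, hi);
    mpz_clear (lo);
    mpz_clear (hi);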
8262 /* Return true if VAR is an automatic variable. */
8264 bool
8265 auto_var_p (const_tree var)
8267 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8268 || TREE_CODE (var) == PARM_DECL)
8269 && ! TREE_STATIC (var))
8270 || TREE_CODE (var) == RESULT_DECL);
8273 /* Return true if VAR is an automatic variable defined in function FN. */
8275 bool
8276 auto_var_in_fn_p (const_tree var, const_tree fn)
8278 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8279 && (auto_var_p (var)
8280 || TREE_CODE (var) == LABEL_DECL));
8283 /* Subprogram of following function. Called by walk_tree.
8285 Return *TP if it is an automatic variable or parameter of the
8286 function passed in as DATA. */
8288 static tree
8289 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8291 tree fn = (tree) data;
8293 if (TYPE_P (*tp))
8294 *walk_subtrees = 0;
8296 else if (DECL_P (*tp)
8297 && auto_var_in_fn_p (*tp, fn))
8298 return *tp;
8300 return NULL_TREE;
8303 /* Returns true if T is, contains, or refers to a type with variable
8304 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8305 arguments, but not the return type. If FN is nonzero, only return
8306 true if a modifier of the type or position of FN is a variable or
8307 parameter inside FN.
8309 This concept is more general than that of C99 'variably modified types':
8310 in C99, a struct type is never variably modified because a VLA may not
8311 appear as a structure member. However, in GNU C, code like:
8313 struct S { int i[f()]; };
8315 is valid, and other languages may define similar constructs. */
8317 bool
8318 variably_modified_type_p (tree type, tree fn)
8320 tree t;
8322 /* Test if T is either variable (if FN is zero) or an expression containing
8323 a variable in FN. If TYPE isn't gimplified, return true also if
8324 gimplify_one_sizepos would gimplify the expression into a local
8325 variable. */
8326 #define RETURN_TRUE_IF_VAR(T) \
8327 do { tree _t = (T); \
8328 if (_t != NULL_TREE \
8329 && _t != error_mark_node \
8330 && !CONSTANT_CLASS_P (_t) \
8331 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8332 && (!fn \
8333 || (!TYPE_SIZES_GIMPLIFIED (type) \
8334 && (TREE_CODE (_t) != VAR_DECL \
8335 && !CONTAINS_PLACEHOLDER_P (_t))) \
8336 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8337 return true; } while (0)
8339 if (type == error_mark_node)
8340 return false;
8342 /* If TYPE itself has variable size, it is variably modified. */
8343 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8344 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8346 switch (TREE_CODE (type))
8348 case POINTER_TYPE:
8349 case REFERENCE_TYPE:
8350 case VECTOR_TYPE:
8351 /* Ada can have pointer types referring to themselves indirectly. */
8352 if (TREE_VISITED (type))
8353 return false;
8354 TREE_VISITED (type) = true;
8355 if (variably_modified_type_p (TREE_TYPE (type), fn))
8357 TREE_VISITED (type) = false;
8358 return true;
8360 TREE_VISITED (type) = false;
8361 break;
8363 case FUNCTION_TYPE:
8364 case METHOD_TYPE:
8365 /* If TYPE is a function type, it is variably modified if the
8366 return type is variably modified. */
8367 if (variably_modified_type_p (TREE_TYPE (type), fn))
8368 return true;
8369 break;
8371 case INTEGER_TYPE:
8372 case REAL_TYPE:
8373 case FIXED_POINT_TYPE:
8374 case ENUMERAL_TYPE:
8375 case BOOLEAN_TYPE:
8376 /* Scalar types are variably modified if their end points
8377 aren't constant. */
8378 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8379 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8380 break;
8382 case RECORD_TYPE:
8383 case UNION_TYPE:
8384 case QUAL_UNION_TYPE:
8385 /* We can't see if any of the fields are variably-modified by the
8386 definition we normally use, since that would produce infinite
8387 recursion via pointers. */
8388 /* This is variably modified if some field's type is. */
8389 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8390 if (TREE_CODE (t) == FIELD_DECL)
8392 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8393 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8394 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8396 /* If the type is a qualified union, then the DECL_QUALIFIER
8397 of fields can also be an expression containing a variable. */
8398 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8399 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8401 /* If the field is a qualified union, then it's only a container
8402 for what's inside so we look into it. That's necessary in LTO
8403 mode because the sizes of the field tested above have been set
8404 to PLACEHOLDER_EXPRs by free_lang_data. */
8405 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8406 && variably_modified_type_p (TREE_TYPE (t), fn))
8407 return true;
8409 break;
8411 case ARRAY_TYPE:
8412 /* Do not call ourselves to avoid infinite recursion. This is
8413 variably modified if the element type is. */
8414 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8415 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8416 break;
8418 default:
8419 break;
8422 /* The current language may have other cases to check, but in general,
8423 all other types are not variably modified. */
8424 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8426 #undef RETURN_TRUE_IF_VAR
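To make the C99 vs. GNU C distinction above concrete, a small source-level illustration (hypothetical input, not part of this file):

    void f (int n)
    {
      int (*p)[n];               /* pointer to a VLA                */
      struct s { int a[n]; } v;  /* GNU C: field size depends on n  */
    }

Both the array type that P points to and the struct type of V are variably modified, so variably_modified_type_p returns true for them when FN is f's FUNCTION_DECL.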
8429 /* Given a DECL or TYPE, return the scope in which it was declared, or
8430 NULL_TREE if there is no containing scope. */
8432 tree
8433 get_containing_scope (const_tree t)
8435 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8438 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8440 const_tree
8441 get_ultimate_context (const_tree decl)
8443 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8445 if (TREE_CODE (decl) == BLOCK)
8446 decl = BLOCK_SUPERCONTEXT (decl);
8447 else
8448 decl = get_containing_scope (decl);
8450 return decl;
8453 /* Return the innermost context enclosing DECL that is
8454 a FUNCTION_DECL, or zero if none. */
8456 tree
8457 decl_function_context (const_tree decl)
8459 tree context;
8461 if (TREE_CODE (decl) == ERROR_MARK)
8462 return 0;
8464 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8465 where we look up the function at runtime. Such functions always take
8466 a first argument of type 'pointer to real context'.
8468 C++ should really be fixed to use DECL_CONTEXT for the real context,
8469 and use something else for the "virtual context". */
8470 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8471 context
8472 = TYPE_MAIN_VARIANT
8473 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8474 else
8475 context = DECL_CONTEXT (decl);
8477 while (context && TREE_CODE (context) != FUNCTION_DECL)
8479 if (TREE_CODE (context) == BLOCK)
8480 context = BLOCK_SUPERCONTEXT (context);
8481 else
8482 context = get_containing_scope (context);
8485 return context;
8488 /* Return the innermost context enclosing DECL that is
8489 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8490 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8492 tree
8493 decl_type_context (const_tree decl)
8495 tree context = DECL_CONTEXT (decl);
8497 while (context)
8498 switch (TREE_CODE (context))
8500 case NAMESPACE_DECL:
8501 case TRANSLATION_UNIT_DECL:
8502 return NULL_TREE;
8504 case RECORD_TYPE:
8505 case UNION_TYPE:
8506 case QUAL_UNION_TYPE:
8507 return context;
8509 case TYPE_DECL:
8510 case FUNCTION_DECL:
8511 context = DECL_CONTEXT (context);
8512 break;
8514 case BLOCK:
8515 context = BLOCK_SUPERCONTEXT (context);
8516 break;
8518 default:
8519 gcc_unreachable ();
8522 return NULL_TREE;
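For orientation, a small sketch (FNDECL is hypothetical) contrasting this with decl_function_context above:

    /* FNDECL is the FUNCTION_DECL of a C++ member function C::f, so its
       DECL_CONTEXT is the RECORD_TYPE for C.  */
    tree klass = decl_type_context (fndecl);     /* RECORD_TYPE for C      */
    tree outer = decl_function_context (fndecl); /* NULL_TREE: not nested  */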
8525 /* CALL is a CALL_EXPR. Return the declaration for the function
8526 called, or NULL_TREE if the called function cannot be
8527 determined. */
8529 tree
8530 get_callee_fndecl (const_tree call)
8532 tree addr;
8534 if (call == error_mark_node)
8535 return error_mark_node;
8537 /* It's invalid to call this function with anything but a
8538 CALL_EXPR. */
8539 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8541 /* The first operand to the CALL is the address of the function
8542 called. */
8543 addr = CALL_EXPR_FN (call);
8545 /* If there is no function, return early. */
8546 if (addr == NULL_TREE)
8547 return NULL_TREE;
8549 STRIP_NOPS (addr);
8551 /* If this is a readonly function pointer, extract its initial value. */
8552 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8553 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8554 && DECL_INITIAL (addr))
8555 addr = DECL_INITIAL (addr);
8557 /* If the address is just `&f' for some function `f', then we know
8558 that `f' is being called. */
8559 if (TREE_CODE (addr) == ADDR_EXPR
8560 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8561 return TREE_OPERAND (addr, 0);
8563 /* We couldn't figure out what was being called. */
8564 return NULL_TREE;
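A source-level illustration of the three outcomes handled above (hypothetical C input; the middle case assumes the usual GENERIC representation of a call through a const-qualified pointer):

    extern void g (void);
    static void (*const cp) (void) = g;
    static void (*vp) (void);
    void caller (void)
    {
      g ();    /* CALL_EXPR_FN is &g: the FUNCTION_DECL for g is returned  */
      cp ();   /* readonly pointer: DECL_INITIAL gives &g, so g again      */
      vp ();   /* target unknown at compile time: NULL_TREE                */
    }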
8567 /* Return true when CALL's arguments and return value match those of FNDECL,
8568 the decl of a builtin function. */
8570 static bool
8571 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8573 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8575 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8576 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8577 fndecl = decl;
8579 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8580 if (gimple_form
8581 ? !useless_type_conversion_p (TREE_TYPE (call),
8582 TREE_TYPE (TREE_TYPE (fndecl)))
8583 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8584 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8585 return false;
8587 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8588 unsigned nargs = call_expr_nargs (call);
8589 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8591 /* Variadic args follow. */
8592 if (!targs)
8593 return true;
8594 tree arg = CALL_EXPR_ARG (call, i);
8595 tree type = TREE_VALUE (targs);
8596 if (gimple_form
8597 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8598 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8600 /* For pointer arguments be more forgiving, e.g. due to
8601 FILE * vs. fileptr_type_node, or say char * vs. const char *
8602 differences etc. */
8603 if (!gimple_form
8604 && POINTER_TYPE_P (type)
8605 && POINTER_TYPE_P (TREE_TYPE (arg))
8606 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8607 continue;
8608 /* char/short integral arguments are promoted to int
8609 by several frontends if targetm.calls.promote_prototypes
8610 is true. Allow such promotion too. */
8611 if (INTEGRAL_TYPE_P (type)
8612 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8613 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8614 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8615 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8616 && (gimple_form
8617 ? useless_type_conversion_p (integer_type_node,
8618 TREE_TYPE (arg))
8619 : tree_nop_conversion_p (integer_type_node,
8620 TREE_TYPE (arg))))
8621 continue;
8622 return false;
8625 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8626 return false;
8627 return true;
8630 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8631 return the associated function code, otherwise return CFN_LAST. */
8633 combined_fn
8634 get_call_combined_fn (const_tree call)
8636 /* It's invalid to call this function with anything but a CALL_EXPR. */
8637 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8639 if (!CALL_EXPR_FN (call))
8640 return as_combined_fn (CALL_EXPR_IFN (call));
8642 tree fndecl = get_callee_fndecl (call);
8643 if (fndecl
8644 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8645 && tree_builtin_call_types_compatible_p (call, fndecl))
8646 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8648 return CFN_LAST;
8651 /* Comparator of indices based on tree_node_counts. */
8653 static int
8654 tree_nodes_cmp (const void *p1, const void *p2)
8656 const unsigned *n1 = (const unsigned *)p1;
8657 const unsigned *n2 = (const unsigned *)p2;
8659 return tree_node_counts[*n1] - tree_node_counts[*n2];
8662 /* Comparator of indices based on tree_code_counts. */
8664 static int
8665 tree_codes_cmp (const void *p1, const void *p2)
8667 const unsigned *n1 = (const unsigned *)p1;
8668 const unsigned *n2 = (const unsigned *)p2;
8670 return tree_code_counts[*n1] - tree_code_counts[*n2];
8673 #define TREE_MEM_USAGE_SPACES 40
8675 /* Print debugging information about tree nodes generated during the compile,
8676 and any language-specific information. */
8678 void
8679 dump_tree_statistics (void)
8681 if (GATHER_STATISTICS)
8683 uint64_t total_nodes, total_bytes;
8684 fprintf (stderr, "\nKind Nodes Bytes\n");
8685 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8686 total_nodes = total_bytes = 0;
8689 auto_vec<unsigned> indices (all_kinds);
8690 for (unsigned i = 0; i < all_kinds; i++)
8691 indices.quick_push (i);
8692 indices.qsort (tree_nodes_cmp);
8694 for (unsigned i = 0; i < (int) all_kinds; i++)
8696 unsigned j = indices[i];
8697 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8698 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8699 SIZE_AMOUNT (tree_node_sizes[j]));
8700 total_nodes += tree_node_counts[j];
8701 total_bytes += tree_node_sizes[j];
8703 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8704 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8705 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8706 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8710 fprintf (stderr, "Code Nodes\n");
8711 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8713 auto_vec<unsigned> indices (MAX_TREE_CODES);
8714 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8715 indices.quick_push (i);
8716 indices.qsort (tree_codes_cmp);
8718 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8720 unsigned j = indices[i];
8721 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8722 get_tree_code_name ((enum tree_code) j),
8723 SIZE_AMOUNT (tree_code_counts[j]));
8725 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8726 fprintf (stderr, "\n");
8727 ssanames_print_statistics ();
8728 fprintf (stderr, "\n");
8729 phinodes_print_statistics ();
8730 fprintf (stderr, "\n");
8733 else
8734 fprintf (stderr, "(No per-node statistics)\n");
8736 print_type_hash_statistics ();
8737 print_debug_expr_statistics ();
8738 print_value_expr_statistics ();
8739 lang_hooks.print_statistics ();
8742 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8744 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8746 unsigned
8747 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8749 /* This relies on the raw feedback's top 4 bits being zero. */
8750 #define FEEDBACK(X) ((X) * 0x04c11db7)
8751 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8752 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8753 static const unsigned syndromes[16] =
8755 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8756 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8757 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8758 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8760 #undef FEEDBACK
8761 #undef SYNDROME
8763 value <<= (32 - bytes * 8);
8764 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8766 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8768 chksum = (chksum << 4) ^ feedback;
8771 return chksum;
8774 /* Generate a crc32 of a string. */
8776 unsigned
8777 crc32_string (unsigned chksum, const char *string)
8780 chksum = crc32_byte (chksum, *string);
8781 while (*string++);
8782 return chksum;
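A small usage sketch (inputs are hypothetical); crc32_byte used above and crc32_unsigned are the inline wrappers around crc32_unsigned_n declared in tree.h:

    unsigned sum = 0;
    sum = crc32_unsigned (sum, 0x12345678);  /* mix in a whole 32-bit word   */
    sum = crc32_string (sum, "foo.c");       /* then a NUL-terminated string */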
8785 /* P is a string that will be used in a symbol. Mask out any characters
8786 that are not valid in that context. */
8788 void
8789 clean_symbol_name (char *p)
8791 for (; *p; p++)
8792 if (! (ISALNUM (*p)
8793 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8794 || *p == '$'
8795 #endif
8796 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8797 || *p == '.'
8798 #endif
8800 *p = '_';
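For example (hypothetical buffer, on a target where neither '$' nor '.' may appear in labels):

    char buf[] = "foo-bar.c";
    clean_symbol_name (buf);    /* buf is now "foo_bar_c" */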
8803 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8805 /* Create a unique anonymous identifier. The identifier is still a
8806 valid assembly label. */
8808 tree
8809 make_anon_name ()
8811 const char *fmt =
8812 #if !defined (NO_DOT_IN_LABEL)
8814 #elif !defined (NO_DOLLAR_IN_LABEL)
8816 #else
8818 #endif
8819 "_anon_%d";
8821 char buf[24];
8822 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8823 gcc_checking_assert (len < int (sizeof (buf)));
8825 tree id = get_identifier_with_length (buf, len);
8826 IDENTIFIER_ANON_P (id) = true;
8828 return id;
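A brief sketch of the result; the exact prefix depends on the NO_DOT_IN_LABEL / NO_DOLLAR_IN_LABEL configuration selected above:

    tree id1 = make_anon_name ();  /* e.g. "._anon_0" where '.' is allowed  */
    tree id2 = make_anon_name ();  /* the counter advances: "._anon_1"      */
    /* IDENTIFIER_ANON_P is set on both, so consumers can recognize them.   */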
8831 /* Generate a name for a special-purpose function.
8832 The generated name may need to be unique across the whole link.
8833 Changes to this function may also require corresponding changes to
8834 xstrdup_mask_random.
8835 TYPE is some string to identify the purpose of this function to the
8836 linker or collect2; it must start with an uppercase letter,
8837 one of:
8838 I - for constructors
8839 D - for destructors
8840 N - for C++ anonymous namespaces
8841 F - for DWARF unwind frame information. */
8843 tree
8844 get_file_function_name (const char *type)
8846 char *buf;
8847 const char *p;
8848 char *q;
8850 /* If we already have a name we know to be unique, just use that. */
8851 if (first_global_object_name)
8852 p = q = ASTRDUP (first_global_object_name);
8853 /* If the target is handling the constructors/destructors, they
8854 will be local to this file and the name is only necessary for
8855 debugging purposes.
8856 We also assign sub_I and sub_D suffixes to constructors called from
8857 the global static constructors. These are always local. */
8858 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8859 || (startswith (type, "sub_")
8860 && (type[4] == 'I' || type[4] == 'D')))
8862 const char *file = main_input_filename;
8863 if (! file)
8864 file = LOCATION_FILE (input_location);
8865 /* Just use the file's basename, because the full pathname
8866 might be quite long. */
8867 p = q = ASTRDUP (lbasename (file));
8869 else
8871 /* Otherwise, the name must be unique across the entire link.
8872 We don't have anything that we know to be unique to this translation
8873 unit, so use what we do have and throw in some randomness. */
8874 unsigned len;
8875 const char *name = weak_global_object_name;
8876 const char *file = main_input_filename;
8878 if (! name)
8879 name = "";
8880 if (! file)
8881 file = LOCATION_FILE (input_location);
8883 len = strlen (file);
8884 q = (char *) alloca (9 + 19 + len + 1);
8885 memcpy (q, file, len + 1);
8887 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8888 crc32_string (0, name), get_random_seed (false));
8890 p = q;
8893 clean_symbol_name (q);
8894 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8895 + strlen (type));
8897 /* Set up the name of the file-level functions we may need.
8898 Use a global object (which is already required to be unique over
8899 the program) rather than the file name (which imposes extra
8900 constraints). */
8901 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8903 return get_identifier (buf);
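An illustrative sketch of the identifiers this produces (the global and file names are hypothetical):

    /* With first_global_object_name == "myglobal" and TYPE == "I", the
       result is "_GLOBAL__I_myglobal"; the sub_I/sub_D cases instead use
       the input file's basename, giving names of the form
       "_GLOBAL__sub_I_<basename>".  */
    tree ctor_id = get_file_function_name ("I");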
8906 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8908 /* Complain that the tree code of NODE does not match the expected 0
8909 terminated list of trailing codes. The trailing code list can be
8910 empty, for a more vague error message. FILE, LINE, and FUNCTION
8911 are of the caller. */
8913 void
8914 tree_check_failed (const_tree node, const char *file,
8915 int line, const char *function, ...)
8917 va_list args;
8918 const char *buffer;
8919 unsigned length = 0;
8920 enum tree_code code;
8922 va_start (args, function);
8923 while ((code = (enum tree_code) va_arg (args, int)))
8924 length += 4 + strlen (get_tree_code_name (code));
8925 va_end (args);
8926 if (length)
8928 char *tmp;
8929 va_start (args, function);
8930 length += strlen ("expected ");
8931 buffer = tmp = (char *) alloca (length);
8932 length = 0;
8933 while ((code = (enum tree_code) va_arg (args, int)))
8935 const char *prefix = length ? " or " : "expected ";
8937 strcpy (tmp + length, prefix);
8938 length += strlen (prefix);
8939 strcpy (tmp + length, get_tree_code_name (code));
8940 length += strlen (get_tree_code_name (code));
8942 va_end (args);
8944 else
8945 buffer = "unexpected node";
8947 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8948 buffer, get_tree_code_name (TREE_CODE (node)),
8949 function, trim_filename (file), line);
8952 /* Complain that the tree code of NODE matches one of the codes in the
8953 0-terminated trailing list, which it is required not to have. FILE,
8954 LINE, and FUNCTION are of the caller. */
8956 void
8957 tree_not_check_failed (const_tree node, const char *file,
8958 int line, const char *function, ...)
8960 va_list args;
8961 char *buffer;
8962 unsigned length = 0;
8963 enum tree_code code;
8965 va_start (args, function);
8966 while ((code = (enum tree_code) va_arg (args, int)))
8967 length += 4 + strlen (get_tree_code_name (code));
8968 va_end (args);
8969 va_start (args, function);
8970 buffer = (char *) alloca (length);
8971 length = 0;
8972 while ((code = (enum tree_code) va_arg (args, int)))
8974 if (length)
8976 strcpy (buffer + length, " or ");
8977 length += 4;
8979 strcpy (buffer + length, get_tree_code_name (code));
8980 length += strlen (get_tree_code_name (code));
8982 va_end (args);
8984 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8985 buffer, get_tree_code_name (TREE_CODE (node)),
8986 function, trim_filename (file), line);
8989 /* Similar to tree_check_failed, except that we check for a class of tree
8990 code, given in CL. */
8992 void
8993 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8994 const char *file, int line, const char *function)
8996 internal_error
8997 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8998 TREE_CODE_CLASS_STRING (cl),
8999 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9000 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9003 /* Similar to tree_check_failed, except that instead of specifying a
9004 dozen codes, use the knowledge that they're all sequential. */
9006 void
9007 tree_range_check_failed (const_tree node, const char *file, int line,
9008 const char *function, enum tree_code c1,
9009 enum tree_code c2)
9011 char *buffer;
9012 unsigned length = 0;
9013 unsigned int c;
9015 for (c = c1; c <= c2; ++c)
9016 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9018 length += strlen ("expected ");
9019 buffer = (char *) alloca (length);
9020 length = 0;
9022 for (c = c1; c <= c2; ++c)
9024 const char *prefix = length ? " or " : "expected ";
9026 strcpy (buffer + length, prefix);
9027 length += strlen (prefix);
9028 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9029 length += strlen (get_tree_code_name ((enum tree_code) c));
9032 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9033 buffer, get_tree_code_name (TREE_CODE (node)),
9034 function, trim_filename (file), line);
9038 /* Similar to tree_check_failed, except that we check that a tree does
9039 not have the specified code, given in CL. */
9041 void
9042 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9043 const char *file, int line, const char *function)
9045 internal_error
9046 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9047 TREE_CODE_CLASS_STRING (cl),
9048 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9049 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9053 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9055 void
9056 omp_clause_check_failed (const_tree node, const char *file, int line,
9057 const char *function, enum omp_clause_code code)
9059 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9060 "in %s, at %s:%d",
9061 omp_clause_code_name[code],
9062 get_tree_code_name (TREE_CODE (node)),
9063 function, trim_filename (file), line);
9067 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9069 void
9070 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9071 const char *function, enum omp_clause_code c1,
9072 enum omp_clause_code c2)
9074 char *buffer;
9075 unsigned length = 0;
9076 unsigned int c;
9078 for (c = c1; c <= c2; ++c)
9079 length += 4 + strlen (omp_clause_code_name[c]);
9081 length += strlen ("expected ");
9082 buffer = (char *) alloca (length);
9083 length = 0;
9085 for (c = c1; c <= c2; ++c)
9087 const char *prefix = length ? " or " : "expected ";
9089 strcpy (buffer + length, prefix);
9090 length += strlen (prefix);
9091 strcpy (buffer + length, omp_clause_code_name[c]);
9092 length += strlen (omp_clause_code_name[c]);
9095 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9096 buffer, omp_clause_code_name[TREE_CODE (node)],
9097 function, trim_filename (file), line);
9101 #undef DEFTREESTRUCT
9102 #define DEFTREESTRUCT(VAL, NAME) NAME,
9104 static const char *ts_enum_names[] = {
9105 #include "treestruct.def"
9107 #undef DEFTREESTRUCT
9109 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9111 /* Similar to tree_class_check_failed, except that we check for
9112 whether CODE contains the tree structure identified by EN. */
9114 void
9115 tree_contains_struct_check_failed (const_tree node,
9116 const enum tree_node_structure_enum en,
9117 const char *file, int line,
9118 const char *function)
9120 internal_error
9121 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9122 TS_ENUM_NAME (en),
9123 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9127 /* Similar to above, except that the check is for the bounds of a
9128 TREE_INT_CST's (dynamically sized) vector of elements. */
9130 void
9131 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9132 const char *function)
9134 internal_error
9135 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9136 "at %s:%d",
9137 idx + 1, len, function, trim_filename (file), line);
9140 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9141 (dynamically sized) vector. */
9143 void
9144 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9145 const char *function)
9147 internal_error
9148 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9149 idx + 1, len, function, trim_filename (file), line);
9152 /* Similar to above, except that the check is for the bounds of the operand
9153 vector of an expression node EXP. */
9155 void
9156 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9157 int line, const char *function)
9159 enum tree_code code = TREE_CODE (exp);
9160 internal_error
9161 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9162 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9163 function, trim_filename (file), line);
9166 /* Similar to above, except that the check is for the number of
9167 operands of an OMP_CLAUSE node. */
9169 void
9170 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9171 int line, const char *function)
9173 internal_error
9174 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9175 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9176 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9177 trim_filename (file), line);
9179 #endif /* ENABLE_TREE_CHECKING */
9181 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9182 and mapped to the machine mode MODE. Initialize its fields and build
9183 the information necessary for debugging output. */
9185 static tree
9186 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9188 tree t;
9189 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9191 t = make_node (VECTOR_TYPE);
9192 TREE_TYPE (t) = mv_innertype;
9193 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9194 SET_TYPE_MODE (t, mode);
9196 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9197 SET_TYPE_STRUCTURAL_EQUALITY (t);
9198 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9199 || mode != VOIDmode)
9200 && !VECTOR_BOOLEAN_TYPE_P (t))
9201 TYPE_CANONICAL (t)
9202 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9204 layout_type (t);
9206 hashval_t hash = type_hash_canon_hash (t);
9207 t = type_hash_canon (hash, t);
9209 /* We have built a main variant, based on the main variant of the
9210 inner type. Use it to build the variant we return. */
9211 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9212 && TREE_TYPE (t) != innertype)
9213 return build_type_attribute_qual_variant (t,
9214 TYPE_ATTRIBUTES (innertype),
9215 TYPE_QUALS (innertype));
9217 return t;
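make_vector_type is static; it is reached through public builders such as build_vector_type and build_vector_type_for_mode. A minimal sketch (the explicit mode is target-dependent and purely illustrative):

    /* Four 32-bit ints; layout_type picks the machine mode.  */
    tree v4si = build_vector_type (intSI_type_node, 4);
    /* Or request a specific mode where the target provides it.  */
    tree v4si_m = build_vector_type_for_mode (intSI_type_node, V4SImode);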
9220 static tree
9221 make_or_reuse_type (unsigned size, int unsignedp)
9223 int i;
9225 if (size == INT_TYPE_SIZE)
9226 return unsignedp ? unsigned_type_node : integer_type_node;
9227 if (size == CHAR_TYPE_SIZE)
9228 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9229 if (size == SHORT_TYPE_SIZE)
9230 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9231 if (size == LONG_TYPE_SIZE)
9232 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9233 if (size == LONG_LONG_TYPE_SIZE)
9234 return (unsignedp ? long_long_unsigned_type_node
9235 : long_long_integer_type_node);
9237 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9238 if (size == int_n_data[i].bitsize
9239 && int_n_enabled_p[i])
9240 return (unsignedp ? int_n_trees[i].unsigned_type
9241 : int_n_trees[i].signed_type);
9243 if (unsignedp)
9244 return make_unsigned_type (size);
9245 else
9246 return make_signed_type (size);
9249 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9251 static tree
9252 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9254 if (satp)
9256 if (size == SHORT_FRACT_TYPE_SIZE)
9257 return unsignedp ? sat_unsigned_short_fract_type_node
9258 : sat_short_fract_type_node;
9259 if (size == FRACT_TYPE_SIZE)
9260 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9261 if (size == LONG_FRACT_TYPE_SIZE)
9262 return unsignedp ? sat_unsigned_long_fract_type_node
9263 : sat_long_fract_type_node;
9264 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9265 return unsignedp ? sat_unsigned_long_long_fract_type_node
9266 : sat_long_long_fract_type_node;
9268 else
9270 if (size == SHORT_FRACT_TYPE_SIZE)
9271 return unsignedp ? unsigned_short_fract_type_node
9272 : short_fract_type_node;
9273 if (size == FRACT_TYPE_SIZE)
9274 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9275 if (size == LONG_FRACT_TYPE_SIZE)
9276 return unsignedp ? unsigned_long_fract_type_node
9277 : long_fract_type_node;
9278 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9279 return unsignedp ? unsigned_long_long_fract_type_node
9280 : long_long_fract_type_node;
9283 return make_fract_type (size, unsignedp, satp);
9286 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9288 static tree
9289 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9291 if (satp)
9293 if (size == SHORT_ACCUM_TYPE_SIZE)
9294 return unsignedp ? sat_unsigned_short_accum_type_node
9295 : sat_short_accum_type_node;
9296 if (size == ACCUM_TYPE_SIZE)
9297 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9298 if (size == LONG_ACCUM_TYPE_SIZE)
9299 return unsignedp ? sat_unsigned_long_accum_type_node
9300 : sat_long_accum_type_node;
9301 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9302 return unsignedp ? sat_unsigned_long_long_accum_type_node
9303 : sat_long_long_accum_type_node;
9305 else
9307 if (size == SHORT_ACCUM_TYPE_SIZE)
9308 return unsignedp ? unsigned_short_accum_type_node
9309 : short_accum_type_node;
9310 if (size == ACCUM_TYPE_SIZE)
9311 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9312 if (size == LONG_ACCUM_TYPE_SIZE)
9313 return unsignedp ? unsigned_long_accum_type_node
9314 : long_accum_type_node;
9315 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9316 return unsignedp ? unsigned_long_long_accum_type_node
9317 : long_long_accum_type_node;
9320 return make_accum_type (size, unsignedp, satp);
9324 /* Create an atomic variant node for TYPE. This routine is called
9325 during initialization of data types to create the 5 basic atomic
9326 types. The generic build_variant_type function requires these to
9327 already be set up in order to function properly, so cannot be
9328 called from there. If ALIGN is non-zero, then ensure alignment is
9329 overridden to this value. */
9331 static tree
9332 build_atomic_base (tree type, unsigned int align)
9334 tree t;
9336 /* Make sure it's not already registered. */
9337 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9338 return t;
9340 t = build_variant_type_copy (type);
9341 set_type_quals (t, TYPE_QUAL_ATOMIC);
9343 if (align)
9344 SET_TYPE_ALIGN (t, align);
9346 return t;
9349 /* Information about the _FloatN and _FloatNx types. This must be in
9350 the same order as the corresponding TI_* enum values. */
9351 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9353 { 16, false },
9354 { 32, false },
9355 { 64, false },
9356 { 128, false },
9357 { 32, true },
9358 { 64, true },
9359 { 128, true },
9363 /* Create nodes for all integer types (and error_mark_node) using the sizes
9364 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9366 void
9367 build_common_tree_nodes (bool signed_char)
9369 int i;
9371 error_mark_node = make_node (ERROR_MARK);
9372 TREE_TYPE (error_mark_node) = error_mark_node;
9374 initialize_sizetypes ();
9376 /* Define both `signed char' and `unsigned char'. */
9377 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9378 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9379 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9380 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9382 /* Define `char', which is like either `signed char' or `unsigned char'
9383 but not the same as either. */
9384 char_type_node
9385 = (signed_char
9386 ? make_signed_type (CHAR_TYPE_SIZE)
9387 : make_unsigned_type (CHAR_TYPE_SIZE));
9388 TYPE_STRING_FLAG (char_type_node) = 1;
9390 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9391 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9392 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9393 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9394 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9395 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9396 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9397 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9399 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9401 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9402 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9404 if (int_n_enabled_p[i])
9406 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9407 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9411 /* Define a boolean type. This type only represents boolean values but
9412 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9413 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9414 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9415 TYPE_PRECISION (boolean_type_node) = 1;
9416 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9418 /* Define what type to use for size_t. */
9419 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9420 size_type_node = unsigned_type_node;
9421 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9422 size_type_node = long_unsigned_type_node;
9423 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9424 size_type_node = long_long_unsigned_type_node;
9425 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9426 size_type_node = short_unsigned_type_node;
9427 else
9429 int i;
9431 size_type_node = NULL_TREE;
9432 for (i = 0; i < NUM_INT_N_ENTS; i++)
9433 if (int_n_enabled_p[i])
9435 char name[50], altname[50];
9436 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9437 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9439 if (strcmp (name, SIZE_TYPE) == 0
9440 || strcmp (altname, SIZE_TYPE) == 0)
9442 size_type_node = int_n_trees[i].unsigned_type;
9445 if (size_type_node == NULL_TREE)
9446 gcc_unreachable ();
9449 /* Define what type to use for ptrdiff_t. */
9450 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9451 ptrdiff_type_node = integer_type_node;
9452 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9453 ptrdiff_type_node = long_integer_type_node;
9454 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9455 ptrdiff_type_node = long_long_integer_type_node;
9456 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9457 ptrdiff_type_node = short_integer_type_node;
9458 else
9460 ptrdiff_type_node = NULL_TREE;
9461 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9462 if (int_n_enabled_p[i])
9464 char name[50], altname[50];
9465 sprintf (name, "__int%d", int_n_data[i].bitsize);
9466 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9468 if (strcmp (name, PTRDIFF_TYPE) == 0
9469 || strcmp (altname, PTRDIFF_TYPE) == 0)
9470 ptrdiff_type_node = int_n_trees[i].signed_type;
9472 if (ptrdiff_type_node == NULL_TREE)
9473 gcc_unreachable ();
9476 /* Fill in the rest of the sized types. Reuse existing type nodes
9477 when possible. */
9478 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9479 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9480 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9481 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9482 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9484 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9485 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9486 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9487 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9488 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9490 /* Don't call build_qualified type for atomics. That routine does
9491 special processing for atomics, and until they are initialized
9492 it's better not to make that call.
9494 Check to see if there is a target override for atomic types. */
9496 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9497 targetm.atomic_align_for_mode (QImode));
9498 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9499 targetm.atomic_align_for_mode (HImode));
9500 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9501 targetm.atomic_align_for_mode (SImode));
9502 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9503 targetm.atomic_align_for_mode (DImode));
9504 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9505 targetm.atomic_align_for_mode (TImode));
9507 access_public_node = get_identifier ("public");
9508 access_protected_node = get_identifier ("protected");
9509 access_private_node = get_identifier ("private");
9511 /* Define these next since types below may use them. */
9512 integer_zero_node = build_int_cst (integer_type_node, 0);
9513 integer_one_node = build_int_cst (integer_type_node, 1);
9514 integer_three_node = build_int_cst (integer_type_node, 3);
9515 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9517 size_zero_node = size_int (0);
9518 size_one_node = size_int (1);
9519 bitsize_zero_node = bitsize_int (0);
9520 bitsize_one_node = bitsize_int (1);
9521 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9523 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9524 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9526 void_type_node = make_node (VOID_TYPE);
9527 layout_type (void_type_node);
9529 /* We are not going to have real types in C with less than byte alignment,
9530 so we might as well not have any types that claim to have it. */
9531 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9532 TYPE_USER_ALIGN (void_type_node) = 0;
9534 void_node = make_node (VOID_CST);
9535 TREE_TYPE (void_node) = void_type_node;
9537 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9539 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9540 layout_type (TREE_TYPE (null_pointer_node));
9542 ptr_type_node = build_pointer_type (void_type_node);
9543 const_ptr_type_node
9544 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9545 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9546 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9548 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9550 float_type_node = make_node (REAL_TYPE);
9551 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9552 layout_type (float_type_node);
9554 double_type_node = make_node (REAL_TYPE);
9555 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9556 layout_type (double_type_node);
9558 long_double_type_node = make_node (REAL_TYPE);
9559 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9560 layout_type (long_double_type_node);
9562 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9564 int n = floatn_nx_types[i].n;
9565 bool extended = floatn_nx_types[i].extended;
9566 scalar_float_mode mode;
9567 if (!targetm.floatn_mode (n, extended).exists (&mode))
9568 continue;
9569 int precision = GET_MODE_PRECISION (mode);
9570 /* Work around the rs6000 KFmode having precision 113 not
9571 128. */
9572 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9573 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9574 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9575 if (!extended)
9576 gcc_assert (min_precision == n);
9577 if (precision < min_precision)
9578 precision = min_precision;
9579 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9580 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9581 layout_type (FLOATN_NX_TYPE_NODE (i));
9582 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9584 float128t_type_node = float128_type_node;
9585 #ifdef HAVE_BFmode
9586 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9587 && targetm.scalar_mode_supported_p (BFmode)
9588 && targetm.libgcc_floating_mode_supported_p (BFmode))
9590 bfloat16_type_node = make_node (REAL_TYPE);
9591 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9592 layout_type (bfloat16_type_node);
9593 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9595 #endif
9597 float_ptr_type_node = build_pointer_type (float_type_node);
9598 double_ptr_type_node = build_pointer_type (double_type_node);
9599 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9600 integer_ptr_type_node = build_pointer_type (integer_type_node);
9602 /* Fixed size integer types. */
9603 uint16_type_node = make_or_reuse_type (16, 1);
9604 uint32_type_node = make_or_reuse_type (32, 1);
9605 uint64_type_node = make_or_reuse_type (64, 1);
9606 if (targetm.scalar_mode_supported_p (TImode))
9607 uint128_type_node = make_or_reuse_type (128, 1);
9609 /* Decimal float types. */
9610 if (targetm.decimal_float_supported_p ())
9612 dfloat32_type_node = make_node (REAL_TYPE);
9613 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9614 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9615 layout_type (dfloat32_type_node);
9617 dfloat64_type_node = make_node (REAL_TYPE);
9618 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9619 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9620 layout_type (dfloat64_type_node);
9622 dfloat128_type_node = make_node (REAL_TYPE);
9623 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9624 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9625 layout_type (dfloat128_type_node);
9628 complex_integer_type_node = build_complex_type (integer_type_node, true);
9629 complex_float_type_node = build_complex_type (float_type_node, true);
9630 complex_double_type_node = build_complex_type (double_type_node, true);
9631 complex_long_double_type_node = build_complex_type (long_double_type_node,
9632 true);
9634 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9636 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9637 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9638 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9641 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9642 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9643 sat_ ## KIND ## _type_node = \
9644 make_sat_signed_ ## KIND ## _type (SIZE); \
9645 sat_unsigned_ ## KIND ## _type_node = \
9646 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9647 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9648 unsigned_ ## KIND ## _type_node = \
9649 make_unsigned_ ## KIND ## _type (SIZE);
9651 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9652 sat_ ## WIDTH ## KIND ## _type_node = \
9653 make_sat_signed_ ## KIND ## _type (SIZE); \
9654 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9655 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9656 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9657 unsigned_ ## WIDTH ## KIND ## _type_node = \
9658 make_unsigned_ ## KIND ## _type (SIZE);
9660 /* Make fixed-point type nodes based on four different widths. */
9661 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9662 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9663 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9664 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9665 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9667 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9668 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9669 NAME ## _type_node = \
9670 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9671 u ## NAME ## _type_node = \
9672 make_or_reuse_unsigned_ ## KIND ## _type \
9673 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9674 sat_ ## NAME ## _type_node = \
9675 make_or_reuse_sat_signed_ ## KIND ## _type \
9676 (GET_MODE_BITSIZE (MODE ## mode)); \
9677 sat_u ## NAME ## _type_node = \
9678 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9679 (GET_MODE_BITSIZE (U ## MODE ## mode));
9681 /* Fixed-point type and mode nodes. */
9682 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9683 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9684 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9685 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9686 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9687 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9688 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9689 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9690 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9691 MAKE_FIXED_MODE_NODE (accum, da, DA)
9692 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9695 tree t = targetm.build_builtin_va_list ();
9697 /* Many back-ends define record types without setting TYPE_NAME.
9698 If we copied the record type here, we'd keep the original
9699 record type without a name. This breaks name mangling. So,
9700 don't copy record types and let c_common_nodes_and_builtins()
9701 declare the type to be __builtin_va_list. */
9702 if (TREE_CODE (t) != RECORD_TYPE)
9703 t = build_variant_type_copy (t);
9705 va_list_type_node = t;
9708 /* SCEV analyzer global shared trees. */
9709 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9710 TREE_TYPE (chrec_dont_know) = void_type_node;
9711 chrec_known = make_node (SCEV_KNOWN);
9712 TREE_TYPE (chrec_known) = void_type_node;
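For orientation, a sketch of how a front end typically drives this initialization (the exact call sites vary between front ends):

    /* Early in start-up: create error_mark_node, the C integer and
       floating-point type nodes, size_type_node, va_list_type_node, ...  */
    build_common_tree_nodes (flag_signed_char);
    /* ... the front end then registers its own builtins ...  */
    /* Finally fill in the middle-end builtins defined below.  */
    build_common_builtin_nodes ();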
9715 /* Modify DECL for given flags.
9716 TM_PURE attribute is set only on types, so the function will modify
9717 DECL's type when ECF_TM_PURE is used. */
9719 void
9720 set_call_expr_flags (tree decl, int flags)
9722 if (flags & ECF_NOTHROW)
9723 TREE_NOTHROW (decl) = 1;
9724 if (flags & ECF_CONST)
9725 TREE_READONLY (decl) = 1;
9726 if (flags & ECF_PURE)
9727 DECL_PURE_P (decl) = 1;
9728 if (flags & ECF_LOOPING_CONST_OR_PURE)
9729 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9730 if (flags & ECF_NOVOPS)
9731 DECL_IS_NOVOPS (decl) = 1;
9732 if (flags & ECF_NORETURN)
9733 TREE_THIS_VOLATILE (decl) = 1;
9734 if (flags & ECF_MALLOC)
9735 DECL_IS_MALLOC (decl) = 1;
9736 if (flags & ECF_RETURNS_TWICE)
9737 DECL_IS_RETURNS_TWICE (decl) = 1;
9738 if (flags & ECF_LEAF)
9739 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9740 NULL, DECL_ATTRIBUTES (decl));
9741 if (flags & ECF_COLD)
9742 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9743 NULL, DECL_ATTRIBUTES (decl));
9744 if (flags & ECF_RET1)
9745 DECL_ATTRIBUTES (decl)
9746 = tree_cons (get_identifier ("fn spec"),
9747 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9748 DECL_ATTRIBUTES (decl));
9749 if ((flags & ECF_TM_PURE) && flag_tm)
9750 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9751 if ((flags & ECF_XTHROW))
9752 DECL_ATTRIBUTES (decl)
9753 = tree_cons (get_identifier ("expected_throw"),
9754 NULL, DECL_ATTRIBUTES (decl));
9755 /* Looping const or pure is implied by noreturn.
9756 There is currently no way to declare looping const or looping pure alone. */
9757 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9758 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9762 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9764 static void
9765 local_define_builtin (const char *name, tree type, enum built_in_function code,
9766 const char *library_name, int ecf_flags)
9768 tree decl;
9770 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9771 library_name, NULL_TREE);
9772 set_call_expr_flags (decl, ecf_flags);
9774 set_builtin_decl (code, decl, true);
9777 /* Call this function after instantiating all builtins that the language
9778 front end cares about. This will build the rest of the builtins
9779 and internal functions that are relied upon by the tree optimizers and
9780 the middle-end. */
9782 void
9783 build_common_builtin_nodes (void)
9785 tree tmp, ftype;
9786 int ecf_flags;
9788 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9790 ftype = build_function_type_list (void_type_node,
9791 ptr_type_node,
9792 ptr_type_node,
9793 integer_type_node,
9794 NULL_TREE);
9795 local_define_builtin ("__builtin_clear_padding", ftype,
9796 BUILT_IN_CLEAR_PADDING,
9797 "__builtin_clear_padding",
9798 ECF_LEAF | ECF_NOTHROW);
9801 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9802 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9803 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9804 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9806 ftype = build_function_type (void_type_node, void_list_node);
9807 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9808 local_define_builtin ("__builtin_unreachable", ftype,
9809 BUILT_IN_UNREACHABLE,
9810 "__builtin_unreachable",
9811 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9812 | ECF_CONST | ECF_COLD);
9813 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9814 local_define_builtin ("__builtin_unreachable trap", ftype,
9815 BUILT_IN_UNREACHABLE_TRAP,
9816 "__builtin_unreachable trap",
9817 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9818 | ECF_CONST | ECF_COLD);
9819 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9820 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9821 "abort",
9822 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9823 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9824 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9825 "__builtin_trap",
9826 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9829 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9830 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9832 ftype = build_function_type_list (ptr_type_node,
9833 ptr_type_node, const_ptr_type_node,
9834 size_type_node, NULL_TREE);
9836 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9837 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9838 "memcpy", ECF_NOTHROW | ECF_LEAF);
9839 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9840 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9841 "memmove", ECF_NOTHROW | ECF_LEAF);
9844 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9846 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9847 const_ptr_type_node, size_type_node,
9848 NULL_TREE);
9849 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9850 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9853 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9855 ftype = build_function_type_list (ptr_type_node,
9856 ptr_type_node, integer_type_node,
9857 size_type_node, NULL_TREE);
9858 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9859 "memset", ECF_NOTHROW | ECF_LEAF);
9862 /* If we're checking the stack, `alloca' can throw. */
9863 const int alloca_flags
9864 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9866 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9868 ftype = build_function_type_list (ptr_type_node,
9869 size_type_node, NULL_TREE);
9870 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9871 "alloca", alloca_flags);
9874 ftype = build_function_type_list (ptr_type_node, size_type_node,
9875 size_type_node, NULL_TREE);
9876 local_define_builtin ("__builtin_alloca_with_align", ftype,
9877 BUILT_IN_ALLOCA_WITH_ALIGN,
9878 "__builtin_alloca_with_align",
9879 alloca_flags);
9881 ftype = build_function_type_list (ptr_type_node, size_type_node,
9882 size_type_node, size_type_node, NULL_TREE);
9883 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9884 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9885 "__builtin_alloca_with_align_and_max",
9886 alloca_flags);
9888 ftype = build_function_type_list (void_type_node,
9889 ptr_type_node, ptr_type_node,
9890 ptr_type_node, NULL_TREE);
9891 local_define_builtin ("__builtin_init_trampoline", ftype,
9892 BUILT_IN_INIT_TRAMPOLINE,
9893 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9894 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9895 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9896 "__builtin_init_heap_trampoline",
9897 ECF_NOTHROW | ECF_LEAF);
9898 local_define_builtin ("__builtin_init_descriptor", ftype,
9899 BUILT_IN_INIT_DESCRIPTOR,
9900 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9902 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9903 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9904 BUILT_IN_ADJUST_TRAMPOLINE,
9905 "__builtin_adjust_trampoline",
9906 ECF_CONST | ECF_NOTHROW);
9907 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9908 BUILT_IN_ADJUST_DESCRIPTOR,
9909 "__builtin_adjust_descriptor",
9910 ECF_CONST | ECF_NOTHROW);
9912 ftype = build_function_type_list (void_type_node,
9913 ptr_type_node, ptr_type_node, NULL_TREE);
9914 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9915 local_define_builtin ("__builtin___clear_cache", ftype,
9916 BUILT_IN_CLEAR_CACHE,
9917 "__clear_cache",
9918 ECF_NOTHROW);
9920 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9921 BUILT_IN_NONLOCAL_GOTO,
9922 "__builtin_nonlocal_goto",
9923 ECF_NORETURN | ECF_NOTHROW);
9925 ftype = build_function_type_list (void_type_node,
9926 ptr_type_node, ptr_type_node, NULL_TREE);
9927 local_define_builtin ("__builtin_setjmp_setup", ftype,
9928 BUILT_IN_SETJMP_SETUP,
9929 "__builtin_setjmp_setup", ECF_NOTHROW);
9931 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9932 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9933 BUILT_IN_SETJMP_RECEIVER,
9934 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9936 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9937 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9938 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9940 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9941 local_define_builtin ("__builtin_stack_restore", ftype,
9942 BUILT_IN_STACK_RESTORE,
9943 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9945 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9946 const_ptr_type_node, size_type_node,
9947 NULL_TREE);
9948 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9949 "__builtin_memcmp_eq",
9950 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9952 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9953 "__builtin_strncmp_eq",
9954 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9956 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9957 "__builtin_strcmp_eq",
9958 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9960 /* If there's a possibility that we might use the ARM EABI, build the
9961 alternate __cxa_end_cleanup node used to resume from C++. */
9962 if (targetm.arm_eabi_unwinder)
9964 ftype = build_function_type_list (void_type_node, NULL_TREE);
9965 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9966 BUILT_IN_CXA_END_CLEANUP,
9967 "__cxa_end_cleanup",
9968 ECF_NORETURN | ECF_XTHROW | ECF_LEAF);
9971 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9972 local_define_builtin ("__builtin_unwind_resume", ftype,
9973 BUILT_IN_UNWIND_RESUME,
9974 ((targetm_common.except_unwind_info (&global_options)
9975 == UI_SJLJ)
9976 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9977 ECF_NORETURN | ECF_XTHROW);
9979 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9981 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9982 NULL_TREE);
9983 local_define_builtin ("__builtin_return_address", ftype,
9984 BUILT_IN_RETURN_ADDRESS,
9985 "__builtin_return_address",
9986 ECF_NOTHROW);
9989 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9990 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9992 ftype = build_function_type_list (void_type_node, ptr_type_node,
9993 ptr_type_node, NULL_TREE);
9994 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9995 local_define_builtin ("__cyg_profile_func_enter", ftype,
9996 BUILT_IN_PROFILE_FUNC_ENTER,
9997 "__cyg_profile_func_enter", 0);
9998 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9999 local_define_builtin ("__cyg_profile_func_exit", ftype,
10000 BUILT_IN_PROFILE_FUNC_EXIT,
10001 "__cyg_profile_func_exit", 0);
10004 /* The exception object and filter values from the runtime. The argument
10005 must be zero before exception lowering, i.e. from the front end. After
10006 exception lowering, it will be the region number for the exception
10007 landing pad. These functions are PURE instead of CONST to prevent
10008 them from being hoisted past the exception edge that will initialize
10009 its value in the landing pad. */
10010 ftype = build_function_type_list (ptr_type_node,
10011 integer_type_node, NULL_TREE);
10012 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10013 /* Only use TM_PURE if we have TM language support. */
10014 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10015 ecf_flags |= ECF_TM_PURE;
10016 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10017 "__builtin_eh_pointer", ecf_flags);
10019 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10020 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10021 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10022 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10024 ftype = build_function_type_list (void_type_node,
10025 integer_type_node, integer_type_node,
10026 NULL_TREE);
10027 local_define_builtin ("__builtin_eh_copy_values", ftype,
10028 BUILT_IN_EH_COPY_VALUES,
10029 "__builtin_eh_copy_values", ECF_NOTHROW);
10031 /* Complex multiplication and division. These are handled as builtins
10032 rather than optabs because emit_library_call_value doesn't support
10033 complex. Further, we can do slightly better with folding these
10034 beasties if the real and imaginary parts of the arguments are separate. */
10036 int mode;
10038 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10040 char mode_name_buf[4], *q;
10041 const char *p;
10042 enum built_in_function mcode, dcode;
10043 tree type, inner_type;
10044 const char *prefix = "__";
10046 if (targetm.libfunc_gnu_prefix)
10047 prefix = "__gnu_";
10049 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10050 if (type == NULL)
10051 continue;
10052 inner_type = TREE_TYPE (type);
10054 ftype = build_function_type_list (type, inner_type, inner_type,
10055 inner_type, inner_type, NULL_TREE);
10057 mcode = ((enum built_in_function)
10058 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10059 dcode = ((enum built_in_function)
10060 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10062 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10063 *q = TOLOWER (*p);
10064 *q = '\0';
10066 /* For -ftrapping-math these should throw from a former
10067 -fnon-call-exception stmt. */
10068 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10069 NULL);
10070 local_define_builtin (built_in_names[mcode], ftype, mcode,
10071 built_in_names[mcode],
10072 ECF_CONST | ECF_LEAF);
10074 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10075 NULL);
10076 local_define_builtin (built_in_names[dcode], ftype, dcode,
10077 built_in_names[dcode],
10078 ECF_CONST | ECF_LEAF);
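/* As a concrete instance of the loop above, on a target that does not use
   the "__gnu_" prefix the complex-float SCmode entries come out as
   "__mulsc3" and "__divsc3", and the DCmode entries as "__muldc3" and
   "__divdc3", matching the corresponding libgcc support routines.  */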
10082 init_internal_fns ();
10085 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10086 better way.
10088 If we requested a pointer to a vector, build up the pointers that
10089 we stripped off while looking for the inner type. Similarly for
10090 return values from functions.
10092 The argument TYPE is the top of the chain, and BOTTOM is the
10093 new type which we will point to. */
10095 tree
10096 reconstruct_complex_type (tree type, tree bottom)
10098 tree inner, outer;
10100 if (TREE_CODE (type) == POINTER_TYPE)
10102 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10103 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10104 TYPE_REF_CAN_ALIAS_ALL (type));
10106 else if (TREE_CODE (type) == REFERENCE_TYPE)
10108 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10109 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10110 TYPE_REF_CAN_ALIAS_ALL (type));
10112 else if (TREE_CODE (type) == ARRAY_TYPE)
10114 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10115 outer = build_array_type (inner, TYPE_DOMAIN (type));
10117 else if (TREE_CODE (type) == FUNCTION_TYPE)
10119 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10120 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10121 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10123 else if (TREE_CODE (type) == METHOD_TYPE)
10125 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10126 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10127 so we must compensate by getting rid of it. */
10128 outer
10129 = build_method_type_directly
10130 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10131 inner,
10132 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10134 else if (TREE_CODE (type) == OFFSET_TYPE)
10136 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10137 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10139 else
10140 return bottom;
10142 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10143 TYPE_QUALS (type));
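/* A sketch of the intended effect, assuming a target vector type built for
   V4SFmode: if TYPE is float ** and BOTTOM is that vector type, the result
   is a pointer to a pointer to the vector type, with TYPE's attributes and
   qualifiers reapplied to the outermost type by the call above.  */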
10146 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10147 the inner type. */
10148 tree
10149 build_vector_type_for_mode (tree innertype, machine_mode mode)
10151 poly_int64 nunits;
10152 unsigned int bitsize;
10154 switch (GET_MODE_CLASS (mode))
10156 case MODE_VECTOR_BOOL:
10157 case MODE_VECTOR_INT:
10158 case MODE_VECTOR_FLOAT:
10159 case MODE_VECTOR_FRACT:
10160 case MODE_VECTOR_UFRACT:
10161 case MODE_VECTOR_ACCUM:
10162 case MODE_VECTOR_UACCUM:
10163 nunits = GET_MODE_NUNITS (mode);
10164 break;
10166 case MODE_INT:
10167 /* Check that there are no leftover bits. */
10168 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10169 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10170 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10171 break;
10173 default:
10174 gcc_unreachable ();
10177 return make_vector_type (innertype, nunits, mode);
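/* For example, assuming the target provides V4SImode,
   build_vector_type_for_mode (intSI_type_node, V4SImode) yields a vector
   type with four SImode elements whose TYPE_MODE is V4SImode; the MODE_INT
   case above likewise accepts an integer mode such as TImode together with
   a 32-bit element type and produces a four-element vector.  */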
10180 /* Similarly, but takes the inner type and number of units, which must be
10181 a power of two. */
10183 tree
10184 build_vector_type (tree innertype, poly_int64 nunits)
10186 return make_vector_type (innertype, nunits, VOIDmode);
10189 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10191 tree
10192 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10194 gcc_assert (mask_mode != BLKmode);
10196 unsigned HOST_WIDE_INT esize;
10197 if (VECTOR_MODE_P (mask_mode))
10199 poly_uint64 vsize = GET_MODE_PRECISION (mask_mode);
10200 esize = vector_element_size (vsize, nunits);
10202 else
10203 esize = 1;
10205 tree bool_type = build_nonstandard_boolean_type (esize);
10207 return make_vector_type (bool_type, nunits, mask_mode);
10210 /* Build a vector type that holds one boolean result for each element of
10211 vector type VECTYPE. The public interface for this operation is
10212 truth_type_for. */
10214 static tree
10215 build_truth_vector_type_for (tree vectype)
10217 machine_mode vector_mode = TYPE_MODE (vectype);
10218 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10220 machine_mode mask_mode;
10221 if (VECTOR_MODE_P (vector_mode)
10222 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10223 return build_truth_vector_type_for_mode (nunits, mask_mode);
10225 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10226 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10227 tree bool_type = build_nonstandard_boolean_type (esize);
10229 return make_vector_type (bool_type, nunits, VOIDmode);
10232 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10233 set. */
10235 tree
10236 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10238 tree t = make_vector_type (innertype, nunits, VOIDmode);
10239 tree cand;
10240 /* We always build the non-opaque variant before the opaque one,
10241 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10242 cand = TYPE_NEXT_VARIANT (t);
10243 if (cand
10244 && TYPE_VECTOR_OPAQUE (cand)
10245 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10246 return cand;
10247 /* Otherwise build a variant type and make sure to queue it after
10248 the non-opaque type. */
10249 cand = build_distinct_type_copy (t);
10250 TYPE_VECTOR_OPAQUE (cand) = true;
10251 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10252 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10253 TYPE_NEXT_VARIANT (t) = cand;
10254 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10255 return cand;
10258 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10260 static poly_wide_int
10261 vector_cst_int_elt (const_tree t, unsigned int i)
10263 /* First handle elements that are directly encoded. */
10264 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10265 if (i < encoded_nelts)
10266 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10268 /* Identify the pattern that contains element I and work out the index of
10269 the last encoded element for that pattern. */
10270 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10271 unsigned int pattern = i % npatterns;
10272 unsigned int count = i / npatterns;
10273 unsigned int final_i = encoded_nelts - npatterns + pattern;
10275 /* If there are no steps, the final encoded value is the right one. */
10276 if (!VECTOR_CST_STEPPED_P (t))
10277 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10279 /* Otherwise work out the value from the last two encoded elements. */
10280 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10281 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10282 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10283 return wi::to_poly_wide (v2) + (count - 2) * diff;
10286 /* Return the value of element I of VECTOR_CST T. */
10288 tree
10289 vector_cst_elt (const_tree t, unsigned int i)
10291 /* First handle elements that are directly encoded. */
10292 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10293 if (i < encoded_nelts)
10294 return VECTOR_CST_ENCODED_ELT (t, i);
10296 /* If there are no steps, the final encoded value is the right one. */
10297 if (!VECTOR_CST_STEPPED_P (t))
10299 /* Identify the pattern that contains element I and work out the index of
10300 the last encoded element for that pattern. */
10301 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10302 unsigned int pattern = i % npatterns;
10303 unsigned int final_i = encoded_nelts - npatterns + pattern;
10304 return VECTOR_CST_ENCODED_ELT (t, final_i);
10307 /* Otherwise work out the value from the last two encoded elements. */
10308 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10309 vector_cst_int_elt (t, i));
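/* A worked example of the encoding handled above: the linear series
   { 0, 1, 2, 3, ... } is encoded with VECTOR_CST_NPATTERNS == 1 and three
   encoded elements { 0, 1, 2 }.  For I == 5 the pattern is 5 % 1 == 0,
   COUNT is 5 / 1 == 5 and FINAL_I is 2, so the value is recovered from the
   last two encoded elements as 2 + (5 - 2) * (2 - 1) == 5.  */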
10312 /* Given an initializer INIT, return TRUE if INIT is zero or some
10313 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10314 null, set *NONZERO if and only if INIT is known not to be all
10315 zeros. A return value of false combined with *NONZERO set to
10316 false means that INIT may, but need not, be all zeros. Other
10317 combinations indicate definitive answers. */
10319 bool
10320 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10322 bool dummy;
10323 if (!nonzero)
10324 nonzero = &dummy;
10326 /* Conservatively clear NONZERO and set it only if INIT is definitely
10327 not all zero. */
10328 *nonzero = false;
10330 STRIP_NOPS (init);
10332 unsigned HOST_WIDE_INT off = 0;
10334 switch (TREE_CODE (init))
10336 case INTEGER_CST:
10337 if (integer_zerop (init))
10338 return true;
10340 *nonzero = true;
10341 return false;
10343 case REAL_CST:
10344 /* ??? Note that this is not correct for C4X float formats. There,
10345 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10346 negative exponent. */
10347 if (real_zerop (init)
10348 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10349 return true;
10351 *nonzero = true;
10352 return false;
10354 case FIXED_CST:
10355 if (fixed_zerop (init))
10356 return true;
10358 *nonzero = true;
10359 return false;
10361 case COMPLEX_CST:
10362 if (integer_zerop (init)
10363 || (real_zerop (init)
10364 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10365 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10366 return true;
10368 *nonzero = true;
10369 return false;
10371 case VECTOR_CST:
10372 if (VECTOR_CST_NPATTERNS (init) == 1
10373 && VECTOR_CST_DUPLICATE_P (init)
10374 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10375 return true;
10377 *nonzero = true;
10378 return false;
10380 case CONSTRUCTOR:
10382 if (TREE_CLOBBER_P (init))
10383 return false;
10385 unsigned HOST_WIDE_INT idx;
10386 tree elt;
10388 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10389 if (!initializer_zerop (elt, nonzero))
10390 return false;
10392 return true;
10395 case MEM_REF:
10397 tree arg = TREE_OPERAND (init, 0);
10398 if (TREE_CODE (arg) != ADDR_EXPR)
10399 return false;
10400 tree offset = TREE_OPERAND (init, 1);
10401 if (TREE_CODE (offset) != INTEGER_CST
10402 || !tree_fits_uhwi_p (offset))
10403 return false;
10404 off = tree_to_uhwi (offset);
10405 if (INT_MAX < off)
10406 return false;
10407 arg = TREE_OPERAND (arg, 0);
10408 if (TREE_CODE (arg) != STRING_CST)
10409 return false;
10410 init = arg;
10412 /* Fall through. */
10414 case STRING_CST:
10416 gcc_assert (off <= INT_MAX);
10418 int i = off;
10419 int n = TREE_STRING_LENGTH (init);
10420 if (n <= i)
10421 return false;
10423 /* We need to loop through all elements to handle cases like
10424 "\0" and "\0foobar". */
10425 for (i = 0; i < n; ++i)
10426 if (TREE_STRING_POINTER (init)[i] != '\0')
10428 *nonzero = true;
10429 return false;
10432 return true;
10435 default:
10436 return false;
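/* Illustrative cases: integer_zero_node and a CONSTRUCTOR whose elements
   are all zero both return true; integer_one_node returns false and sets
   *NONZERO; a REAL_CST of -0.0 also returns false (and sets *NONZERO)
   because its representation is not an all-zero bit pattern.  */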
10440 /* Return true if EXPR is an initializer expression in which every element
10441 is a constant that is numerically equal to 0 or 1. The elements do not
10442 need to be equal to each other. */
10444 bool
10445 initializer_each_zero_or_onep (const_tree expr)
10447 STRIP_ANY_LOCATION_WRAPPER (expr);
10449 switch (TREE_CODE (expr))
10451 case INTEGER_CST:
10452 return integer_zerop (expr) || integer_onep (expr);
10454 case REAL_CST:
10455 return real_zerop (expr) || real_onep (expr);
10457 case VECTOR_CST:
10459 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10460 if (VECTOR_CST_STEPPED_P (expr)
10461 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10462 return false;
10464 for (unsigned int i = 0; i < nelts; ++i)
10466 tree elt = vector_cst_elt (expr, i);
10467 if (!initializer_each_zero_or_onep (elt))
10468 return false;
10471 return true;
10474 default:
10475 return false;
10479 /* Check if vector VEC consists of all equal elements and
10480 that the number of elements corresponds to the type of VEC.
10481 The function returns the first element of the vector
10482 or NULL_TREE if the vector is not uniform. */
10483 tree
10484 uniform_vector_p (const_tree vec)
10486 tree first, t;
10487 unsigned HOST_WIDE_INT i, nelts;
10489 if (vec == NULL_TREE)
10490 return NULL_TREE;
10492 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10494 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10495 return TREE_OPERAND (vec, 0);
10497 else if (TREE_CODE (vec) == VECTOR_CST)
10499 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10500 return VECTOR_CST_ENCODED_ELT (vec, 0);
10501 return NULL_TREE;
10504 else if (TREE_CODE (vec) == CONSTRUCTOR
10505 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10507 first = error_mark_node;
10509 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10511 if (i == 0)
10513 first = t;
10514 continue;
10516 if (!operand_equal_p (first, t, 0))
10517 return NULL_TREE;
10519 if (i != nelts)
10520 return NULL_TREE;
10522 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10523 return uniform_vector_p (first);
10524 return first;
10527 return NULL_TREE;
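/* For example, a VECTOR_CST { 4, 4, 4, 4 } yields the INTEGER_CST 4 and a
   VEC_DUPLICATE_EXPR yields its operand, whereas { 1, 2, 3, 4 } or a
   CONSTRUCTOR supplying fewer elements than TYPE_VECTOR_SUBPARTS yields
   NULL_TREE.  */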
10530 /* If the argument is INTEGER_CST, return it. If the argument is vector
10531 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10532 return NULL_TREE.
10533 Look through location wrappers. */
10535 tree
10536 uniform_integer_cst_p (tree t)
10538 STRIP_ANY_LOCATION_WRAPPER (t);
10540 if (TREE_CODE (t) == INTEGER_CST)
10541 return t;
10543 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10545 t = uniform_vector_p (t);
10546 if (t && TREE_CODE (t) == INTEGER_CST)
10547 return t;
10550 return NULL_TREE;
10553 /* Check whether T is a constant or a constant vector in which every element E
10554 satisfies ~E + 1 == a power of two; if so return the ~E values, otherwise NULL_TREE. */
10556 tree
10557 bitmask_inv_cst_vector_p (tree t)
10560 tree_code code = TREE_CODE (t);
10561 tree type = TREE_TYPE (t);
10563 if (!INTEGRAL_TYPE_P (type)
10564 && !VECTOR_INTEGER_TYPE_P (type))
10565 return NULL_TREE;
10567 unsigned HOST_WIDE_INT nelts = 1;
10568 tree cst;
10569 unsigned int idx = 0;
10570 bool uniform = uniform_integer_cst_p (t);
10571 tree newtype = unsigned_type_for (type);
10572 tree_vector_builder builder;
10573 if (code == INTEGER_CST)
10574 cst = t;
10575 else
10577 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10578 return NULL_TREE;
10580 cst = vector_cst_elt (t, 0);
10581 builder.new_vector (newtype, nelts, 1);
10584 tree ty = unsigned_type_for (TREE_TYPE (cst));
10588 if (idx > 0)
10589 cst = vector_cst_elt (t, idx);
10590 wide_int icst = wi::to_wide (cst);
10591 wide_int inv = wi::bit_not (icst);
10592 icst = wi::add (1, inv);
10593 if (wi::popcount (icst) != 1)
10594 return NULL_TREE;
10596 tree newcst = wide_int_to_tree (ty, inv);
10598 if (uniform)
10599 return build_uniform_cst (newtype, newcst);
10601 builder.quick_push (newcst);
10603 while (++idx < nelts);
10605 return builder.build ();
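/* A worked case on an 8-bit element: for E == 0xf0, ~E == 0x0f == 15 and
   ~E + 1 == 16 is a power of two, so 15 is returned in the corresponding
   unsigned type (or a vector of such values); for E == 0xf2, ~E + 1 == 14
   is not a power of two and NULL_TREE is returned.  */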
10608 /* If VECTOR_CST T has a single nonzero element, return the index of that
10609 element, otherwise return -1. */
10611 int
10612 single_nonzero_element (const_tree t)
10614 unsigned HOST_WIDE_INT nelts;
10615 unsigned int repeat_nelts;
10616 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10617 repeat_nelts = nelts;
10618 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10620 nelts = vector_cst_encoded_nelts (t);
10621 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10623 else
10624 return -1;
10626 int res = -1;
10627 for (unsigned int i = 0; i < nelts; ++i)
10629 tree elt = vector_cst_elt (t, i);
10630 if (!integer_zerop (elt) && !real_zerop (elt))
10632 if (res >= 0 || i >= repeat_nelts)
10633 return -1;
10634 res = i;
10637 return res;
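/* For instance, { 0, 0, 5, 0 } yields 2, while { 0, 0, 0, 0 } and
   { 0, 1, 0, 1 } both yield -1 (no nonzero element and more than one
   nonzero element, respectively).  */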
10640 /* Build an empty statement at location LOC. */
10642 tree
10643 build_empty_stmt (location_t loc)
10645 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10646 SET_EXPR_LOCATION (t, loc);
10647 return t;
10651 /* Build an OMP clause with code CODE. LOC is the location of the
10652 clause. */
10654 tree
10655 build_omp_clause (location_t loc, enum omp_clause_code code)
10657 tree t;
10658 int size, length;
10660 length = omp_clause_num_ops[code];
10661 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10663 record_node_allocation_statistics (OMP_CLAUSE, size);
10665 t = (tree) ggc_internal_alloc (size);
10666 memset (t, 0, size);
10667 TREE_SET_CODE (t, OMP_CLAUSE);
10668 OMP_CLAUSE_SET_CODE (t, code);
10669 OMP_CLAUSE_LOCATION (t) = loc;
10671 return t;
10674 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10675 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10676 Except for the CODE and operand count field, other storage for the
10677 object is initialized to zeros. */
10679 tree
10680 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10682 tree t;
10683 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10685 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10686 gcc_assert (len >= 1);
10688 record_node_allocation_statistics (code, length);
10690 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10692 TREE_SET_CODE (t, code);
10694 /* Can't use TREE_OPERAND to store the length because if checking is
10695 enabled, it will try to check the length before we store it. :-P */
10696 t->exp.operands[0] = build_int_cst (sizetype, len);
10698 return t;
10701 /* Helper function for build_call_* functions; build a CALL_EXPR with
10702 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10703 the argument slots. */
10705 static tree
10706 build_call_1 (tree return_type, tree fn, int nargs)
10708 tree t;
10710 t = build_vl_exp (CALL_EXPR, nargs + 3);
10711 TREE_TYPE (t) = return_type;
10712 CALL_EXPR_FN (t) = fn;
10713 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10715 return t;
10718 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10719 FN and a null static chain slot. NARGS is the number of call arguments
10720 which are specified as "..." arguments. */
10722 tree
10723 build_call_nary (tree return_type, tree fn, int nargs, ...)
10725 tree ret;
10726 va_list args;
10727 va_start (args, nargs);
10728 ret = build_call_valist (return_type, fn, nargs, args);
10729 va_end (args);
10730 return ret;
10733 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10734 FN and a null static chain slot. NARGS is the number of call arguments
10735 which are specified as a va_list ARGS. */
10737 tree
10738 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10740 tree t;
10741 int i;
10743 t = build_call_1 (return_type, fn, nargs);
10744 for (i = 0; i < nargs; i++)
10745 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10746 process_call_operands (t);
10747 return t;
10750 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10751 FN and a null static chain slot. NARGS is the number of call arguments
10752 which are specified as a tree array ARGS. */
10754 tree
10755 build_call_array_loc (location_t loc, tree return_type, tree fn,
10756 int nargs, const tree *args)
10758 tree t;
10759 int i;
10761 t = build_call_1 (return_type, fn, nargs);
10762 for (i = 0; i < nargs; i++)
10763 CALL_EXPR_ARG (t, i) = args[i];
10764 process_call_operands (t);
10765 SET_EXPR_LOCATION (t, loc);
10766 return t;
10769 /* Like build_call_array, but takes a vec. */
10771 tree
10772 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10774 tree ret, t;
10775 unsigned int ix;
10777 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10778 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10779 CALL_EXPR_ARG (ret, ix) = t;
10780 process_call_operands (ret);
10781 return ret;
10784 /* Conveniently construct a function call expression. FNDECL names the
10785 function to be called and N arguments are passed in the array
10786 ARGARRAY. */
10788 tree
10789 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10791 tree fntype = TREE_TYPE (fndecl);
10792 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10794 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10797 /* Conveniently construct a function call expression. FNDECL names the
10798 function to be called and the arguments are passed in the vector
10799 VEC. */
10801 tree
10802 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10804 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10805 vec_safe_address (vec));
10809 /* Conveniently construct a function call expression. FNDECL names the
10810 function to be called, N is the number of arguments, and the "..."
10811 parameters are the argument expressions. */
10813 tree
10814 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10816 va_list ap;
10817 tree *argarray = XALLOCAVEC (tree, n);
10818 int i;
10820 va_start (ap, n);
10821 for (i = 0; i < n; i++)
10822 argarray[i] = va_arg (ap, tree);
10823 va_end (ap);
10824 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10827 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10828 varargs macros aren't supported by all bootstrap compilers. */
10830 tree
10831 build_call_expr (tree fndecl, int n, ...)
10833 va_list ap;
10834 tree *argarray = XALLOCAVEC (tree, n);
10835 int i;
10837 va_start (ap, n);
10838 for (i = 0; i < n; i++)
10839 argarray[i] = va_arg (ap, tree);
10840 va_end (ap);
10841 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
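/* A minimal usage sketch, assuming DST, SRC and LEN are previously built
   trees of pointer and size type:

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dst, src, len);

   The resulting CALL_EXPR carries UNKNOWN_LOCATION; build_call_expr_loc
   attaches an explicit location instead.  */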
10844 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10845 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10846 It will get gimplified later into an ordinary internal function. */
10848 tree
10849 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10850 tree type, int n, const tree *args)
10852 tree t = build_call_1 (type, NULL_TREE, n);
10853 for (int i = 0; i < n; ++i)
10854 CALL_EXPR_ARG (t, i) = args[i];
10855 SET_EXPR_LOCATION (t, loc);
10856 CALL_EXPR_IFN (t) = ifn;
10857 process_call_operands (t);
10858 return t;
10861 /* Build an internal call expression. This is just like CALL_EXPR, except
10862 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10863 internal function. */
10865 tree
10866 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10867 tree type, int n, ...)
10869 va_list ap;
10870 tree *argarray = XALLOCAVEC (tree, n);
10871 int i;
10873 va_start (ap, n);
10874 for (i = 0; i < n; i++)
10875 argarray[i] = va_arg (ap, tree);
10876 va_end (ap);
10877 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10880 /* Return a function call to FN, if the target is guaranteed to support it,
10881 or null otherwise.
10883 N is the number of arguments, passed in the "...", and TYPE is the
10884 type of the return value. */
10886 tree
10887 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10888 int n, ...)
10890 va_list ap;
10891 tree *argarray = XALLOCAVEC (tree, n);
10892 int i;
10894 va_start (ap, n);
10895 for (i = 0; i < n; i++)
10896 argarray[i] = va_arg (ap, tree);
10897 va_end (ap);
10898 if (internal_fn_p (fn))
10900 internal_fn ifn = as_internal_fn (fn);
10901 if (direct_internal_fn_p (ifn))
10903 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10904 if (!direct_internal_fn_supported_p (ifn, types,
10905 OPTIMIZE_FOR_BOTH))
10906 return NULL_TREE;
10908 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10910 else
10912 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10913 if (!fndecl)
10914 return NULL_TREE;
10915 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10919 /* Return a function call to the appropriate builtin alloca variant.
10921 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10922 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10923 bound for SIZE in case it is not a fixed value. */
10925 tree
10926 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10928 if (max_size >= 0)
10930 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10931 return
10932 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10934 else if (align > 0)
10936 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10937 return build_call_expr (t, 2, size, size_int (align));
10939 else
10941 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10942 return build_call_expr (t, 1, size);
10946 /* The built-in decl to use to mark code points believed to be unreachable.
10947 Typically __builtin_unreachable, but __builtin_trap if
10948 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10949 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10950 appropriate ubsan function. When building a call directly, use
10951 {gimple_,}build_builtin_unreachable instead. */
10953 tree
10954 builtin_decl_unreachable ()
10956 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10958 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10959 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10960 : flag_unreachable_traps)
10961 fncode = BUILT_IN_UNREACHABLE_TRAP;
10962 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10963 in the sanopt pass. */
10965 return builtin_decl_explicit (fncode);
10968 /* Build a call to __builtin_unreachable, possibly rewritten by
10969 -fsanitize=unreachable. Use this rather than the above when practical. */
10971 tree
10972 build_builtin_unreachable (location_t loc)
10974 tree data = NULL_TREE;
10975 tree fn = sanitize_unreachable_fn (&data, loc);
10976 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10979 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10980 if SIZE == -1) and return a tree node representing a char* pointer to
10981 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10982 the STRING_CST value is the LEN bytes at STR (the representation
10983 of the string, which may be wide). Otherwise it's all zeros. */
10985 tree
10986 build_string_literal (unsigned len, const char *str /* = NULL */,
10987 tree eltype /* = char_type_node */,
10988 unsigned HOST_WIDE_INT size /* = -1 */)
10990 tree t = build_string (len, str);
10991 /* Set the maximum valid index based on the string length or SIZE. */
10992 unsigned HOST_WIDE_INT maxidx
10993 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10995 tree index = build_index_type (size_int (maxidx));
10996 eltype = build_type_variant (eltype, 1, 0);
10997 tree type = build_array_type (eltype, index);
10998 TREE_TYPE (t) = type;
10999 TREE_CONSTANT (t) = 1;
11000 TREE_READONLY (t) = 1;
11001 TREE_STATIC (t) = 1;
11003 type = build_pointer_type (eltype);
11004 t = build1 (ADDR_EXPR, type,
11005 build4 (ARRAY_REF, eltype,
11006 t, integer_zero_node, NULL_TREE, NULL_TREE));
11007 return t;
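/* For example, build_string_literal (sizeof "hi", "hi") builds the
   STRING_CST "hi" (including its terminating NUL) together with its array
   type and returns an ADDR_EXPR that can be used directly as a
   pointer-typed call argument.  */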
11012 /* Return true if T (assumed to be a DECL) must be assigned a memory
11013 location. */
11015 bool
11016 needs_to_live_in_memory (const_tree t)
11018 return (TREE_ADDRESSABLE (t)
11019 || is_global_var (t)
11020 || (TREE_CODE (t) == RESULT_DECL
11021 && !DECL_BY_REFERENCE (t)
11022 && aggregate_value_p (t, current_function_decl)));
11025 /* Return the value of the constant X, sign-extended. */
11027 HOST_WIDE_INT
11028 int_cst_value (const_tree x)
11030 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11031 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11033 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11034 gcc_assert (cst_and_fits_in_hwi (x));
11036 if (bits < HOST_BITS_PER_WIDE_INT)
11038 bool negative = ((val >> (bits - 1)) & 1) != 0;
11039 if (negative)
11040 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11041 else
11042 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11045 return val;
11048 /* If TYPE is an integral or pointer type, return an integer type with
11049 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11050 if TYPE is already an integer type of signedness UNSIGNEDP.
11051 If TYPE is a floating-point type, return an integer type with the same
11052 bitsize and with the signedness given by UNSIGNEDP; this is useful
11053 when doing bit-level operations on a floating-point value. */
11055 tree
11056 signed_or_unsigned_type_for (int unsignedp, tree type)
11058 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11059 return type;
11061 if (TREE_CODE (type) == VECTOR_TYPE)
11063 tree inner = TREE_TYPE (type);
11064 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11065 if (!inner2)
11066 return NULL_TREE;
11067 if (inner == inner2)
11068 return type;
11069 machine_mode new_mode;
11070 if (VECTOR_MODE_P (TYPE_MODE (type))
11071 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11072 return build_vector_type_for_mode (inner2, new_mode);
11073 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11076 if (TREE_CODE (type) == COMPLEX_TYPE)
11078 tree inner = TREE_TYPE (type);
11079 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11080 if (!inner2)
11081 return NULL_TREE;
11082 if (inner == inner2)
11083 return type;
11084 return build_complex_type (inner2);
11087 unsigned int bits;
11088 if (INTEGRAL_TYPE_P (type)
11089 || POINTER_TYPE_P (type)
11090 || TREE_CODE (type) == OFFSET_TYPE)
11091 bits = TYPE_PRECISION (type);
11092 else if (TREE_CODE (type) == REAL_TYPE)
11093 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11094 else
11095 return NULL_TREE;
11097 if (TREE_CODE (type) == BITINT_TYPE && (unsignedp || bits > 1))
11098 return build_bitint_type (bits, unsignedp);
11099 return build_nonstandard_integer_type (bits, unsignedp);
11102 /* If TYPE is an integral or pointer type, return an integer type with
11103 the same precision which is unsigned, or itself if TYPE is already an
11104 unsigned integer type. If TYPE is a floating-point type, return an
11105 unsigned integer type with the same bitsize as TYPE. */
11107 tree
11108 unsigned_type_for (tree type)
11110 return signed_or_unsigned_type_for (1, type);
11113 /* If TYPE is an integral or pointer type, return an integer type with
11114 the same precision which is signed, or itself if TYPE is already a
11115 signed integer type. If TYPE is a floating-point type, return a
11116 signed integer type with the same bitsize as TYPE. */
11118 tree
11119 signed_type_for (tree type)
11121 return signed_or_unsigned_type_for (0, type);
11124 /* - For VECTOR_TYPEs:
11125 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11126 - The number of elements must match (known_eq).
11127 - targetm.vectorize.get_mask_mode must exist and return exactly
11128 the mode of the truth type.
11129 - Otherwise, the truth type must be a BOOLEAN_TYPE
11130 or useless_type_conversion_p to BOOLEAN_TYPE. */
11131 bool
11132 is_truth_type_for (tree type, tree truth_type)
11134 machine_mode mask_mode = TYPE_MODE (truth_type);
11135 machine_mode vmode = TYPE_MODE (type);
11136 machine_mode tmask_mode;
11138 if (TREE_CODE (type) == VECTOR_TYPE)
11140 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11141 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11142 TYPE_VECTOR_SUBPARTS (truth_type))
11143 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11144 && tmask_mode == mask_mode)
11145 return true;
11147 return false;
11150 return useless_type_conversion_p (boolean_type_node, truth_type);
11153 /* If TYPE is a vector type, return a signed integer vector type with the
11154 same width and number of subparts. Otherwise return boolean_type_node. */
11156 tree
11157 truth_type_for (tree type)
11159 if (TREE_CODE (type) == VECTOR_TYPE)
11161 if (VECTOR_BOOLEAN_TYPE_P (type))
11162 return type;
11163 return build_truth_vector_type_for (type);
11165 else
11166 return boolean_type_node;
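/* For instance, given a four-element float vector type, truth_type_for
   returns a four-element boolean vector type whose mode is whatever
   targetm.vectorize.get_mask_mode chooses for the vector's mode, while for
   integer_type_node it simply returns boolean_type_node.  */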
11169 /* Returns the largest value obtainable by casting something in INNER type to
11170 OUTER type. */
11172 tree
11173 upper_bound_in_type (tree outer, tree inner)
11175 unsigned int det = 0;
11176 unsigned oprec = TYPE_PRECISION (outer);
11177 unsigned iprec = TYPE_PRECISION (inner);
11178 unsigned prec;
11180 /* Compute a unique number for every combination. */
11181 det |= (oprec > iprec) ? 4 : 0;
11182 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11183 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11185 /* Determine the exponent to use. */
11186 switch (det)
11188 case 0:
11189 case 1:
11190 /* oprec <= iprec, outer: signed, inner: don't care. */
11191 prec = oprec - 1;
11192 break;
11193 case 2:
11194 case 3:
11195 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11196 prec = oprec;
11197 break;
11198 case 4:
11199 /* oprec > iprec, outer: signed, inner: signed. */
11200 prec = iprec - 1;
11201 break;
11202 case 5:
11203 /* oprec > iprec, outer: signed, inner: unsigned. */
11204 prec = iprec;
11205 break;
11206 case 6:
11207 /* oprec > iprec, outer: unsigned, inner: signed. */
11208 prec = oprec;
11209 break;
11210 case 7:
11211 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11212 prec = iprec;
11213 break;
11214 default:
11215 gcc_unreachable ();
11218 return wide_int_to_tree (outer,
11219 wi::mask (prec, false, TYPE_PRECISION (outer)));
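/* Two worked cases for the switch above: casting a 32-bit signed INNER to
   a 16-bit unsigned OUTER gives DET == 2, so PREC == 16 and the result is
   65535; casting a 16-bit signed INNER to a 32-bit signed OUTER gives
   DET == 4, so PREC == 15 and the result is 32767.  */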
11222 /* Returns the smallest value obtainable by casting something in INNER type to
11223 OUTER type. */
11225 tree
11226 lower_bound_in_type (tree outer, tree inner)
11228 unsigned oprec = TYPE_PRECISION (outer);
11229 unsigned iprec = TYPE_PRECISION (inner);
11231 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11232 and obtain 0. */
11233 if (TYPE_UNSIGNED (outer)
11234 /* If we are widening something of an unsigned type, OUTER type
11235 contains all values of INNER type. In particular, both INNER
11236 and OUTER types have zero in common. */
11237 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11238 return build_int_cst (outer, 0);
11239 else
11241 /* If we are widening a signed type to another signed type, we
11242 want to obtain -2^^(iprec-1). If we are keeping the
11243 precision or narrowing to a signed type, we want to obtain
11244 -2^(oprec-1). */
11245 unsigned prec = oprec > iprec ? iprec : oprec;
11246 return wide_int_to_tree (outer,
11247 wi::mask (prec - 1, true,
11248 TYPE_PRECISION (outer)));
11252 /* Return true if two operands that are suitable for PHI nodes are
11253 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11254 SSA_NAME or invariant. Note that this is strictly an optimization.
11255 That is, callers of this function can directly call operand_equal_p
11256 and get the same result, only slower. */
11258 bool
11259 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11261 if (arg0 == arg1)
11262 return true;
11263 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11264 return false;
11265 return operand_equal_p (arg0, arg1, 0);
11268 /* Returns the number of zeros at the end of the binary representation of X. */
11270 tree
11271 num_ending_zeros (const_tree x)
11273 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
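/* For example, for X == 40 (binary 101000) the result is the constant 3.  */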
11277 #define WALK_SUBTREE(NODE) \
11278 do \
11280 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11281 if (result) \
11282 return result; \
11284 while (0)
11286 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11287 be walked whenever a type is seen in the tree. The rest of the operands and the
11288 return value are as for walk_tree. */
11290 static tree
11291 walk_type_fields (tree type, walk_tree_fn func, void *data,
11292 hash_set<tree> *pset, walk_tree_lh lh)
11294 tree result = NULL_TREE;
11296 switch (TREE_CODE (type))
11298 case POINTER_TYPE:
11299 case REFERENCE_TYPE:
11300 case VECTOR_TYPE:
11301 /* We have to worry about mutually recursive pointers. These can't
11302 be written in C. They can in Ada. It's pathological, but
11303 there's an ACATS test (c38102a) that checks it. Deal with this
11304 by checking if we're pointing to another pointer, that one
11305 points to another pointer, that one does too, and we have no htab.
11306 If so, get a hash table. We check three levels deep to avoid
11307 the cost of the hash table if we don't need one. */
11308 if (POINTER_TYPE_P (TREE_TYPE (type))
11309 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11310 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11311 && !pset)
11313 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11314 func, data);
11315 if (result)
11316 return result;
11318 break;
11321 /* fall through */
11323 case COMPLEX_TYPE:
11324 WALK_SUBTREE (TREE_TYPE (type));
11325 break;
11327 case METHOD_TYPE:
11328 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11330 /* Fall through. */
11332 case FUNCTION_TYPE:
11333 WALK_SUBTREE (TREE_TYPE (type));
11335 tree arg;
11337 /* We never want to walk into default arguments. */
11338 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11339 WALK_SUBTREE (TREE_VALUE (arg));
11341 break;
11343 case ARRAY_TYPE:
11344 /* Don't follow this node's type if it is a pointer, for fear that
11345 we'll have infinite recursion. If we have a PSET, then we
11346 need not fear. */
11347 if (pset
11348 || (!POINTER_TYPE_P (TREE_TYPE (type))
11349 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11350 WALK_SUBTREE (TREE_TYPE (type));
11351 WALK_SUBTREE (TYPE_DOMAIN (type));
11352 break;
11354 case OFFSET_TYPE:
11355 WALK_SUBTREE (TREE_TYPE (type));
11356 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11357 break;
11359 default:
11360 break;
11363 return NULL_TREE;
11366 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11367 called with the DATA and the address of each sub-tree. If FUNC returns a
11368 non-NULL value, the traversal is stopped, and the value returned by FUNC
11369 is returned. If PSET is non-NULL it is used to record the nodes visited,
11370 and to avoid visiting a node more than once. */
11372 tree
11373 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11374 hash_set<tree> *pset, walk_tree_lh lh)
11376 #define WALK_SUBTREE_TAIL(NODE) \
11377 do \
11379 tp = & (NODE); \
11380 goto tail_recurse; \
11382 while (0)
11384 tail_recurse:
11385 /* Skip empty subtrees. */
11386 if (!*tp)
11387 return NULL_TREE;
11389 /* Don't walk the same tree twice, if the user has requested
11390 that we avoid doing so. */
11391 if (pset && pset->add (*tp))
11392 return NULL_TREE;
11394 /* Call the function. */
11395 int walk_subtrees = 1;
11396 tree result = (*func) (tp, &walk_subtrees, data);
11398 /* If we found something, return it. */
11399 if (result)
11400 return result;
11402 tree t = *tp;
11403 tree_code code = TREE_CODE (t);
11405 /* Even if we didn't, FUNC may have decided that there was nothing
11406 interesting below this point in the tree. */
11407 if (!walk_subtrees)
11409 /* But we still need to check our siblings. */
11410 if (code == TREE_LIST)
11411 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11412 else if (code == OMP_CLAUSE)
11413 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11414 else
11415 return NULL_TREE;
11418 if (lh)
11420 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11421 if (result || !walk_subtrees)
11422 return result;
11425 switch (code)
11427 case ERROR_MARK:
11428 case IDENTIFIER_NODE:
11429 case INTEGER_CST:
11430 case REAL_CST:
11431 case FIXED_CST:
11432 case STRING_CST:
11433 case BLOCK:
11434 case PLACEHOLDER_EXPR:
11435 case SSA_NAME:
11436 case FIELD_DECL:
11437 case RESULT_DECL:
11438 /* None of these have subtrees other than those already walked
11439 above. */
11440 break;
11442 case TREE_LIST:
11443 WALK_SUBTREE (TREE_VALUE (t));
11444 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11446 case TREE_VEC:
11448 int len = TREE_VEC_LENGTH (t);
11450 if (len == 0)
11451 break;
11453 /* Walk all elements but the last. */
11454 for (int i = 0; i < len - 1; ++i)
11455 WALK_SUBTREE (TREE_VEC_ELT (t, i));
11457 /* Now walk the last one as a tail call. */
11458 WALK_SUBTREE_TAIL (TREE_VEC_ELT (t, len - 1));
11461 case VECTOR_CST:
11463 unsigned len = vector_cst_encoded_nelts (t);
11464 if (len == 0)
11465 break;
11466 /* Walk all elements but the last. */
11467 for (unsigned i = 0; i < len - 1; ++i)
11468 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (t, i));
11469 /* Now walk the last one as a tail call. */
11470 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (t, len - 1));
11473 case COMPLEX_CST:
11474 WALK_SUBTREE (TREE_REALPART (t));
11475 WALK_SUBTREE_TAIL (TREE_IMAGPART (t));
11477 case CONSTRUCTOR:
11479 unsigned HOST_WIDE_INT idx;
11480 constructor_elt *ce;
11482 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce);
11483 idx++)
11484 WALK_SUBTREE (ce->value);
11486 break;
11488 case SAVE_EXPR:
11489 WALK_SUBTREE_TAIL (TREE_OPERAND (t, 0));
11491 case BIND_EXPR:
11493 tree decl;
11494 for (decl = BIND_EXPR_VARS (t); decl; decl = DECL_CHAIN (decl))
11496 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11497 into declarations that are just mentioned, rather than
11498 declared; they don't really belong to this part of the tree.
11499 And, we can see cycles: the initializer for a declaration
11500 can refer to the declaration itself. */
11501 WALK_SUBTREE (DECL_INITIAL (decl));
11502 WALK_SUBTREE (DECL_SIZE (decl));
11503 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11505 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (t));
11508 case STATEMENT_LIST:
11510 tree_stmt_iterator i;
11511 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
11512 WALK_SUBTREE (*tsi_stmt_ptr (i));
11514 break;
11516 case OMP_CLAUSE:
11518 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
11519 for (int i = 0; i < len; i++)
11520 WALK_SUBTREE (OMP_CLAUSE_OPERAND (t, i));
11521 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11524 case TARGET_EXPR:
11526 int i, len;
11528 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11529 But we only want to walk them once. */
11530 len = (TREE_OPERAND (t, 3) == TREE_OPERAND (t, 1)) ? 2 : 3;
11531 for (i = 0; i < len; ++i)
11532 WALK_SUBTREE (TREE_OPERAND (t, i));
11533 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len));
11536 case DECL_EXPR:
11537 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11538 defining. We only want to walk into these fields of a type in this
11539 case and not in the general case of a mere reference to the type.
11541 The criterion is as follows: if the field can be an expression, it
11542 must be walked only here. This should be in keeping with the fields
11543 that are directly gimplified in gimplify_type_sizes in order for the
11544 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11545 variable-sized types.
11547 Note that DECLs get walked as part of processing the BIND_EXPR. */
11548 if (TREE_CODE (DECL_EXPR_DECL (t)) == TYPE_DECL)
11550 /* Call the function for the decl so e.g. copy_tree_body_r can
11551 replace it with the remapped one. */
11552 result = (*func) (&DECL_EXPR_DECL (t), &walk_subtrees, data);
11553 if (result || !walk_subtrees)
11554 return result;
11556 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (t));
11557 if (TREE_CODE (*type_p) == ERROR_MARK)
11558 return NULL_TREE;
11560 /* Call the function for the type. See if it returns anything or
11561 doesn't want us to continue. If we are to continue, walk both
11562 the normal fields and those for the declaration case. */
11563 result = (*func) (type_p, &walk_subtrees, data);
11564 if (result || !walk_subtrees)
11565 return result;
11567 tree type = *type_p;
11569 /* But do not walk a pointed-to type since it may itself need to
11570 be walked in the declaration case if it isn't anonymous. */
11571 if (!POINTER_TYPE_P (type))
11573 result = walk_type_fields (type, func, data, pset, lh);
11574 if (result)
11575 return result;
11578 /* If this is a record type, also walk the fields. */
11579 if (RECORD_OR_UNION_TYPE_P (type))
11581 tree field;
11583 for (field = TYPE_FIELDS (type); field;
11584 field = DECL_CHAIN (field))
11586 /* We'd like to look at the type of the field, but we can
11587 easily get infinite recursion. So assume it's pointed
11588 to elsewhere in the tree. Also, ignore things that
11589 aren't fields. */
11590 if (TREE_CODE (field) != FIELD_DECL)
11591 continue;
11593 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11594 WALK_SUBTREE (DECL_SIZE (field));
11595 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11596 if (TREE_CODE (type) == QUAL_UNION_TYPE)
11597 WALK_SUBTREE (DECL_QUALIFIER (field));
11601 /* Same for scalar types. */
11602 else if (TREE_CODE (type) == BOOLEAN_TYPE
11603 || TREE_CODE (type) == ENUMERAL_TYPE
11604 || TREE_CODE (type) == INTEGER_TYPE
11605 || TREE_CODE (type) == FIXED_POINT_TYPE
11606 || TREE_CODE (type) == REAL_TYPE)
11608 WALK_SUBTREE (TYPE_MIN_VALUE (type));
11609 WALK_SUBTREE (TYPE_MAX_VALUE (type));
11612 WALK_SUBTREE (TYPE_SIZE (type));
11613 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (type));
11615 /* FALLTHRU */
11617 default:
11618 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11620 int i, len;
11622 /* Walk over all the sub-trees of this operand. */
11623 len = TREE_OPERAND_LENGTH (t);
11625 /* Go through the subtrees. We need to do this in forward order so
11626 that the scope of a FOR_EXPR is handled properly. */
11627 if (len)
11629 for (i = 0; i < len - 1; ++i)
11630 WALK_SUBTREE (TREE_OPERAND (t, i));
11631 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len - 1));
11634 /* If this is a type, walk the needed fields in the type. */
11635 else if (TYPE_P (t))
11636 return walk_type_fields (t, func, data, pset, lh);
11637 break;
11640 /* We didn't find what we were looking for. */
11641 return NULL_TREE;
11643 #undef WALK_SUBTREE_TAIL
11645 #undef WALK_SUBTREE
11647 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11649 tree
11650 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11651 walk_tree_lh lh)
11653 tree result;
11655 hash_set<tree> pset;
11656 result = walk_tree_1 (tp, func, data, &pset, lh);
11657 return result;
11661 tree
11662 tree_block (tree t)
11664 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11666 if (IS_EXPR_CODE_CLASS (c))
11667 return LOCATION_BLOCK (t->exp.locus);
11668 gcc_unreachable ();
11669 return NULL;
11672 void
11673 tree_set_block (tree t, tree b)
11675 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11677 if (IS_EXPR_CODE_CLASS (c))
11679 t->exp.locus = set_block (t->exp.locus, b);
11681 else
11682 gcc_unreachable ();
11685 /* Create a nameless artificial label and put it in the current
11686 function context. The label has a location of LOC. Returns the
11687 newly created label. */
11689 tree
11690 create_artificial_label (location_t loc)
11692 tree lab = build_decl (loc,
11693 LABEL_DECL, NULL_TREE, void_type_node);
11695 DECL_ARTIFICIAL (lab) = 1;
11696 DECL_IGNORED_P (lab) = 1;
11697 DECL_CONTEXT (lab) = current_function_decl;
11698 return lab;
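/* Example (illustrative, not part of the GCC sources): such labels are
   typically paired with LABEL_EXPR/GOTO_EXPR nodes when lowering control
   flow, e.g.

     tree lab = create_artificial_label (UNKNOWN_LOCATION);
     tree def = build1 (LABEL_EXPR, void_type_node, lab);
     tree jmp = build1 (GOTO_EXPR, void_type_node, lab);  */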
11701 /* Given a tree, try to return a useful variable name that we can use
11702 to prefix a temporary that is being assigned the value of the tree.
11703 I.e., given <temp> = &A, return A. */
11705 const char *
11706 get_name (tree t)
11708 tree stripped_decl;
11710 stripped_decl = t;
11711 STRIP_NOPS (stripped_decl);
11712 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11713 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11714 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11716 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11717 if (!name)
11718 return NULL;
11719 return IDENTIFIER_POINTER (name);
11721 else
11723 switch (TREE_CODE (stripped_decl))
11725 case ADDR_EXPR:
11726 return get_name (TREE_OPERAND (stripped_decl, 0));
11727 default:
11728 return NULL;
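/* Example (illustrative sketch): for a hypothetical VAR_DECL a_decl whose
   DECL_NAME is the identifier "a", both of the following return the
   string "a":

     get_name (a_decl);
     get_name (build_fold_addr_expr (a_decl));   (via the ADDR_EXPR case)

   whereas an anonymous SSA name with no associated identifier makes
   get_name return NULL.  */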
11733 /* Return true if TYPE has a variable argument list. */
11735 bool
11736 stdarg_p (const_tree fntype)
11738 function_args_iterator args_iter;
11739 tree n = NULL_TREE, t;
11741 if (!fntype)
11742 return false;
11744 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11745 return true;
11747 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11749 n = t;
11752 return n != NULL_TREE && n != void_type_node;
11755 /* Return true if TYPE has a prototype. */
11757 bool
11758 prototype_p (const_tree fntype)
11760 tree t;
11762 gcc_assert (fntype != NULL_TREE);
11764 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11765 return true;
11767 t = TYPE_ARG_TYPES (fntype);
11768 return (t != NULL_TREE);
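/* Example (illustrative sketch): for FUNCTION_TYPEs corresponding to the
   C declarations

     int f (int, ...);    stdarg_p -> true,  prototype_p -> true
     int g (void);        stdarg_p -> false, prototype_p -> true
     int h ();            stdarg_p -> false, prototype_p -> false (K&R, pre-C23)

   such types could be built with build_varargs_function_type_list and
   build_function_type_list.  */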
11771 /* If BLOCK is inlined from an __attribute__((__artificial__))
11772 routine, return pointer to location from where it has been
11773 called. */
11774 location_t *
11775 block_nonartificial_location (tree block)
11777 location_t *ret = NULL;
11779 while (block && TREE_CODE (block) == BLOCK
11780 && BLOCK_ABSTRACT_ORIGIN (block))
11782 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11783 if (TREE_CODE (ao) == FUNCTION_DECL)
11785 /* If AO is an artificial inline, point RET to the
11786 call site locus at which it has been inlined and continue
11787 the loop, in case AO's caller is also an artificial
11788 inline. */
11789 if (DECL_DECLARED_INLINE_P (ao)
11790 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11791 ret = &BLOCK_SOURCE_LOCATION (block);
11792 else
11793 break;
11795 else if (TREE_CODE (ao) != BLOCK)
11796 break;
11798 block = BLOCK_SUPERCONTEXT (block);
11800 return ret;
11804 /* If EXP is inlined from an __attribute__((__artificial__))
11805 function, return the location of the original call expression. */
11807 location_t
11808 tree_nonartificial_location (tree exp)
11810 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11812 if (loc)
11813 return *loc;
11814 else
11815 return EXPR_LOCATION (exp);
11818 /* Return the location into which EXP has been inlined. Analogous
11819 to tree_nonartificial_location() above but not limited to artificial
11820 functions declared inline. If SYSTEM_HEADER is true, return
11821 the macro expansion point of the location if it's in a system header */
11823 location_t
11824 tree_inlined_location (tree exp, bool system_header /* = true */)
11826 location_t loc = UNKNOWN_LOCATION;
11828 tree block = TREE_BLOCK (exp);
11830 while (block && TREE_CODE (block) == BLOCK
11831 && BLOCK_ABSTRACT_ORIGIN (block))
11833 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11834 if (TREE_CODE (ao) == FUNCTION_DECL)
11835 loc = BLOCK_SOURCE_LOCATION (block);
11836 else if (TREE_CODE (ao) != BLOCK)
11837 break;
11839 block = BLOCK_SUPERCONTEXT (block);
11842 if (loc == UNKNOWN_LOCATION)
11844 loc = EXPR_LOCATION (exp);
11845 if (system_header)
11846 /* Only consider macro expansion when the block traversal failed
11847 to find a location. Otherwise it's not relevant. */
11848 return expansion_point_location_if_in_system_header (loc);
11851 return loc;
11854 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11855 nodes. */
11857 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11859 hashval_t
11860 cl_option_hasher::hash (tree x)
11862 const_tree const t = x;
11864 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11865 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11866 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11867 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11868 else
11869 gcc_unreachable ();
11872 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11873 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11874 node of the same kind. */
11876 bool
11877 cl_option_hasher::equal (tree x, tree y)
11879 const_tree const xt = x;
11880 const_tree const yt = y;
11882 if (TREE_CODE (xt) != TREE_CODE (yt))
11883 return false;
11885 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11886 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11887 TREE_OPTIMIZATION (yt));
11888 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11889 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11890 TREE_TARGET_OPTION (yt));
11891 else
11892 gcc_unreachable ();
11895 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11897 tree
11898 build_optimization_node (struct gcc_options *opts,
11899 struct gcc_options *opts_set)
11901 tree t;
11903 /* Use the cache of optimization nodes. */
11905 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11906 opts, opts_set);
11908 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11909 t = *slot;
11910 if (!t)
11912 /* Insert this one into the hash table. */
11913 t = cl_optimization_node;
11914 *slot = t;
11916 /* Make a new node for next time round. */
11917 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11920 return t;
11923 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11925 tree
11926 build_target_option_node (struct gcc_options *opts,
11927 struct gcc_options *opts_set)
11929 tree t;
11931 /* Use the cache of target option nodes. */
11933 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11934 opts, opts_set);
11936 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11937 t = *slot;
11938 if (!t)
11940 /* Insert this one into the hash table. */
11941 t = cl_target_option_node;
11942 *slot = t;
11944 /* Make a new node for next time round. */
11945 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11948 return t;
11951 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11952 so that they aren't saved during PCH writing. */
11954 void
11955 prepare_target_option_nodes_for_pch (void)
11957 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11958 for (; iter != cl_option_hash_table->end (); ++iter)
11959 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11960 TREE_TARGET_GLOBALS (*iter) = NULL;
11963 /* Determine the "ultimate origin" of a block. */
11965 tree
11966 block_ultimate_origin (const_tree block)
11968 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11970 if (origin == NULL_TREE)
11971 return NULL_TREE;
11972 else
11974 gcc_checking_assert ((DECL_P (origin)
11975 && DECL_ORIGIN (origin) == origin)
11976 || BLOCK_ORIGIN (origin) == origin);
11977 return origin;
11981 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11982 no instruction. */
11984 bool
11985 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11987 /* Do not strip casts into or out of differing address spaces. */
11988 if (POINTER_TYPE_P (outer_type)
11989 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11991 if (!POINTER_TYPE_P (inner_type)
11992 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11993 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11994 return false;
11996 else if (POINTER_TYPE_P (inner_type)
11997 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11999 /* We already know that outer_type is not a pointer with
12000 a non-generic address space. */
12001 return false;
12004 /* Use precision rather than machine mode when we can, which gives
12005 the correct answer even for submode (bit-field) types. */
12006 if ((INTEGRAL_TYPE_P (outer_type)
12007 || POINTER_TYPE_P (outer_type)
12008 || TREE_CODE (outer_type) == OFFSET_TYPE)
12009 && (INTEGRAL_TYPE_P (inner_type)
12010 || POINTER_TYPE_P (inner_type)
12011 || TREE_CODE (inner_type) == OFFSET_TYPE))
12012 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12014 /* Otherwise fall back on comparing machine modes (e.g. for
12015 aggregate types, floats). */
12016 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
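/* Example (illustrative sketch): on a target where long and int have the
   same precision,

     tree_nop_conversion_p (long_integer_type_node, integer_type_node)

   is true and the conversion can be stripped, whereas a widening
   conversion such as int -> long long is not a nop because the
   precisions differ.  */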
12019 /* Return true iff conversion in EXP generates no instruction. Mark
12020 it inline so that we fully inline into the stripping functions even
12021 though we have two uses of this function. */
12023 static inline bool
12024 tree_nop_conversion (const_tree exp)
12026 tree outer_type, inner_type;
12028 if (location_wrapper_p (exp))
12029 return true;
12030 if (!CONVERT_EXPR_P (exp)
12031 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12032 return false;
12034 outer_type = TREE_TYPE (exp);
12035 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12036 if (!inner_type || inner_type == error_mark_node)
12037 return false;
12039 return tree_nop_conversion_p (outer_type, inner_type);
12042 /* Return true iff conversion in EXP generates no instruction. Don't
12043 consider conversions changing the signedness. */
12045 static bool
12046 tree_sign_nop_conversion (const_tree exp)
12048 tree outer_type, inner_type;
12050 if (!tree_nop_conversion (exp))
12051 return false;
12053 outer_type = TREE_TYPE (exp);
12054 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12056 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12057 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12060 /* Strip conversions from EXP according to tree_nop_conversion and
12061 return the resulting expression. */
12063 tree
12064 tree_strip_nop_conversions (tree exp)
12066 while (tree_nop_conversion (exp))
12067 exp = TREE_OPERAND (exp, 0);
12068 return exp;
12071 /* Strip conversions from EXP according to tree_sign_nop_conversion
12072 and return the resulting expression. */
12074 tree
12075 tree_strip_sign_nop_conversions (tree exp)
12077 while (tree_sign_nop_conversion (exp))
12078 exp = TREE_OPERAND (exp, 0);
12079 return exp;
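/* Example (illustrative sketch): for the GENERIC expression
   (int)(unsigned int) x with x of type int, tree_strip_nop_conversions
   returns x itself, while tree_strip_sign_nop_conversions returns the
   expression unchanged, because already the outermost conversion changes
   the signedness.  */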
12082 /* Avoid any floating point extensions from EXP. */
12083 tree
12084 strip_float_extensions (tree exp)
12086 tree sub, expt, subt;
12088 /* For a floating point constant, look up the narrowest type that can hold
12089 it properly and handle it like (type)(narrowest_type)constant.
12090 This way we can optimize for instance a=a*2.0 where "a" is float
12091 but 2.0 is a double constant. */
12092 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12094 REAL_VALUE_TYPE orig;
12095 tree type = NULL;
12097 orig = TREE_REAL_CST (exp);
12098 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12099 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12100 type = float_type_node;
12101 else if (TYPE_PRECISION (TREE_TYPE (exp))
12102 > TYPE_PRECISION (double_type_node)
12103 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12104 type = double_type_node;
12105 if (type)
12106 return build_real_truncate (type, orig);
12109 if (!CONVERT_EXPR_P (exp))
12110 return exp;
12112 sub = TREE_OPERAND (exp, 0);
12113 subt = TREE_TYPE (sub);
12114 expt = TREE_TYPE (exp);
12116 if (!FLOAT_TYPE_P (subt))
12117 return exp;
12119 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12120 return exp;
12122 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12123 return exp;
12125 return strip_float_extensions (sub);
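/* Example (illustrative sketch): in  float a; ... a * 2.0;  the constant
   is a double REAL_CST that truncates exactly to float, so

     strip_float_extensions (build_real (double_type_node, dconst2))

   yields an equivalent float constant, and stripping the implicit
   (double) a conversion yields a, which lets the multiplication be
   performed in float.  */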
12128 /* Strip out all handled components that produce invariant
12129 offsets. */
12131 const_tree
12132 strip_invariant_refs (const_tree op)
12134 while (handled_component_p (op))
12136 switch (TREE_CODE (op))
12138 case ARRAY_REF:
12139 case ARRAY_RANGE_REF:
12140 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12141 || TREE_OPERAND (op, 2) != NULL_TREE
12142 || TREE_OPERAND (op, 3) != NULL_TREE)
12143 return NULL;
12144 break;
12146 case COMPONENT_REF:
12147 if (TREE_OPERAND (op, 2) != NULL_TREE)
12148 return NULL;
12149 break;
12151 default:;
12153 op = TREE_OPERAND (op, 0);
12156 return op;
12159 /* Strip handled components with zero offset from OP. */
12161 tree
12162 strip_zero_offset_components (tree op)
12164 while (TREE_CODE (op) == COMPONENT_REF
12165 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12166 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12167 op = TREE_OPERAND (op, 0);
12168 return op;
12171 static GTY(()) tree gcc_eh_personality_decl;
12173 /* Return the GCC personality function decl. */
12175 tree
12176 lhd_gcc_personality (void)
12178 if (!gcc_eh_personality_decl)
12179 gcc_eh_personality_decl = build_personality_function ("gcc");
12180 return gcc_eh_personality_decl;
12183 /* TARGET is a call target of GIMPLE call statement
12184 (obtained by gimple_call_fn). Return true if it is
12185 OBJ_TYPE_REF representing a virtual call of a C++ method.
12186 (As opposed to OBJ_TYPE_REF representing objc calls
12187 through a cast where middle-end devirtualization machinery
12188 can't apply.) FOR_DUMP_P is true when being called from
12189 the dump routines. */
12191 bool
12192 virtual_method_call_p (const_tree target, bool for_dump_p)
12194 if (TREE_CODE (target) != OBJ_TYPE_REF)
12195 return false;
12196 tree t = TREE_TYPE (target);
12197 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12198 t = TREE_TYPE (t);
12199 if (TREE_CODE (t) == FUNCTION_TYPE)
12200 return false;
12201 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12202 /* If we do not have BINFO associated, it means that type was built
12203 without devirtualization enabled. Do not consider this a virtual
12204 call. */
12205 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12206 return false;
12207 return true;
12210 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12212 static tree
12213 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12215 unsigned int i;
12216 tree base_binfo, b;
12218 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12219 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12220 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12221 return base_binfo;
12222 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12223 return b;
12224 return NULL;
12227 /* Try to find a base info of BINFO that would have its field decl at offset
12228 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12229 found, return it, otherwise return NULL_TREE. */
12231 tree
12232 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12234 tree type = BINFO_TYPE (binfo);
12236 while (true)
12238 HOST_WIDE_INT pos, size;
12239 tree fld;
12240 int i;
12242 if (types_same_for_odr (type, expected_type))
12243 return binfo;
12244 if (maybe_lt (offset, 0))
12245 return NULL_TREE;
12247 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12249 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12250 continue;
12252 pos = int_bit_position (fld);
12253 size = tree_to_uhwi (DECL_SIZE (fld));
12254 if (known_in_range_p (offset, pos, size))
12255 break;
12257 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12258 return NULL_TREE;
12260 /* Offset 0 indicates the primary base, whose vtable contents are
12261 represented in the binfo for the derived class. */
12262 else if (maybe_ne (offset, 0))
12264 tree found_binfo = NULL, base_binfo;
12265 /* Offsets in BINFO are in bytes relative to the whole structure
12266 while POS is in bits relative to the containing field. */
12267 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12268 / BITS_PER_UNIT);
12270 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12271 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12272 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12274 found_binfo = base_binfo;
12275 break;
12277 if (found_binfo)
12278 binfo = found_binfo;
12279 else
12280 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12281 binfo_offset);
12284 type = TREE_TYPE (fld);
12285 offset -= pos;
12289 /* PR 84195: Replace control characters in "unescaped" with their
12290 escaped equivalents. Allow newlines if -fmessage-length has
12291 been set to a non-zero value. This is done here, rather than
12292 where the attribute is recorded as the message length can
12293 change between these two locations. */
12295 void
12296 escaped_string::escape (const char *unescaped)
12298 char *escaped;
12299 size_t i, new_i, len;
12301 if (m_owned)
12302 free (m_str);
12304 m_str = const_cast<char *> (unescaped);
12305 m_owned = false;
12307 if (unescaped == NULL || *unescaped == 0)
12308 return;
12310 len = strlen (unescaped);
12311 escaped = NULL;
12312 new_i = 0;
12314 for (i = 0; i < len; i++)
12316 char c = unescaped[i];
12318 if (!ISCNTRL (c))
12320 if (escaped)
12321 escaped[new_i++] = c;
12322 continue;
12325 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12327 if (escaped == NULL)
12329 /* We only allocate space for a new string if we
12330 actually encounter a control character that
12331 needs replacing. */
12332 escaped = (char *) xmalloc (len * 2 + 1);
12333 strncpy (escaped, unescaped, i);
12334 new_i = i;
12337 escaped[new_i++] = '\\';
12339 switch (c)
12341 case '\a': escaped[new_i++] = 'a'; break;
12342 case '\b': escaped[new_i++] = 'b'; break;
12343 case '\f': escaped[new_i++] = 'f'; break;
12344 case '\n': escaped[new_i++] = 'n'; break;
12345 case '\r': escaped[new_i++] = 'r'; break;
12346 case '\t': escaped[new_i++] = 't'; break;
12347 case '\v': escaped[new_i++] = 'v'; break;
12348 default: escaped[new_i++] = '?'; break;
12351 else if (escaped)
12352 escaped[new_i++] = c;
12355 if (escaped)
12357 escaped[new_i] = 0;
12358 m_str = escaped;
12359 m_owned = true;
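/* Example (illustrative sketch): escaping a message that contains
   control characters:

     escaped_string msg;
     msg.escape ("two\nlines\tand a tab");

   (const char *) msg then reads "two\\nlines\\tand a tab" (the newline is
   instead kept literal when -fmessage-length is non-zero), while a plain
   printable string is reused as-is without copying.  */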
12363 /* Warn about a use of an identifier which was marked deprecated. Returns
12364 whether a warning was given. */
12366 bool
12367 warn_deprecated_use (tree node, tree attr)
12369 escaped_string msg;
12371 if (node == 0 || !warn_deprecated_decl)
12372 return false;
12374 if (!attr)
12376 if (DECL_P (node))
12377 attr = DECL_ATTRIBUTES (node);
12378 else if (TYPE_P (node))
12380 tree decl = TYPE_STUB_DECL (node);
12381 if (decl)
12382 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12383 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12384 != NULL_TREE)
12386 node = TREE_TYPE (decl);
12387 attr = TYPE_ATTRIBUTES (node);
12392 if (attr)
12393 attr = lookup_attribute ("deprecated", attr);
12395 if (attr)
12396 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12398 bool w = false;
12399 if (DECL_P (node))
12401 auto_diagnostic_group d;
12402 if (msg)
12403 w = warning (OPT_Wdeprecated_declarations,
12404 "%qD is deprecated: %s", node, (const char *) msg);
12405 else
12406 w = warning (OPT_Wdeprecated_declarations,
12407 "%qD is deprecated", node);
12408 if (w)
12409 inform (DECL_SOURCE_LOCATION (node), "declared here");
12411 else if (TYPE_P (node))
12413 tree what = NULL_TREE;
12414 tree decl = TYPE_STUB_DECL (node);
12416 if (TYPE_NAME (node))
12418 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12419 what = TYPE_NAME (node);
12420 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12421 && DECL_NAME (TYPE_NAME (node)))
12422 what = DECL_NAME (TYPE_NAME (node));
12425 auto_diagnostic_group d;
12426 if (what)
12428 if (msg)
12429 w = warning (OPT_Wdeprecated_declarations,
12430 "%qE is deprecated: %s", what, (const char *) msg);
12431 else
12432 w = warning (OPT_Wdeprecated_declarations,
12433 "%qE is deprecated", what);
12435 else
12437 if (msg)
12438 w = warning (OPT_Wdeprecated_declarations,
12439 "type is deprecated: %s", (const char *) msg);
12440 else
12441 w = warning (OPT_Wdeprecated_declarations,
12442 "type is deprecated");
12445 if (w && decl)
12446 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12449 return w;
12452 /* Error out with an identifier which was marked 'unavailable'. */
12453 void
12454 error_unavailable_use (tree node, tree attr)
12456 escaped_string msg;
12458 if (node == 0)
12459 return;
12461 if (!attr)
12463 if (DECL_P (node))
12464 attr = DECL_ATTRIBUTES (node);
12465 else if (TYPE_P (node))
12467 tree decl = TYPE_STUB_DECL (node);
12468 if (decl)
12469 attr = lookup_attribute ("unavailable",
12470 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12474 if (attr)
12475 attr = lookup_attribute ("unavailable", attr);
12477 if (attr)
12478 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12480 if (DECL_P (node))
12482 auto_diagnostic_group d;
12483 if (msg)
12484 error ("%qD is unavailable: %s", node, (const char *) msg);
12485 else
12486 error ("%qD is unavailable", node);
12487 inform (DECL_SOURCE_LOCATION (node), "declared here");
12489 else if (TYPE_P (node))
12491 tree what = NULL_TREE;
12492 tree decl = TYPE_STUB_DECL (node);
12494 if (TYPE_NAME (node))
12496 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12497 what = TYPE_NAME (node);
12498 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12499 && DECL_NAME (TYPE_NAME (node)))
12500 what = DECL_NAME (TYPE_NAME (node));
12503 auto_diagnostic_group d;
12504 if (what)
12506 if (msg)
12507 error ("%qE is unavailable: %s", what, (const char *) msg);
12508 else
12509 error ("%qE is unavailable", what);
12511 else
12513 if (msg)
12514 error ("type is unavailable: %s", (const char *) msg);
12515 else
12516 error ("type is unavailable");
12519 if (decl)
12520 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12524 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12525 somewhere in it. */
12527 bool
12528 contains_bitfld_component_ref_p (const_tree ref)
12530 while (handled_component_p (ref))
12532 if (TREE_CODE (ref) == COMPONENT_REF
12533 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12534 return true;
12535 ref = TREE_OPERAND (ref, 0);
12538 return false;
12541 /* Try to determine whether a TRY_CATCH expression can fall through.
12542 This is a subroutine of block_may_fallthru. */
12544 static bool
12545 try_catch_may_fallthru (const_tree stmt)
12547 tree_stmt_iterator i;
12549 /* If the TRY block can fall through, the whole TRY_CATCH can
12550 fall through. */
12551 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12552 return true;
12554 i = tsi_start (TREE_OPERAND (stmt, 1));
12555 switch (TREE_CODE (tsi_stmt (i)))
12557 case CATCH_EXPR:
12558 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12559 catch expression and a body. The whole TRY_CATCH may fall
12560 through iff any of the catch bodies falls through. */
12561 for (; !tsi_end_p (i); tsi_next (&i))
12563 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12564 return true;
12566 return false;
12568 case EH_FILTER_EXPR:
12569 /* The exception filter expression only matters if there is an
12570 exception. If the exception does not match EH_FILTER_TYPES,
12571 we will execute EH_FILTER_FAILURE, and we will fall through
12572 if that falls through. If the exception does match
12573 EH_FILTER_TYPES, the stack unwinder will continue up the
12574 stack, so we will not fall through. We don't know whether we
12575 will throw an exception which matches EH_FILTER_TYPES or not,
12576 so we just ignore EH_FILTER_TYPES and assume that we might
12577 throw an exception which doesn't match. */
12578 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12580 default:
12581 /* This case represents statements to be executed when an
12582 exception occurs. Those statements are implicitly followed
12583 by a RESX statement to resume execution after the exception.
12584 So in this case the TRY_CATCH never falls through. */
12585 return false;
12589 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12590 need not be 100% accurate; simply be conservative and return true if we
12591 don't know. This is used only to avoid stupidly generating extra code.
12592 If we're wrong, we'll just delete the extra code later. */
12594 bool
12595 block_may_fallthru (const_tree block)
12597 /* This CONST_CAST is okay because expr_last returns its argument
12598 unmodified and we assign it to a const_tree. */
12599 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12601 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12603 case GOTO_EXPR:
12604 case RETURN_EXPR:
12605 /* Easy cases. If the last statement of the block implies
12606 control transfer, then we can't fall through. */
12607 return false;
12609 case SWITCH_EXPR:
12610 /* If there is a default: label or case labels cover all possible
12611 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12612 to some case label in all cases and all we care is whether the
12613 SWITCH_BODY falls through. */
12614 if (SWITCH_ALL_CASES_P (stmt))
12615 return block_may_fallthru (SWITCH_BODY (stmt));
12616 return true;
12618 case COND_EXPR:
12619 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12620 return true;
12621 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12623 case BIND_EXPR:
12624 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12626 case TRY_CATCH_EXPR:
12627 return try_catch_may_fallthru (stmt);
12629 case TRY_FINALLY_EXPR:
12630 /* The finally clause is always executed after the try clause,
12631 so if it does not fall through, then the try-finally will not
12632 fall through. Otherwise, if the try clause does not fall
12633 through, then when the finally clause falls through it will
12634 resume execution wherever the try clause was going. So the
12635 whole try-finally will only fall through if both the try
12636 clause and the finally clause fall through. */
12637 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12638 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12640 case EH_ELSE_EXPR:
12641 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12643 case MODIFY_EXPR:
12644 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12645 stmt = TREE_OPERAND (stmt, 1);
12646 else
12647 return true;
12648 /* FALLTHRU */
12650 case CALL_EXPR:
12651 /* Functions that do not return do not fall through. */
12652 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12654 case CLEANUP_POINT_EXPR:
12655 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12657 case TARGET_EXPR:
12658 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12660 case ERROR_MARK:
12661 return true;
12663 default:
12664 return lang_hooks.block_may_fallthru (stmt);
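/* Example (illustrative): for a statement list whose last statement is a
   call to a function not marked noreturn, block_may_fallthru returns
   true; if the list instead ends in a RETURN_EXPR, a GOTO_EXPR, or a
   call with ECF_NORETURN (e.g. abort), it returns false.  */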
12668 /* True if we are using EH to handle cleanups. */
12669 static bool using_eh_for_cleanups_flag = false;
12671 /* This routine is called from front ends to indicate eh should be used for
12672 cleanups. */
12673 void
12674 using_eh_for_cleanups (void)
12676 using_eh_for_cleanups_flag = true;
12679 /* Query whether EH is used for cleanups. */
12680 bool
12681 using_eh_for_cleanups_p (void)
12683 return using_eh_for_cleanups_flag;
12686 /* Wrapper for tree_code_name to ensure that tree code is valid */
12687 const char *
12688 get_tree_code_name (enum tree_code code)
12690 const char *invalid = "<invalid tree code>";
12692 /* The tree_code enum promotes to signed, but we could be getting
12693 invalid values, so force an unsigned comparison. */
12694 if (unsigned (code) >= MAX_TREE_CODES)
12696 if ((unsigned)code == 0xa5a5)
12697 return "ggc_freed";
12698 return invalid;
12701 return tree_code_name[code];
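/* Example (illustrative): get_tree_code_name (PLUS_EXPR) returns
   "plus_expr"; an out-of-range value returns "<invalid tree code>", and
   the special pattern 0xa5a5 left behind in GC-freed memory (with GC
   checking enabled) returns "ggc_freed".  */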
12704 /* Drops the TREE_OVERFLOW flag from T. */
12706 tree
12707 drop_tree_overflow (tree t)
12709 gcc_checking_assert (TREE_OVERFLOW (t));
12711 /* For tree codes with a sharing machinery re-build the result. */
12712 if (poly_int_tree_p (t))
12713 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12715 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12716 and canonicalize the result. */
12717 if (TREE_CODE (t) == VECTOR_CST)
12719 tree_vector_builder builder;
12720 builder.new_unary_operation (TREE_TYPE (t), t, true);
12721 unsigned int count = builder.encoded_nelts ();
12722 for (unsigned int i = 0; i < count; ++i)
12724 tree elt = VECTOR_CST_ELT (t, i);
12725 if (TREE_OVERFLOW (elt))
12726 elt = drop_tree_overflow (elt);
12727 builder.quick_push (elt);
12729 return builder.build ();
12732 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12733 and drop the flag. */
12734 t = copy_node (t);
12735 TREE_OVERFLOW (t) = 0;
12737 /* For constants that contain nested constants, drop the flag
12738 from those as well. */
12739 if (TREE_CODE (t) == COMPLEX_CST)
12741 if (TREE_OVERFLOW (TREE_REALPART (t)))
12742 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12743 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12744 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12747 return t;
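/* Example (illustrative sketch): constant folding __INT_MAX__ + 1
   produces an INTEGER_CST with TREE_OVERFLOW set; callers that only care
   about the value can clear the flag with

     if (TREE_OVERFLOW (cst))
       cst = drop_tree_overflow (cst);

   which for integer constants goes through wide_int_to_tree and thus
   reuses the shared, non-overflowed node.  */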
12750 /* Given a memory reference expression T, return its base address.
12751 The base address of a memory reference expression is the main
12752 object being referenced. For instance, the base address for
12753 'array[i].fld[j]' is 'array'. You can think of this as stripping
12754 away the offset part from a memory address.
12756 This function calls handled_component_p to strip away all the inner
12757 parts of the memory reference until it reaches the base object. */
12759 tree
12760 get_base_address (tree t)
12762 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12763 t = TREE_OPERAND (t, 0);
12764 while (handled_component_p (t))
12765 t = TREE_OPERAND (t, 0);
12767 if ((TREE_CODE (t) == MEM_REF
12768 || TREE_CODE (t) == TARGET_MEM_REF)
12769 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12770 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12772 return t;
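/* Example (illustrative): for the C lvalue  array[i].fld[j]  the GENERIC
   reference is a nest of ARRAY_REF and COMPONENT_REF nodes, and
   get_base_address strips those handled components to return the
   VAR_DECL for array; similarly, for a MEM_REF whose address operand is
   an ADDR_EXPR of a decl, it looks through the ADDR_EXPR and returns the
   decl itself.  */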
12775 /* Return a tree of sizetype representing the size, in bytes, of the element
12776 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12778 tree
12779 array_ref_element_size (tree exp)
12781 tree aligned_size = TREE_OPERAND (exp, 3);
12782 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12783 location_t loc = EXPR_LOCATION (exp);
12785 /* If a size was specified in the ARRAY_REF, it's the size measured
12786 in alignment units of the element type. So multiply by that value. */
12787 if (aligned_size)
12789 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12790 sizetype from another type of the same width and signedness. */
12791 if (TREE_TYPE (aligned_size) != sizetype)
12792 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12793 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12794 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12797 /* Otherwise, take the size from that of the element type. Substitute
12798 any PLACEHOLDER_EXPR that we have. */
12799 else
12800 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12803 /* Return a tree representing the lower bound of the array mentioned in
12804 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12806 tree
12807 array_ref_low_bound (tree exp)
12809 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12811 /* If a lower bound is specified in EXP, use it. */
12812 if (TREE_OPERAND (exp, 2))
12813 return TREE_OPERAND (exp, 2);
12815 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12816 substituting for a PLACEHOLDER_EXPR as needed. */
12817 if (domain_type && TYPE_MIN_VALUE (domain_type))
12818 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12820 /* Otherwise, return a zero of the appropriate type. */
12821 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12822 return (idxtype == error_mark_node
12823 ? integer_zero_node : build_int_cst (idxtype, 0));
12826 /* Return a tree representing the upper bound of the array mentioned in
12827 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12829 tree
12830 array_ref_up_bound (tree exp)
12832 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12834 /* If there is a domain type and it has an upper bound, use it, substituting
12835 for a PLACEHOLDER_EXPR as needed. */
12836 if (domain_type && TYPE_MAX_VALUE (domain_type))
12837 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12839 /* Otherwise fail. */
12840 return NULL_TREE;
12843 /* Returns true if REF is an array reference, a component reference,
12844 or a memory reference to an array whose actual size might be larger
12845 than its upper bound implies. There are multiple cases:
12846 A. a ref to a flexible array member at the end of a structure;
12847 B. a ref to an array with a different type than the original decl;
12848 for example:
12850 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12851 (*((char(*)[16])&a[0]))[i+8]
12853 C. a ref to an array that was passed as a parameter;
12854 for example:
12856 int test (uint8_t *p, uint32_t t[1][1], int n) {
12857 for (int i = 0; i < 4; i++, p++)
12858 t[i][0] = ...;
12860 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12863 bool
12864 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12866 /* The TYPE for this array reference. */
12867 tree atype = NULL_TREE;
12868 /* The FIELD_DECL for the array field in the containing structure. */
12869 tree afield_decl = NULL_TREE;
12870 /* Whether this array is the trailing array of a structure. */
12871 bool is_trailing_array_tmp = false;
12872 if (!is_trailing_array)
12873 is_trailing_array = &is_trailing_array_tmp;
12875 if (TREE_CODE (ref) == ARRAY_REF
12876 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12878 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12879 ref = TREE_OPERAND (ref, 0);
12881 else if (TREE_CODE (ref) == COMPONENT_REF
12882 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12884 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12885 afield_decl = TREE_OPERAND (ref, 1);
12887 else if (TREE_CODE (ref) == MEM_REF)
12889 tree arg = TREE_OPERAND (ref, 0);
12890 if (TREE_CODE (arg) == ADDR_EXPR)
12891 arg = TREE_OPERAND (arg, 0);
12892 tree argtype = TREE_TYPE (arg);
12893 if (TREE_CODE (argtype) == RECORD_TYPE)
12895 if (tree fld = last_field (argtype))
12897 atype = TREE_TYPE (fld);
12898 afield_decl = fld;
12899 if (TREE_CODE (atype) != ARRAY_TYPE)
12900 return false;
12901 if (VAR_P (arg) && DECL_SIZE (fld))
12902 return false;
12904 else
12905 return false;
12907 else
12908 return false;
12910 else
12911 return false;
12913 if (TREE_CODE (ref) == STRING_CST)
12914 return false;
12916 tree ref_to_array = ref;
12917 while (handled_component_p (ref))
12919 /* If the reference chain contains a component reference to a
12920 non-union type and another field follows, the reference
12921 is not at the end of a structure. */
12922 if (TREE_CODE (ref) == COMPONENT_REF)
12924 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12926 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12927 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12928 nextf = DECL_CHAIN (nextf);
12929 if (nextf)
12930 return false;
12933 /* If we have a multi-dimensional array we do not consider
12934 a non-innermost dimension as flex array if the whole
12935 multi-dimensional array is at struct end.
12936 Same for an array of aggregates with a trailing array
12937 member. */
12938 else if (TREE_CODE (ref) == ARRAY_REF)
12939 return false;
12940 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12942 /* If we view an underlying object as something else, then what we
12943 gathered up to now is what we have to rely on. */
12944 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12945 break;
12946 else
12947 gcc_unreachable ();
12949 ref = TREE_OPERAND (ref, 0);
12952 gcc_assert (!afield_decl
12953 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12955 /* The array is now at struct end. Treat a flexible array member as
12956 always subject to extension, even into mere padding constrained by
12957 an underlying decl. */
12958 if (! TYPE_SIZE (atype)
12959 || ! TYPE_DOMAIN (atype)
12960 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12962 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12963 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12966 /* If the reference is based on a declared entity, the size of the array
12967 is constrained by its given domain. (Do not trust commons PR/69368). */
12968 ref = get_base_address (ref);
12969 if (ref
12970 && DECL_P (ref)
12971 && !(flag_unconstrained_commons
12972 && VAR_P (ref) && DECL_COMMON (ref))
12973 && DECL_SIZE_UNIT (ref)
12974 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12976 /* If the object itself is the array it is not at struct end. */
12977 if (DECL_P (ref_to_array))
12978 return false;
12980 /* Check whether the array domain covers all of the available
12981 padding. */
12982 poly_int64 offset;
12983 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12984 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12985 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12987 *is_trailing_array
12988 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12989 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12991 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12993 *is_trailing_array
12994 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12995 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12998 /* If at least one extra element fits it is a flexarray. */
12999 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13000 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13001 + 2)
13002 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13003 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13005 *is_trailing_array
13006 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13007 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13010 return false;
13013 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13014 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
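/* Example (illustrative): given

     struct S { int n; char data[]; } *p;

   an ARRAY_REF such as p->data[i] makes array_ref_flexible_size_p return
   true and sets *IS_TRAILING_ARRAY, whereas a reference to a fixed-size
   interior member that is followed by further fields returns false.  */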
13018 /* Return a tree representing the offset, in bytes, of the field referenced
13019 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13021 tree
13022 component_ref_field_offset (tree exp)
13024 tree aligned_offset = TREE_OPERAND (exp, 2);
13025 tree field = TREE_OPERAND (exp, 1);
13026 location_t loc = EXPR_LOCATION (exp);
13028 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13029 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13030 value. */
13031 if (aligned_offset)
13033 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13034 sizetype from another type of the same width and signedness. */
13035 if (TREE_TYPE (aligned_offset) != sizetype)
13036 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13037 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13038 size_int (DECL_OFFSET_ALIGN (field)
13039 / BITS_PER_UNIT));
13042 /* Otherwise, take the offset from that of the field. Substitute
13043 any PLACEHOLDER_EXPR that we have. */
13044 else
13045 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13048 /* Given the initializer INIT, return the initializer for the field
13049 DECL if it exists, otherwise null. Used to obtain the initializer
13050 for a flexible array member and determine its size. */
13052 static tree
13053 get_initializer_for (tree init, tree decl)
13055 STRIP_NOPS (init);
13057 tree fld, fld_init;
13058 unsigned HOST_WIDE_INT i;
13059 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13061 if (decl == fld)
13062 return fld_init;
13064 if (TREE_CODE (fld) == CONSTRUCTOR)
13066 fld_init = get_initializer_for (fld_init, decl);
13067 if (fld_init)
13068 return fld_init;
13072 return NULL_TREE;
13075 /* Determines the special array member type for the array reference REF. */
13076 special_array_member
13077 component_ref_sam_type (tree ref)
13079 special_array_member sam_type = special_array_member::none;
13081 tree member = TREE_OPERAND (ref, 1);
13082 tree memsize = DECL_SIZE_UNIT (member);
13083 if (memsize)
13085 tree memtype = TREE_TYPE (member);
13086 if (TREE_CODE (memtype) != ARRAY_TYPE)
13087 return sam_type;
13089 bool trailing = false;
13090 (void) array_ref_flexible_size_p (ref, &trailing);
13091 bool zero_elts = integer_zerop (memsize);
13092 if (zero_elts && integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype))))
13094 /* If array element has zero size, verify if it is a flexible
13095 array member or zero length array. Clear zero_elts if
13096 it has one or more members or is a VLA member. */
13097 if (tree dom = TYPE_DOMAIN (memtype))
13098 if (tree min = TYPE_MIN_VALUE (dom))
13099 if (tree max = TYPE_MAX_VALUE (dom))
13100 if (TREE_CODE (min) != INTEGER_CST
13101 || TREE_CODE (max) != INTEGER_CST
13102 || !((integer_zerop (min) && integer_all_onesp (max))
13103 || tree_int_cst_lt (max, min)))
13104 zero_elts = false;
13106 if (!trailing && !zero_elts)
13107 /* MEMBER is an interior array with more than one element. */
13108 return special_array_member::int_n;
13110 if (zero_elts)
13112 if (trailing)
13113 return special_array_member::trail_0;
13114 else
13115 return special_array_member::int_0;
13118 if (!zero_elts)
13119 if (tree dom = TYPE_DOMAIN (memtype))
13120 if (tree min = TYPE_MIN_VALUE (dom))
13121 if (tree max = TYPE_MAX_VALUE (dom))
13122 if (TREE_CODE (min) == INTEGER_CST
13123 && TREE_CODE (max) == INTEGER_CST)
13125 offset_int minidx = wi::to_offset (min);
13126 offset_int maxidx = wi::to_offset (max);
13127 offset_int neltsm1 = maxidx - minidx;
13128 if (neltsm1 > 0)
13129 /* MEMBER is a trailing array with more than
13130 one element. */
13131 return special_array_member::trail_n;
13133 if (neltsm1 == 0)
13134 return special_array_member::trail_1;
13138 return sam_type;
13141 /* Determines the size of the member referenced by the COMPONENT_REF
13142 REF, using its initializer expression if necessary in order to
13143 determine the size of an initialized flexible array member.
13144 If non-null, set *SAM to the type of special array member.
13145 Returns the size as sizetype (which might be zero for an object
13146 with an uninitialized flexible array member) or null if the size
13147 cannot be determined. */
13149 tree
13150 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13152 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13154 special_array_member sambuf;
13155 if (!sam)
13156 sam = &sambuf;
13157 *sam = component_ref_sam_type (ref);
13159 /* The object/argument referenced by the COMPONENT_REF and its type. */
13160 tree arg = TREE_OPERAND (ref, 0);
13161 tree argtype = TREE_TYPE (arg);
13162 /* The referenced member. */
13163 tree member = TREE_OPERAND (ref, 1);
13165 tree memsize = DECL_SIZE_UNIT (member);
13166 if (memsize)
13168 tree memtype = TREE_TYPE (member);
13169 if (TREE_CODE (memtype) != ARRAY_TYPE)
13170 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13171 to the type of a class with a virtual base which doesn't
13172 reflect the size of the virtual's members (see pr97595).
13173 If that's the case fail for now and implement something
13174 more robust in the future. */
13175 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13176 ? memsize : NULL_TREE);
13178 /* Arrays of two or more elements are treated as normal arrays by default. */
13179 if (*sam == special_array_member::int_n
13180 || *sam == special_array_member::trail_n)
13181 return memsize;
13183 tree afield_decl = TREE_OPERAND (ref, 1);
13184 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13185 /* If the trailing array is not a flexible array member, treat it as
13186 a normal array. */
13187 if (DECL_NOT_FLEXARRAY (afield_decl)
13188 && *sam != special_array_member::int_0)
13189 return memsize;
13191 if (*sam == special_array_member::int_0)
13192 memsize = NULL_TREE;
13194 /* For a reference to a flexible array member of a union
13195 use the size of the union instead of the size of the member. */
13196 if (TREE_CODE (argtype) == UNION_TYPE)
13197 memsize = TYPE_SIZE_UNIT (argtype);
13200 /* MEMBER is either a bona fide flexible array member, or a zero-element
13201 array member, or an array of length one treated as such. */
13203 /* If the reference is to a declared object and the member a true
13204 flexible array, try to determine its size from its initializer. */
13205 poly_int64 baseoff = 0;
13206 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13207 if (!base || !VAR_P (base))
13209 if (*sam != special_array_member::int_0)
13210 return NULL_TREE;
13212 if (TREE_CODE (arg) != COMPONENT_REF)
13213 return NULL_TREE;
13215 base = arg;
13216 while (TREE_CODE (base) == COMPONENT_REF)
13217 base = TREE_OPERAND (base, 0);
13218 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13221 /* BASE is the declared object of which MEMBER is either a member
13222 or that is cast to ARGTYPE (e.g., a char buffer used to store
13223 an ARGTYPE object). */
13224 tree basetype = TREE_TYPE (base);
13226 /* Determine the base type of the referenced object. If it's
13227 the same as ARGTYPE and MEMBER has a known size, return it. */
13228 tree bt = basetype;
13229 if (*sam != special_array_member::int_0)
13230 while (TREE_CODE (bt) == ARRAY_TYPE)
13231 bt = TREE_TYPE (bt);
13232 bool typematch = useless_type_conversion_p (argtype, bt);
13233 if (memsize && typematch)
13234 return memsize;
13236 memsize = NULL_TREE;
13238 if (typematch)
13239 /* MEMBER is a true flexible array member. Compute its size from
13240 the initializer of the BASE object if it has one. */
13241 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13242 if (init != error_mark_node)
13244 init = get_initializer_for (init, member);
13245 if (init)
13247 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13248 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13250 /* Use the larger of the initializer size and the tail
13251 padding in the enclosing struct. */
13252 poly_int64 rsz = tree_to_poly_int64 (refsize);
13253 rsz -= baseoff;
13254 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13255 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13258 baseoff = 0;
13262 if (!memsize)
13264 if (typematch)
13266 if (DECL_P (base)
13267 && DECL_EXTERNAL (base)
13268 && bt == basetype
13269 && *sam != special_array_member::int_0)
13270 /* The size of a flexible array member of an extern struct
13271 with no initializer cannot be determined (it's defined
13272 in another translation unit and can have an initializer
13273 with an arbitrary number of elements). */
13274 return NULL_TREE;
13276 /* Use the size of the base struct or, for interior zero-length
13277 arrays, the size of the enclosing type. */
13278 memsize = TYPE_SIZE_UNIT (bt);
13280 else if (DECL_P (base))
13281 /* Use the size of the BASE object (possibly an array of some
13282 other type such as char used to store the struct). */
13283 memsize = DECL_SIZE_UNIT (base);
13284 else
13285 return NULL_TREE;
13288 /* If the flexible array member has a known size use the greater
13289 of it and the tail padding in the enclosing struct.
13290 Otherwise, when the size of the flexible array member is unknown
13291 and the referenced object is not a struct, use the size of its
13292 type when known. This detects sizes of array buffers when cast
13293 to struct types with flexible array members. */
13294 if (memsize)
13296 if (!tree_fits_poly_int64_p (memsize))
13297 return NULL_TREE;
13298 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13299 if (known_lt (baseoff, memsz64))
13301 memsz64 -= baseoff;
13302 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13304 return size_zero_node;
13307 /* Return "don't know" for an external non-array object since its
13308 flexible array member can be initialized to have any number of
13309 elements. Otherwise, return zero because the flexible array
13310 member has no elements. */
13311 return (DECL_P (base)
13312 && DECL_EXTERNAL (base)
13313 && (!typematch
13314 || TREE_CODE (basetype) != ARRAY_TYPE)
13315 ? NULL_TREE : size_zero_node);
13318 /* Return the machine mode of T. For vectors, returns the mode of the
13319 inner type. The main use case is to feed the result to HONOR_NANS,
13320 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13322 machine_mode
13323 element_mode (const_tree t)
13325 if (!TYPE_P (t))
13326 t = TREE_TYPE (t);
13327 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13328 t = TREE_TYPE (t);
13329 return TYPE_MODE (t);
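/* Example (illustrative): for a vector type such as V4SF element_mode
   returns SFmode, and for _Complex double it returns DFmode, so a check
   like

     HONOR_NANS (element_mode (type))

   works uniformly for scalar, complex and vector types where TYPE_MODE
   alone might be a vector mode or BLKmode.  */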
13332 /* Vector types need to re-check the target flags each time we report
13333 the machine mode. We need to do this because attribute target can
13334 change the result of vector_mode_supported_p and have_regs_of_mode
13335 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13336 change on a per-function basis. */
13337 /* ??? Possibly a better solution is to run through all the types
13338 referenced by a function and re-compute the TYPE_MODE once, rather
13339 than make the TYPE_MODE macro call a function. */
13341 machine_mode
13342 vector_type_mode (const_tree t)
13344 machine_mode mode;
13346 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13348 mode = t->type_common.mode;
13349 if (VECTOR_MODE_P (mode)
13350 && (!targetm.vector_mode_supported_p (mode)
13351 || !have_regs_of_mode[mode]))
13353 scalar_int_mode innermode;
13355 /* For integers, try mapping it to a same-sized scalar mode. */
13356 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13358 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13359 * GET_MODE_BITSIZE (innermode));
13360 scalar_int_mode mode;
13361 if (int_mode_for_size (size, 0).exists (&mode)
13362 && have_regs_of_mode[mode])
13363 return mode;
13366 return BLKmode;
13369 return mode;
13372 /* Return the size in bits of each element of vector type TYPE. */
13374 unsigned int
13375 vector_element_bits (const_tree type)
13377 gcc_checking_assert (VECTOR_TYPE_P (type));
13378 if (VECTOR_BOOLEAN_TYPE_P (type))
13379 return TYPE_PRECISION (TREE_TYPE (type));
13380 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13383 /* Calculate the size in bits of each element of vector type TYPE
13384 and return the result as a tree of type bitsizetype. */
13386 tree
13387 vector_element_bits_tree (const_tree type)
13389 gcc_checking_assert (VECTOR_TYPE_P (type));
13390 if (VECTOR_BOOLEAN_TYPE_P (type))
13391 return bitsize_int (vector_element_bits (type));
13392 return TYPE_SIZE (TREE_TYPE (type));
13395 /* Verify that basic properties of T match TV and thus T can be a variant of
13396 TV. TV should be the more specified variant (i.e. the main variant). */
13398 static bool
13399 verify_type_variant (const_tree t, tree tv)
13401 /* Type variant can differ by:
13403 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13404 ENCODE_QUAL_ADDR_SPACE.
13405 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13406 in this case some values may not be set in the variant types
13407 (see TYPE_COMPLETE_P checks).
13408 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13409 - by TYPE_NAME and attributes (i.e. when a variant originates from a typedef)
13410 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13411 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13412 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13413 this is necessary to make it possible to merge types from different TUs
13414 - arrays, pointers and references may have TREE_TYPE that is a variant
13415 of TREE_TYPE of their main variants.
13416 - aggregates may have new TYPE_FIELDS list that list variants of
13417 the main variant TYPE_FIELDS.
13418 - vector types may differ by TYPE_VECTOR_OPAQUE
13421 /* Convenience macro for matching individual fields. */
13422 #define verify_variant_match(flag) \
13423 do { \
13424 if (flag (tv) != flag (t)) \
13426 error ("type variant differs by %s", #flag); \
13427 debug_tree (tv); \
13428 return false; \
13430 } while (false)
13432 /* tree_base checks. */
13434 verify_variant_match (TREE_CODE);
13435 /* FIXME: Ada builds non-artificial variants of artificial types. */
13436 #if 0
13437 if (TYPE_ARTIFICIAL (tv))
13438 verify_variant_match (TYPE_ARTIFICIAL);
13439 #endif
13440 if (POINTER_TYPE_P (tv))
13441 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13442 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13443 verify_variant_match (TYPE_UNSIGNED);
13444 verify_variant_match (TYPE_PACKED);
13445 if (TREE_CODE (t) == REFERENCE_TYPE)
13446 verify_variant_match (TYPE_REF_IS_RVALUE);
13447 if (AGGREGATE_TYPE_P (t))
13448 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13449 else
13450 verify_variant_match (TYPE_SATURATING);
13451 /* FIXME: This check triggers during libstdc++ builds. */
13452 #if 0
13453 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13454 verify_variant_match (TYPE_FINAL_P);
13455 #endif
13457 /* tree_type_common checks. */
13459 if (COMPLETE_TYPE_P (t))
13461 verify_variant_match (TYPE_MODE);
13462 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13463 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13464 verify_variant_match (TYPE_SIZE);
13465 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13466 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13467 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13469 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13470 TYPE_SIZE_UNIT (tv), 0));
13471 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13472 debug_tree (tv);
13473 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13474 debug_tree (TYPE_SIZE_UNIT (tv));
13475 error ("type%'s %<TYPE_SIZE_UNIT%>");
13476 debug_tree (TYPE_SIZE_UNIT (t));
13477 return false;
13479 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13481 verify_variant_match (TYPE_PRECISION_RAW);
13482 if (RECORD_OR_UNION_TYPE_P (t))
13483 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13484 else if (TREE_CODE (t) == ARRAY_TYPE)
13485 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13486 /* During LTO we merge variant lists from different translation units
13487 that may differ by TYPE_CONTEXT, which in turn may point
13488 to TRANSLATION_UNIT_DECL.
13489 Ada also builds variants of types with different TYPE_CONTEXT. */
13490 #if 0
13491 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13492 verify_variant_match (TYPE_CONTEXT);
13493 #endif
13494 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13495 verify_variant_match (TYPE_STRING_FLAG);
13496 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13497 verify_variant_match (TYPE_CXX_ODR_P);
13498 if (TYPE_ALIAS_SET_KNOWN_P (t))
13500 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13501 debug_tree (tv);
13502 return false;
13505 /* tree_type_non_common checks. */
13507 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13508 and dangles the pointer from time to time. */
13509 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13510 && (in_lto_p || !TYPE_VFIELD (tv)
13511 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13513 error ("type variant has different %<TYPE_VFIELD%>");
13514 debug_tree (tv);
13515 return false;
13517 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13518 || TREE_CODE (t) == INTEGER_TYPE
13519 || TREE_CODE (t) == BOOLEAN_TYPE
13520 || TREE_CODE (t) == BITINT_TYPE
13521 || SCALAR_FLOAT_TYPE_P (t)
13522 || FIXED_POINT_TYPE_P (t))
13524 verify_variant_match (TYPE_MAX_VALUE);
13525 verify_variant_match (TYPE_MIN_VALUE);
13527 if (TREE_CODE (t) == METHOD_TYPE)
13528 verify_variant_match (TYPE_METHOD_BASETYPE);
13529 if (TREE_CODE (t) == OFFSET_TYPE)
13530 verify_variant_match (TYPE_OFFSET_BASETYPE);
13531 if (TREE_CODE (t) == ARRAY_TYPE)
13532 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13533 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13534 or even in the type's main variant. This is needed to make bootstrap pass,
13535 and the bug seems new in GCC 5.
13536 The C++ FE should be updated to make this consistent, and we should check
13537 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise that there
13538 is a match with the main variant.
13540 Also disable the check for Java for now because of a parser hack that builds
13541 first a dummy BINFO and then sometimes replaces it by a real BINFO in some
13542 of the copies. */
13543 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13544 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13545 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13546 Since there is no cheap way to tell a C++ type from a Java type without
13547 LTO, do the checking at LTO time only. */
13548 && (in_lto_p && odr_type_p (t)))
13550 error ("type variant has different %<TYPE_BINFO%>");
13551 debug_tree (tv);
13552 error ("type variant%'s %<TYPE_BINFO%>");
13553 debug_tree (TYPE_BINFO (tv));
13554 error ("type%'s %<TYPE_BINFO%>");
13555 debug_tree (TYPE_BINFO (t));
13556 return false;
13559 /* Check various uses of TYPE_VALUES_RAW. */
13560 if (TREE_CODE (t) == ENUMERAL_TYPE
13561 && TYPE_VALUES (t))
13562 verify_variant_match (TYPE_VALUES);
13563 else if (TREE_CODE (t) == ARRAY_TYPE)
13564 verify_variant_match (TYPE_DOMAIN);
13565 /* Permit incomplete variants of a complete type. While FEs may complete
13566 all variants, this does not happen for C++ templates in all cases. */
13567 else if (RECORD_OR_UNION_TYPE_P (t)
13568 && COMPLETE_TYPE_P (t)
13569 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13571 tree f1, f2;
13573 /* Fortran builds qualified variants as new records with items of
13574 qualified type. Verify that they look the same. */
13575 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13576 f1 && f2;
13577 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13578 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13579 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13580 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13581 /* FIXME: gfc_nonrestricted_type builds all types as variants
13582 with the exception of pointer types. It deeply copies the type,
13583 which means that we may end up with a variant type
13584 referring to a non-variant pointer. We may change it to
13585 produce types as variants, too, like
13586 objc_get_protocol_qualified_type does. */
13587 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13588 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13589 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13590 break;
13591 if (f1 || f2)
13593 error ("type variant has different %<TYPE_FIELDS%>");
13594 debug_tree (tv);
13595 error ("first mismatch is field");
13596 debug_tree (f1);
13597 error ("and field");
13598 debug_tree (f2);
13599 return false;
13602 else if (FUNC_OR_METHOD_TYPE_P (t))
13603 verify_variant_match (TYPE_ARG_TYPES);
13604 /* For C++ the qualified variant of an array type is really an array type
13605 of the qualified TREE_TYPE.
13606 ObjC builds variants of pointer types where the pointed-to type is a
13607 variant, too, in objc_get_protocol_qualified_type. */
13608 if (TREE_TYPE (t) != TREE_TYPE (tv)
13609 && ((TREE_CODE (t) != ARRAY_TYPE
13610 && !POINTER_TYPE_P (t))
13611 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13612 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13614 error ("type variant has different %<TREE_TYPE%>");
13615 debug_tree (tv);
13616 error ("type variant%'s %<TREE_TYPE%>");
13617 debug_tree (TREE_TYPE (tv));
13618 error ("type%'s %<TREE_TYPE%>");
13619 debug_tree (TREE_TYPE (t));
13620 return false;
13622 if (type_with_alias_set_p (t)
13623 && !gimple_canonical_types_compatible_p (t, tv, false))
13625 error ("type is not compatible with its variant");
13626 debug_tree (tv);
13627 error ("type variant%'s %<TREE_TYPE%>");
13628 debug_tree (TREE_TYPE (tv));
13629 error ("type%'s %<TREE_TYPE%>");
13630 debug_tree (TREE_TYPE (t));
13631 return false;
13633 return true;
13634 #undef verify_variant_match
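/* An illustrative sketch (not part of the build) of the invariant the
   verifier above enforces for one common case: a const-qualified variant
   produced by build_qualified_type chains back to its main variant and
   agrees with it on the fields verify_type_variant compares, such as
   TYPE_MODE and TYPE_SIZE.  The example function name is made up.  */
#if 0
static void
example_variant_invariant (void)
{
  tree cv = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  /* The qualified variant chains back to the unqualified main variant.  */
  gcc_checking_assert (TYPE_MAIN_VARIANT (cv) == integer_type_node);
  /* And it agrees with the main variant on mode and size.  */
  gcc_checking_assert (TYPE_MODE (cv) == TYPE_MODE (integer_type_node));
  gcc_checking_assert (TYPE_SIZE (cv) == TYPE_SIZE (integer_type_node));
}
#endif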
13638 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13639 the middle-end types_compatible_p function. It needs to avoid
13640 claiming types are different for types that should be treated
13641 the same with respect to TBAA. Canonical types are also used
13642 for IL consistency checks via the useless_type_conversion_p
13643 predicate which does not handle all type kinds itself but falls
13644 back to pointer-comparison of TYPE_CANONICAL for aggregates
13645 for example. */
13647 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13648 type calculation because we need to allow interoperability between signed
13649 and unsigned variants. */
13651 bool
13652 type_with_interoperable_signedness (const_tree type)
13654 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13655 signed char and unsigned char. Similarly, the Fortran FE builds
13656 C_SIZE_T as a signed type, while C defines it as unsigned. */
13658 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13659 == INTEGER_TYPE
13660 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13661 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13664 /* Return true iff T1 and T2 are structurally identical as far as
13665 TBAA is concerned.
13666 This function is used both by lto.cc canonical type merging and by the
13667 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of types
13668 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13669 only for LTO because only in these cases does TYPE_CANONICAL equivalence
13670 correspond to the one defined by gimple_canonical_types_compatible_p. */
13672 bool
13673 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13674 bool trust_type_canonical)
13676 /* Type variants should be the same as the main variant. When not doing sanity
13677 checking to verify this fact, go to main variants and save some work. */
13678 if (trust_type_canonical)
13680 t1 = TYPE_MAIN_VARIANT (t1);
13681 t2 = TYPE_MAIN_VARIANT (t2);
13684 /* Check first for the obvious case of pointer identity. */
13685 if (t1 == t2)
13686 return true;
13688 /* Check that we have two types to compare. */
13689 if (t1 == NULL_TREE || t2 == NULL_TREE)
13690 return false;
13692 /* We consider complete types always compatible with incomplete types.
13693 This does not make sense for canonical type calculation and thus we
13694 need to ensure that we are never called on them.
13696 FIXME: For more correctness the function probably should have three modes
13697 1) mode assuming that types are complete, matching their structure
13698 2) mode allowing incomplete types but producing equivalence classes
13699 and thus ignoring all info from complete types
13700 3) mode allowing incomplete types to match complete ones but checking
13701 compatibility between complete types.
13703 1 and 2 can be used for canonical type calculation. 3 is the real
13704 definition of type compatibility that can be used e.g. for warnings during
13705 declaration merging. */
13707 gcc_assert (!trust_type_canonical
13708 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13710 /* If the types have been previously registered and found equal
13711 they still are. */
13713 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13714 && trust_type_canonical)
13716 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13717 they are always NULL, but they are set to non-NULL for types
13718 constructed by build_pointer_type and variants. In this case the
13719 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13720 all pointers are considered equal). Be sure not to return false
13721 negatives. */
13722 gcc_checking_assert (canonical_type_used_p (t1)
13723 && canonical_type_used_p (t2));
13724 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13727 /* For types where we do ODR based TBAA the canonical type is always
13728 set correctly, so we know that types are different if their
13729 canonical types do not match. */
13730 if (trust_type_canonical
13731 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13732 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13733 return false;
13735 /* Can't be the same type if the types don't have the same code. */
13736 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13737 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13738 return false;
13740 /* Qualifiers do not matter for canonical type comparison purposes. */
13742 /* Void types and nullptr types are always the same. */
13743 if (VOID_TYPE_P (t1)
13744 || TREE_CODE (t1) == NULLPTR_TYPE)
13745 return true;
13747 /* Can't be the same type if they have different modes. */
13748 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13749 return false;
13751 /* Non-aggregate types can be handled cheaply. */
13752 if (INTEGRAL_TYPE_P (t1)
13753 || SCALAR_FLOAT_TYPE_P (t1)
13754 || FIXED_POINT_TYPE_P (t1)
13755 || VECTOR_TYPE_P (t1)
13756 || TREE_CODE (t1) == COMPLEX_TYPE
13757 || TREE_CODE (t1) == OFFSET_TYPE
13758 || POINTER_TYPE_P (t1))
13760 /* Can't be the same type if they have different precision. */
13761 if (TYPE_PRECISION_RAW (t1) != TYPE_PRECISION_RAW (t2))
13762 return false;
13764 /* In some cases the signed and unsigned types are required to be
13765 interoperable. */
13766 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13767 && !type_with_interoperable_signedness (t1))
13768 return false;
13770 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13771 interoperable with "signed char". Unless all frontends are revisited
13772 to agree on these types, we must ignore the flag completely. */
13774 /* The Fortran standard defines the C_PTR type, which is compatible with every
13775 C pointer. For this reason we need to glob all pointers into one.
13776 Still, pointers in different address spaces are not compatible. */
13777 if (POINTER_TYPE_P (t1))
13779 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13780 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13781 return false;
13784 /* Tail-recurse to components. */
13785 if (VECTOR_TYPE_P (t1)
13786 || TREE_CODE (t1) == COMPLEX_TYPE)
13787 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13788 TREE_TYPE (t2),
13789 trust_type_canonical);
13791 return true;
13794 /* Do type-specific comparisons. */
13795 switch (TREE_CODE (t1))
13797 case ARRAY_TYPE:
13798 /* Array types are the same if the element types are the same and
13799 the number of elements is the same. */
13800 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13801 trust_type_canonical)
13802 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13803 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13804 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13805 return false;
13806 else
13808 tree i1 = TYPE_DOMAIN (t1);
13809 tree i2 = TYPE_DOMAIN (t2);
13811 /* For an incomplete external array, the type domain can be
13812 NULL_TREE. Check this condition also. */
13813 if (i1 == NULL_TREE && i2 == NULL_TREE)
13814 return true;
13815 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13816 return false;
13817 else
13819 tree min1 = TYPE_MIN_VALUE (i1);
13820 tree min2 = TYPE_MIN_VALUE (i2);
13821 tree max1 = TYPE_MAX_VALUE (i1);
13822 tree max2 = TYPE_MAX_VALUE (i2);
13824 /* The minimum/maximum values have to be the same. */
13825 if ((min1 == min2
13826 || (min1 && min2
13827 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13828 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13829 || operand_equal_p (min1, min2, 0))))
13830 && (max1 == max2
13831 || (max1 && max2
13832 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13833 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13834 || operand_equal_p (max1, max2, 0)))))
13835 return true;
13836 else
13837 return false;
13841 case METHOD_TYPE:
13842 case FUNCTION_TYPE:
13843 /* Function types are the same if the return type and argument types
13844 are the same. */
13845 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13846 trust_type_canonical))
13847 return false;
13849 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13850 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13851 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13852 return true;
13853 else
13855 tree parms1, parms2;
13857 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13858 parms1 && parms2;
13859 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13861 if (!gimple_canonical_types_compatible_p
13862 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13863 trust_type_canonical))
13864 return false;
13867 if (parms1 || parms2)
13868 return false;
13870 return true;
13873 case RECORD_TYPE:
13874 case UNION_TYPE:
13875 case QUAL_UNION_TYPE:
13877 tree f1, f2;
13879 /* Don't try to compare variants of an incomplete type, before
13880 TYPE_FIELDS has been copied around. */
13881 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13882 return true;
13885 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13886 return false;
13888 /* For aggregate types, all the fields must be the same. */
13889 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13890 f1 || f2;
13891 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13893 /* Skip non-fields and zero-sized fields. */
13894 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13895 || (DECL_SIZE (f1)
13896 && integer_zerop (DECL_SIZE (f1)))))
13897 f1 = TREE_CHAIN (f1);
13898 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13899 || (DECL_SIZE (f2)
13900 && integer_zerop (DECL_SIZE (f2)))))
13901 f2 = TREE_CHAIN (f2);
13902 if (!f1 || !f2)
13903 break;
13904 /* The fields must have the same name, offset and type. */
13905 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13906 || !gimple_compare_field_offset (f1, f2)
13907 || !gimple_canonical_types_compatible_p
13908 (TREE_TYPE (f1), TREE_TYPE (f2),
13909 trust_type_canonical))
13910 return false;
13913 /* If one aggregate has more fields than the other, they
13914 are not the same. */
13915 if (f1 || f2)
13916 return false;
13918 return true;
13921 default:
13922 /* Consider all types with language specific trees in them mutually
13923 compatible. This is executed only from verify_type and false
13924 positives can be tolerated. */
13925 gcc_assert (!in_lto_p);
13926 return true;
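/* An illustrative sketch (not part of the build) of the pointer globbing
   described above: with TRUST_TYPE_CANONICAL clear, two pointer types in
   the same address space compare as TBAA-compatible even though the
   pointed-to types differ.  The example function name is made up.  */
#if 0
static void
example_pointer_globbing (void)
{
  tree p_char = build_pointer_type (char_type_node);
  tree p_int = build_pointer_type (integer_type_node);
  gcc_checking_assert (gimple_canonical_types_compatible_p (p_char, p_int,
							     false));
}
#endif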
13930 /* An OPAQUE_TYPE T should have only size and alignment information,
13931 and its mode should be of class MODE_OPAQUE. This function verifies
13932 that these properties of T match those of TV, the main variant of T, and
13933 TC, the canonical type of T. */
13935 static void
13936 verify_opaque_type (const_tree t, tree tv, tree tc)
13938 gcc_assert (OPAQUE_TYPE_P (t));
13939 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13940 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13942 /* For an opaque type T1, check whether its properties match
13943 the corresponding ones of the other opaque type T2, and emit
13944 error messages for any that are inconsistent. */
13945 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13946 const char *kind_msg)
13948 if (!OPAQUE_TYPE_P (t2))
13950 error ("type %s is not an opaque type", kind_msg);
13951 debug_tree (t2);
13952 return;
13954 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13956 error ("type %s is not with opaque mode", kind_msg);
13957 debug_tree (t2);
13958 return;
13960 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13962 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13963 debug_tree (t2);
13964 return;
13966 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13967 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13968 if (maybe_ne (t1_size, t2_size))
13970 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13971 debug_tree (t2);
13972 return;
13974 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13976 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13977 debug_tree (t2);
13978 return;
13980 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13982 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13983 debug_tree (t2);
13984 return;
13988 if (t != tv)
13989 check_properties_for_opaque_type (t, tv, "variant");
13991 if (t != tc)
13992 check_properties_for_opaque_type (t, tc, "canonical");
13995 /* Verify type T. */
13997 void
13998 verify_type (const_tree t)
14000 bool error_found = false;
14001 tree mv = TYPE_MAIN_VARIANT (t);
14002 tree ct = TYPE_CANONICAL (t);
14004 if (OPAQUE_TYPE_P (t))
14006 verify_opaque_type (t, mv, ct);
14007 return;
14010 if (!mv)
14012 error ("main variant is not defined");
14013 error_found = true;
14015 else if (mv != TYPE_MAIN_VARIANT (mv))
14017 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14018 debug_tree (mv);
14019 error_found = true;
14021 else if (t != mv && !verify_type_variant (t, mv))
14022 error_found = true;
14024 if (!ct)
14026 else if (TYPE_CANONICAL (ct) != ct)
14028 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14029 debug_tree (ct);
14030 error_found = true;
14032 /* Method and function types cannot be used to address memory and thus
14033 TYPE_CANONICAL really matters only for determining useless conversions.
14035 FIXME: The C++ FE produces declarations of builtin functions that are not
14036 compatible with their main variants. */
14037 else if (TREE_CODE (t) == FUNCTION_TYPE)
14039 else if (t != ct
14040 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14041 with variably sized arrays because their sizes are possibly
14042 gimplified to different variables. */
14043 && !variably_modified_type_p (ct, NULL)
14044 && !gimple_canonical_types_compatible_p (t, ct, false)
14045 && COMPLETE_TYPE_P (t))
14047 error ("%<TYPE_CANONICAL%> is not compatible");
14048 debug_tree (ct);
14049 error_found = true;
14052 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14053 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14055 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14056 debug_tree (ct);
14057 error_found = true;
14059 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14061 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14062 debug_tree (ct);
14063 debug_tree (TYPE_MAIN_VARIANT (ct));
14064 error_found = true;
14068 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14069 if (RECORD_OR_UNION_TYPE_P (t))
14071 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14072 and dangles the pointer from time to time. */
14073 if (TYPE_VFIELD (t)
14074 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14075 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14077 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14078 debug_tree (TYPE_VFIELD (t));
14079 error_found = true;
14082 else if (TREE_CODE (t) == POINTER_TYPE)
14084 if (TYPE_NEXT_PTR_TO (t)
14085 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14087 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14088 debug_tree (TYPE_NEXT_PTR_TO (t));
14089 error_found = true;
14092 else if (TREE_CODE (t) == REFERENCE_TYPE)
14094 if (TYPE_NEXT_REF_TO (t)
14095 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14097 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14098 debug_tree (TYPE_NEXT_REF_TO (t));
14099 error_found = true;
14102 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14103 || FIXED_POINT_TYPE_P (t))
14105 /* FIXME: The following check should pass:
14106 useless_type_conversion_p (const_cast <tree> (t),
14107 TREE_TYPE (TYPE_MIN_VALUE (t)))
14108 but does not for C sizetypes in LTO. */
14111 /* Check various uses of TYPE_MAX_VALUE_RAW. */
14112 if (RECORD_OR_UNION_TYPE_P (t))
14114 if (!TYPE_BINFO (t))
14116 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14118 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14119 debug_tree (TYPE_BINFO (t));
14120 error_found = true;
14122 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14124 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14125 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14126 error_found = true;
14129 else if (FUNC_OR_METHOD_TYPE_P (t))
14131 if (TYPE_METHOD_BASETYPE (t)
14132 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14133 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14135 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14136 debug_tree (TYPE_METHOD_BASETYPE (t));
14137 error_found = true;
14140 else if (TREE_CODE (t) == OFFSET_TYPE)
14142 if (TYPE_OFFSET_BASETYPE (t)
14143 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14144 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14146 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14147 debug_tree (TYPE_OFFSET_BASETYPE (t));
14148 error_found = true;
14151 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14152 || FIXED_POINT_TYPE_P (t))
14154 /* FIXME: The following check should pass:
14155 useless_type_conversion_p (const_cast <tree> (t),
14156 TREE_TYPE (TYPE_MAX_VALUE (t)))
14157 but does not for C sizetypes in LTO. */
14159 else if (TREE_CODE (t) == ARRAY_TYPE)
14161 if (TYPE_ARRAY_MAX_SIZE (t)
14162 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14164 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14165 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14166 error_found = true;
14169 else if (TYPE_MAX_VALUE_RAW (t))
14171 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14172 debug_tree (TYPE_MAX_VALUE_RAW (t));
14173 error_found = true;
14176 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14178 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14179 debug_tree (TYPE_LANG_SLOT_1 (t));
14180 error_found = true;
14183 /* Check various uses of TYPE_VALUES_RAW. */
14184 if (TREE_CODE (t) == ENUMERAL_TYPE)
14185 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14187 tree value = TREE_VALUE (l);
14188 tree name = TREE_PURPOSE (l);
14190 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
14191 CONST_DECLs of ENUMERAL_TYPE. */
14192 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14194 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14195 debug_tree (value);
14196 debug_tree (name);
14197 error_found = true;
14199 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14200 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14201 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14203 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14204 "to the enum");
14205 debug_tree (value);
14206 debug_tree (name);
14207 error_found = true;
14209 if (TREE_CODE (name) != IDENTIFIER_NODE)
14211 error ("enum value name is not %<IDENTIFIER_NODE%>");
14212 debug_tree (value);
14213 debug_tree (name);
14214 error_found = true;
14217 else if (TREE_CODE (t) == ARRAY_TYPE)
14219 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14221 error ("array %<TYPE_DOMAIN%> is not integer type");
14222 debug_tree (TYPE_DOMAIN (t));
14223 error_found = true;
14226 else if (RECORD_OR_UNION_TYPE_P (t))
14228 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14230 error ("%<TYPE_FIELDS%> defined in incomplete type");
14231 error_found = true;
14233 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14235 /* TODO: verify properties of decls. */
14236 if (TREE_CODE (fld) == FIELD_DECL)
14238 else if (TREE_CODE (fld) == TYPE_DECL)
14240 else if (TREE_CODE (fld) == CONST_DECL)
14242 else if (VAR_P (fld))
14244 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14246 else if (TREE_CODE (fld) == USING_DECL)
14248 else if (TREE_CODE (fld) == FUNCTION_DECL)
14250 else
14252 error ("wrong tree in %<TYPE_FIELDS%> list");
14253 debug_tree (fld);
14254 error_found = true;
14258 else if (TREE_CODE (t) == INTEGER_TYPE
14259 || TREE_CODE (t) == BOOLEAN_TYPE
14260 || TREE_CODE (t) == BITINT_TYPE
14261 || TREE_CODE (t) == OFFSET_TYPE
14262 || TREE_CODE (t) == REFERENCE_TYPE
14263 || TREE_CODE (t) == NULLPTR_TYPE
14264 || TREE_CODE (t) == POINTER_TYPE)
14266 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14268 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14269 "is %p",
14270 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14271 error_found = true;
14273 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14275 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14276 debug_tree (TYPE_CACHED_VALUES (t));
14277 error_found = true;
14279 /* Verify just enough of the cache to ensure that no one copied it to a new
14280 type. All copying should go through copy_node, which should clear it. */
14281 else if (TYPE_CACHED_VALUES_P (t))
14283 int i;
14284 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14285 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14286 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14288 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14289 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14290 error_found = true;
14291 break;
14295 else if (FUNC_OR_METHOD_TYPE_P (t))
14296 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14298 /* C++ FE uses TREE_PURPOSE to store initial values. */
14299 if (TREE_PURPOSE (l) && in_lto_p)
14301 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14302 debug_tree (l);
14303 error_found = true;
14305 if (!TYPE_P (TREE_VALUE (l)))
14307 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14308 debug_tree (l);
14309 error_found = true;
14312 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14314 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14315 debug_tree (TYPE_VALUES_RAW (t));
14316 error_found = true;
14318 if (TREE_CODE (t) != INTEGER_TYPE
14319 && TREE_CODE (t) != BOOLEAN_TYPE
14320 && TREE_CODE (t) != BITINT_TYPE
14321 && TREE_CODE (t) != OFFSET_TYPE
14322 && TREE_CODE (t) != REFERENCE_TYPE
14323 && TREE_CODE (t) != NULLPTR_TYPE
14324 && TREE_CODE (t) != POINTER_TYPE
14325 && TYPE_CACHED_VALUES_P (t))
14327 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14328 error_found = true;
14331 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14332 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14333 of a type. */
14334 if (TREE_CODE (t) == METHOD_TYPE
14335 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14337 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14338 error_found = true;
14341 if (error_found)
14343 debug_tree (const_cast <tree> (t));
14344 internal_error ("%qs failed", __func__);
14349 /* Return 1 if ARG interpreted as signed in its precision is known to be
14350 always positive, or 2 if ARG is known to be always negative, or 3 if
14351 ARG may be positive or negative. */
14354 get_range_pos_neg (tree arg)
14356 if (arg == error_mark_node)
14357 return 3;
14359 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14360 int cnt = 0;
14361 if (TREE_CODE (arg) == INTEGER_CST)
14363 wide_int w = wi::sext (wi::to_wide (arg), prec);
14364 if (wi::neg_p (w))
14365 return 2;
14366 else
14367 return 1;
14369 while (CONVERT_EXPR_P (arg)
14370 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14371 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14373 arg = TREE_OPERAND (arg, 0);
14374 /* Narrower value zero extended into wider type
14375 will always result in positive values. */
14376 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14377 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14378 return 1;
14379 prec = TYPE_PRECISION (TREE_TYPE (arg));
14380 if (++cnt > 30)
14381 return 3;
14384 if (TREE_CODE (arg) != SSA_NAME)
14385 return 3;
14386 value_range r;
14387 while (!get_global_range_query ()->range_of_expr (r, arg)
14388 || r.undefined_p () || r.varying_p ())
14390 gimple *g = SSA_NAME_DEF_STMT (arg);
14391 if (is_gimple_assign (g)
14392 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14394 tree t = gimple_assign_rhs1 (g);
14395 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14396 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14398 if (TYPE_UNSIGNED (TREE_TYPE (t))
14399 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14400 return 1;
14401 prec = TYPE_PRECISION (TREE_TYPE (t));
14402 arg = t;
14403 if (++cnt > 30)
14404 return 3;
14405 continue;
14408 return 3;
14410 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14412 /* For unsigned values, the "positive" range comes
14413 below the "negative" range. */
14414 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14415 return 1;
14416 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14417 return 2;
14419 else
14421 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14422 return 1;
14423 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14424 return 2;
14426 return 3;
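/* An illustrative sketch (not part of the build): for INTEGER_CST
   arguments the classification above depends only on the sign bit in the
   constant's own precision.  The example function name is made up.  */
#if 0
static void
example_range_pos_neg (void)
{
  tree pos = build_int_cst (integer_type_node, 42);
  tree neg = build_int_cst (integer_type_node, -42);
  gcc_checking_assert (get_range_pos_neg (pos) == 1);	/* Known positive.  */
  gcc_checking_assert (get_range_pos_neg (neg) == 2);	/* Known negative.  */
}
#endif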
14432 /* Return true if ARG is marked with the nonnull attribute in the
14433 current function signature. */
14435 bool
14436 nonnull_arg_p (const_tree arg)
14438 tree t, attrs, fntype;
14439 unsigned HOST_WIDE_INT arg_num;
14441 gcc_assert (TREE_CODE (arg) == PARM_DECL
14442 && (POINTER_TYPE_P (TREE_TYPE (arg))
14443 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14445 /* The static chain decl is always non-null. */
14446 if (arg == cfun->static_chain_decl)
14447 return true;
14449 /* The THIS argument of a method is always non-NULL. */
14450 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14451 && arg == DECL_ARGUMENTS (cfun->decl)
14452 && flag_delete_null_pointer_checks)
14453 return true;
14455 /* Values passed by reference are always non-NULL. */
14456 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14457 && flag_delete_null_pointer_checks)
14458 return true;
14460 fntype = TREE_TYPE (cfun->decl);
14461 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14463 attrs = lookup_attribute ("nonnull", attrs);
14465 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14466 if (attrs == NULL_TREE)
14467 return false;
14469 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14470 if (TREE_VALUE (attrs) == NULL_TREE)
14471 return true;
14473 /* Get the position number for ARG in the function signature. */
14474 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14476 t = DECL_CHAIN (t), arg_num++)
14478 if (t == arg)
14479 break;
14482 gcc_assert (t == arg);
14484 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14485 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14487 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14488 return true;
14492 return false;
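/* An illustrative sketch (not part of the build) of how a pass with CFUN
   set might query the predicate for each pointer parameter of the current
   function.  The example function name is made up.  */
#if 0
static void
example_report_nonnull_parms (void)
{
  for (tree parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    if (POINTER_TYPE_P (TREE_TYPE (parm)) && nonnull_arg_p (parm))
      inform (DECL_SOURCE_LOCATION (parm),
	      "parameter is known to be non-null on entry");
}
#endif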
14495 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14496 information. */
14498 location_t
14499 set_block (location_t loc, tree block)
14501 location_t pure_loc = get_pure_location (loc);
14502 source_range src_range = get_range_from_loc (line_table, loc);
14503 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14504 return line_table->get_or_create_combined_loc (pure_loc, src_range, block,
14505 discriminator);
14508 location_t
14509 set_source_range (tree expr, location_t start, location_t finish)
14511 source_range src_range;
14512 src_range.m_start = start;
14513 src_range.m_finish = finish;
14514 return set_source_range (expr, src_range);
14517 location_t
14518 set_source_range (tree expr, source_range src_range)
14520 if (!EXPR_P (expr))
14521 return UNKNOWN_LOCATION;
14523 location_t expr_location = EXPR_LOCATION (expr);
14524 location_t pure_loc = get_pure_location (expr_location);
14525 unsigned discriminator = get_discriminator_from_loc (expr_location);
14526 location_t adhoc = line_table->get_or_create_combined_loc (pure_loc,
14527 src_range,
14528 nullptr,
14529 discriminator);
14530 SET_EXPR_LOCATION (expr, adhoc);
14531 return adhoc;
14534 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
14535 if !CAN_HAVE_LOCATION_P (expr).
14537 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14538 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14540 Wrapper nodes can be identified using location_wrapper_p. */
14542 tree
14543 maybe_wrap_with_location (tree expr, location_t loc)
14545 if (expr == NULL)
14546 return NULL;
14547 if (loc == UNKNOWN_LOCATION)
14548 return expr;
14549 if (CAN_HAVE_LOCATION_P (expr))
14550 return expr;
14551 /* We should only be adding wrappers for constants and for decls,
14552 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14553 gcc_assert (CONSTANT_CLASS_P (expr)
14554 || DECL_P (expr)
14555 || EXCEPTIONAL_CLASS_P (expr));
14557 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14558 any impact of the wrapper nodes. */
14559 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14560 return expr;
14562 /* Compiler-generated temporary variables don't need a wrapper. */
14563 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14564 return expr;
14566 /* If any auto_suppress_location_wrappers are active, don't create
14567 wrappers. */
14568 if (suppress_location_wrappers > 0)
14569 return expr;
14571 tree_code code
14572 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14573 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14574 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14575 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14576 /* Mark this node as being a wrapper. */
14577 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14578 return wrapper;
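/* An illustrative sketch (not part of the build), assuming LOC is a real
   location and no auto_suppress_location_wrappers is active: constants
   other than STRING_CST get NON_LVALUE_EXPR wrappers, while STRING_CSTs
   get VIEW_CONVERT_EXPR wrappers.  See also test_location_wrappers below.
   The example function name is made up.  */
#if 0
static void
example_location_wrapping (location_t loc)
{
  tree ic = maybe_wrap_with_location (build_int_cst (integer_type_node, 1),
				      loc);
  tree sc = maybe_wrap_with_location (build_string (4, "foo"), loc);
  gcc_checking_assert (TREE_CODE (ic) == NON_LVALUE_EXPR);
  gcc_checking_assert (TREE_CODE (sc) == VIEW_CONVERT_EXPR);
}
#endif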
14581 int suppress_location_wrappers;
14583 /* Return the name of combined function FN, for debugging purposes. */
14585 const char *
14586 combined_fn_name (combined_fn fn)
14588 if (builtin_fn_p (fn))
14590 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14591 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14593 else
14594 return internal_fn_name (as_internal_fn (fn));
14597 /* Return a bitmap with a bit set corresponding to each argument in
14598 a function call type FNTYPE declared with attribute nonnull,
14599 or null if none of the function's arguments are nonnull. The caller
14600 must free the bitmap. */
14602 bitmap
14603 get_nonnull_args (const_tree fntype)
14605 if (fntype == NULL_TREE)
14606 return NULL;
14608 bitmap argmap = NULL;
14609 if (TREE_CODE (fntype) == METHOD_TYPE)
14611 /* The this pointer in C++ non-static member functions is
14612 implicitly nonnull whether or not it's declared as such. */
14613 argmap = BITMAP_ALLOC (NULL);
14614 bitmap_set_bit (argmap, 0);
14617 tree attrs = TYPE_ATTRIBUTES (fntype);
14618 if (!attrs)
14619 return argmap;
14621 /* A function declaration can specify multiple attribute nonnull,
14622 each with zero or more arguments. The loop below creates a bitmap
14623 representing a union of all the arguments. An empty (but non-null)
14624 bitmap means that all arguments have been declared nonnull. */
14625 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14627 attrs = lookup_attribute ("nonnull", attrs);
14628 if (!attrs)
14629 break;
14631 if (!argmap)
14632 argmap = BITMAP_ALLOC (NULL);
14634 if (!TREE_VALUE (attrs))
14636 /* Clear the bitmap in case a previous attribute nonnull
14637 set it and this one overrides it for all arguments. */
14638 bitmap_clear (argmap);
14639 return argmap;
14642 /* Iterate over the indices of the arguments declared nonnull
14643 and set a bit for each. */
14644 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14646 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14647 bitmap_set_bit (argmap, val);
14651 return argmap;
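/* An illustrative sketch (not part of the build) of how a caller might
   consume the bitmap: bit I set means the zero-based argument I must not
   be null, and an empty bitmap means every argument was declared nonnull.
   The example function name is made up.  */
#if 0
static bool
example_arg_must_be_nonnull (const_tree fntype, unsigned argno)
{
  bitmap nonnull = get_nonnull_args (fntype);
  if (!nonnull)
    return false;
  bool result = bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, argno);
  BITMAP_FREE (nonnull);
  return result;
}
#endif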
14654 /* Return true if TYPE is an empty type: a structure, union, or array type
14655 all of whose subobjects are (recursively) themselves empty. */
14657 bool
14658 is_empty_type (const_tree type)
14660 if (RECORD_OR_UNION_TYPE_P (type))
14662 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14663 if (TREE_CODE (field) == FIELD_DECL
14664 && !DECL_PADDING_P (field)
14665 && !is_empty_type (TREE_TYPE (field)))
14666 return false;
14667 return true;
14669 else if (TREE_CODE (type) == ARRAY_TYPE)
14670 return (integer_minus_onep (array_type_nelts (type))
14671 || TYPE_DOMAIN (type) == NULL_TREE
14672 || is_empty_type (TREE_TYPE (type)));
14673 return false;
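/* An illustrative sketch (not part of the build), in source terms:

     struct E {};                  // empty: no non-padding fields
     struct F { E e; E a[4]; };    // still empty: all subobjects are empty
     struct G { int i; };          // not empty

   is_empty_type is true for E and F and false for G.  */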
14676 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14677 that shouldn't be passed via stack. */
14679 bool
14680 default_is_empty_record (const_tree type)
14682 if (!abi_version_at_least (12))
14683 return false;
14685 if (type == error_mark_node)
14686 return false;
14688 if (TREE_ADDRESSABLE (type))
14689 return false;
14691 return is_empty_type (TYPE_MAIN_VARIANT (type));
14694 /* Determine whether TYPE is a structure with a flexible array member,
14695 or a union containing such a structure (possibly recursively). */
14697 bool
14698 flexible_array_type_p (const_tree type)
14700 tree x, last;
14701 switch (TREE_CODE (type))
14703 case RECORD_TYPE:
14704 last = NULL_TREE;
14705 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14706 if (TREE_CODE (x) == FIELD_DECL)
14707 last = x;
14708 if (last == NULL_TREE)
14709 return false;
14710 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14711 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14712 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14713 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14714 return true;
14715 return false;
14716 case UNION_TYPE:
14717 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14719 if (TREE_CODE (x) == FIELD_DECL
14720 && flexible_array_type_p (TREE_TYPE (x)))
14721 return true;
14723 return false;
14724 default:
14725 return false;
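/* An illustrative sketch (not part of the build), in source terms:

     struct S { int n; char data[]; };   // flexible array member last
     union U { struct S s; int i; };     // union containing such a struct

   flexible_array_type_p is true for both S and U, and false for a struct
   whose trailing member is an ordinary fixed-size array.  */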
14729 /* Like int_size_in_bytes, but handle empty records specially. */
14731 HOST_WIDE_INT
14732 arg_int_size_in_bytes (const_tree type)
14734 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14737 /* Like size_in_bytes, but handle empty records specially. */
14739 tree
14740 arg_size_in_bytes (const_tree type)
14742 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14745 /* Return true if an expression with CODE has to have the same result type as
14746 its first operand. */
14748 bool
14749 expr_type_first_operand_type_p (tree_code code)
14751 switch (code)
14753 case NEGATE_EXPR:
14754 case ABS_EXPR:
14755 case BIT_NOT_EXPR:
14756 case PAREN_EXPR:
14757 case CONJ_EXPR:
14759 case PLUS_EXPR:
14760 case MINUS_EXPR:
14761 case MULT_EXPR:
14762 case TRUNC_DIV_EXPR:
14763 case CEIL_DIV_EXPR:
14764 case FLOOR_DIV_EXPR:
14765 case ROUND_DIV_EXPR:
14766 case TRUNC_MOD_EXPR:
14767 case CEIL_MOD_EXPR:
14768 case FLOOR_MOD_EXPR:
14769 case ROUND_MOD_EXPR:
14770 case RDIV_EXPR:
14771 case EXACT_DIV_EXPR:
14772 case MIN_EXPR:
14773 case MAX_EXPR:
14774 case BIT_IOR_EXPR:
14775 case BIT_XOR_EXPR:
14776 case BIT_AND_EXPR:
14778 case LSHIFT_EXPR:
14779 case RSHIFT_EXPR:
14780 case LROTATE_EXPR:
14781 case RROTATE_EXPR:
14782 return true;
14784 default:
14785 return false;
14789 /* Return a typenode for the "standard" C type with a given name. */
14790 tree
14791 get_typenode_from_name (const char *name)
14793 if (name == NULL || *name == '\0')
14794 return NULL_TREE;
14796 if (strcmp (name, "char") == 0)
14797 return char_type_node;
14798 if (strcmp (name, "unsigned char") == 0)
14799 return unsigned_char_type_node;
14800 if (strcmp (name, "signed char") == 0)
14801 return signed_char_type_node;
14803 if (strcmp (name, "short int") == 0)
14804 return short_integer_type_node;
14805 if (strcmp (name, "short unsigned int") == 0)
14806 return short_unsigned_type_node;
14808 if (strcmp (name, "int") == 0)
14809 return integer_type_node;
14810 if (strcmp (name, "unsigned int") == 0)
14811 return unsigned_type_node;
14813 if (strcmp (name, "long int") == 0)
14814 return long_integer_type_node;
14815 if (strcmp (name, "long unsigned int") == 0)
14816 return long_unsigned_type_node;
14818 if (strcmp (name, "long long int") == 0)
14819 return long_long_integer_type_node;
14820 if (strcmp (name, "long long unsigned int") == 0)
14821 return long_long_unsigned_type_node;
14823 gcc_unreachable ();
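/* An illustrative sketch (not part of the build) of the mapping performed
   above.  The example function name is made up.  */
#if 0
static void
example_get_typenode_from_name (void)
{
  gcc_checking_assert (get_typenode_from_name ("int") == integer_type_node);
  gcc_checking_assert (get_typenode_from_name ("unsigned char")
		       == unsigned_char_type_node);
  gcc_checking_assert (get_typenode_from_name (NULL) == NULL_TREE);
}
#endif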
14826 /* List of pointer types used to declare builtins before we have seen their
14827 real declarations.
14829 Keep the size up to date in tree.h! */
14830 const builtin_structptr_type builtin_structptr_types[6] =
14832 { fileptr_type_node, ptr_type_node, "FILE" },
14833 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14834 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14835 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14836 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14837 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14840 /* Return the maximum object size. */
14842 tree
14843 max_object_size (void)
14845 /* To do: Make this a configurable parameter. */
14846 return TYPE_MAX_VALUE (ptrdiff_type_node);
14849 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14850 parameter default to false and that weeds out error_mark_node. */
14852 bool
14853 verify_type_context (location_t loc, type_context_kind context,
14854 const_tree type, bool silent_p)
14856 if (type == error_mark_node)
14857 return true;
14859 gcc_assert (TYPE_P (type));
14860 return (!targetm.verify_type_context
14861 || targetm.verify_type_context (loc, context, type, silent_p));
14864 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14865 delete operators. Return false if they may or may not name such
14866 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14867 do not. */
14869 bool
14870 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14871 bool *pcertain /* = NULL */)
14873 bool certain;
14874 if (!pcertain)
14875 pcertain = &certain;
14877 const char *new_name = IDENTIFIER_POINTER (new_asm);
14878 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14879 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14880 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14882 /* The following failures are due to invalid names so they're not
14883 considered certain mismatches. */
14884 *pcertain = false;
14886 if (new_len < 5 || delete_len < 6)
14887 return false;
14888 if (new_name[0] == '_')
14889 ++new_name, --new_len;
14890 if (new_name[0] == '_')
14891 ++new_name, --new_len;
14892 if (delete_name[0] == '_')
14893 ++delete_name, --delete_len;
14894 if (delete_name[0] == '_')
14895 ++delete_name, --delete_len;
14896 if (new_len < 4 || delete_len < 5)
14897 return false;
14899 /* The following failures are due to names of user-defined operators
14900 so they're also not considered certain mismatches. */
14902 /* *_len is now just the length after initial underscores. */
14903 if (new_name[0] != 'Z' || new_name[1] != 'n')
14904 return false;
14905 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14906 return false;
14908 /* The following failures are certain mismatches. */
14909 *pcertain = true;
14911 /* _Znw must match _Zdl, _Zna must match _Zda. */
14912 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14913 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14914 return false;
14915 /* 'j', 'm' and 'y' correspond to size_t. */
14916 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14917 return false;
14918 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14919 return false;
14920 if (new_len == 4
14921 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14923 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14924 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14925 if (delete_len == 5)
14926 return true;
14927 if (delete_len == 6 && delete_name[5] == new_name[3])
14928 return true;
14929 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14930 return true;
14932 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14933 || (new_len == 33
14934 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14936 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14937 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14938 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14939 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14940 return true;
14941 if (delete_len == 21
14942 && delete_name[5] == new_name[3]
14943 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14944 return true;
14945 if (delete_len == 34
14946 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14947 return true;
14950 /* The negative result is conservative. */
14951 *pcertain = false;
14952 return false;
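/* An illustrative sketch (not part of the build) of the matching above,
   using Itanium-mangled operator names.  The example function name is
   made up.  */
#if 0
static void
example_new_delete_pairs (void)
{
  /* operator new (size_t) pairs with operator delete (void *).  */
  gcc_checking_assert (valid_new_delete_pair_p (get_identifier ("_Znwm"),
						get_identifier ("_ZdlPv")));
  /* The array form does not pair with the scalar form; the mismatch is
     reported as certain.  */
  bool certain;
  gcc_checking_assert (!valid_new_delete_pair_p (get_identifier ("_Znam"),
						 get_identifier ("_ZdlPv"),
						 &certain));
  gcc_checking_assert (certain);
}
#endif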
14955 /* Return the zero-based number corresponding to the argument being
14956 deallocated if FNDECL is a deallocation function or an out-of-bounds
14957 value if it isn't. */
14959 unsigned
14960 fndecl_dealloc_argno (tree fndecl)
14962 /* A call to operator delete isn't recognized as one to a built-in. */
14963 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14965 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14966 return 0;
14968 /* Avoid placement delete that's not been inlined. */
14969 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14970 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14971 || id_equal (fname, "_ZdaPvS_")) // array form
14972 return UINT_MAX;
14973 return 0;
14976 /* TODO: Handle user-defined functions with attribute malloc? Handle
14977 known non-built-ins like fopen? */
14978 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14980 switch (DECL_FUNCTION_CODE (fndecl))
14982 case BUILT_IN_FREE:
14983 case BUILT_IN_REALLOC:
14984 return 0;
14985 default:
14986 break;
14988 return UINT_MAX;
14991 tree attrs = DECL_ATTRIBUTES (fndecl);
14992 if (!attrs)
14993 return UINT_MAX;
14995 for (tree atfree = attrs;
14996 (atfree = lookup_attribute ("*dealloc", atfree));
14997 atfree = TREE_CHAIN (atfree))
14999 tree alloc = TREE_VALUE (atfree);
15000 if (!alloc)
15001 continue;
15003 tree pos = TREE_CHAIN (alloc);
15004 if (!pos)
15005 return 0;
15007 pos = TREE_VALUE (pos);
15008 return TREE_INT_CST_LOW (pos) - 1;
15011 return UINT_MAX;
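/* An illustrative sketch (not part of the build), assuming the built-in
   declarations are available: free and realloc deallocate their first
   argument, while malloc is not a deallocator at all.  The example
   function name is made up.  */
#if 0
static void
example_dealloc_argno (void)
{
  tree free_decl = builtin_decl_explicit (BUILT_IN_FREE);
  tree malloc_decl = builtin_decl_explicit (BUILT_IN_MALLOC);
  gcc_checking_assert (fndecl_dealloc_argno (free_decl) == 0);
  gcc_checking_assert (fndecl_dealloc_argno (malloc_decl) == UINT_MAX);
}
#endif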
15014 /* If EXPR refers to a character array or pointer declared attribute
15015 nonstring, return a decl for that array or pointer and set *REF
15016 to the referenced enclosing object or pointer. Otherwise return
15017 null. */
15019 tree
15020 get_attr_nonstring_decl (tree expr, tree *ref)
15022 tree decl = expr;
15023 tree var = NULL_TREE;
15024 if (TREE_CODE (decl) == SSA_NAME)
15026 gimple *def = SSA_NAME_DEF_STMT (decl);
15028 if (is_gimple_assign (def))
15030 tree_code code = gimple_assign_rhs_code (def);
15031 if (code == ADDR_EXPR
15032 || code == COMPONENT_REF
15033 || code == VAR_DECL)
15034 decl = gimple_assign_rhs1 (def);
15036 else
15037 var = SSA_NAME_VAR (decl);
15040 if (TREE_CODE (decl) == ADDR_EXPR)
15041 decl = TREE_OPERAND (decl, 0);
15043 /* To simplify calling code, store the referenced DECL regardless of
15044 the attribute determined below, but avoid storing the SSA_NAME_VAR
15045 obtained above (it's not useful for dataflow purposes). */
15046 if (ref)
15047 *ref = decl;
15049 /* Use the SSA_NAME_VAR that was determined above to see if it's
15050 declared nonstring. Otherwise drill down into the referenced
15051 DECL. */
15052 if (var)
15053 decl = var;
15054 else if (TREE_CODE (decl) == ARRAY_REF)
15055 decl = TREE_OPERAND (decl, 0);
15056 else if (TREE_CODE (decl) == COMPONENT_REF)
15057 decl = TREE_OPERAND (decl, 1);
15058 else if (TREE_CODE (decl) == MEM_REF)
15059 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15061 if (DECL_P (decl)
15062 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15063 return decl;
15065 return NULL_TREE;
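/* An illustrative sketch (not part of the build), in source terms:

     char tag[4] __attribute__ ((nonstring));

   References to TAG, whether direct, through a COMPONENT_REF or
   ARRAY_REF, or via an SSA name whose value is TAG's address, make this
   function return TAG's decl.  */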
15068 /* Return the length of the attribute names string
15069 if the arglist chain has more than one entry, -1 otherwise. */
15072 get_target_clone_attr_len (tree arglist)
15074 tree arg;
15075 int str_len_sum = 0;
15076 int argnum = 0;
15078 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15080 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15081 size_t len = strlen (str);
15082 str_len_sum += len + 1;
15083 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15084 argnum++;
15085 argnum++;
15087 if (argnum <= 1)
15088 return -1;
15089 return str_len_sum;
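/* An illustrative sketch (not part of the build): for
   __attribute__ ((target_clones ("avx2", "default"))) the argument list
   carries two strings, so the result is
   strlen ("avx2") + 1 + strlen ("default") + 1 == 13, whereas a
   target_clones attribute naming only a single version yields -1.  */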
15092 void
15093 tree_cc_finalize (void)
15095 clear_nonstandard_integer_type_cache ();
15096 vec_free (bitint_type_cache);
15099 #if CHECKING_P
15101 namespace selftest {
15103 /* Selftests for tree. */
15105 /* Verify that integer constants are sane. */
15107 static void
15108 test_integer_constants ()
15110 ASSERT_TRUE (integer_type_node != NULL);
15111 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15113 tree type = integer_type_node;
15115 tree zero = build_zero_cst (type);
15116 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15117 ASSERT_EQ (type, TREE_TYPE (zero));
15119 tree one = build_int_cst (type, 1);
15120 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15121 ASSERT_EQ (type, TREE_TYPE (one));
15124 /* Verify identifiers. */
15126 static void
15127 test_identifiers ()
15129 tree identifier = get_identifier ("foo");
15130 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15131 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15134 /* Verify LABEL_DECL. */
15136 static void
15137 test_labels ()
15139 tree identifier = get_identifier ("err");
15140 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15141 identifier, void_type_node);
15142 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15143 ASSERT_FALSE (FORCED_LABEL (label_decl));
15146 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15147 are given by VALS. */
15149 static tree
15150 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15152 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15153 tree_vector_builder builder (type, vals.length (), 1);
15154 builder.splice (vals);
15155 return builder.build ();
15158 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15160 static void
15161 check_vector_cst (const vec<tree> &expected, tree actual)
15163 ASSERT_KNOWN_EQ (expected.length (),
15164 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15165 for (unsigned int i = 0; i < expected.length (); ++i)
15166 ASSERT_EQ (wi::to_wide (expected[i]),
15167 wi::to_wide (vector_cst_elt (actual, i)));
15170 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15171 and that its elements match EXPECTED. */
15173 static void
15174 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15175 unsigned int npatterns)
15177 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15178 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15179 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15180 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15181 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15182 check_vector_cst (expected, actual);
15185 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15186 and NPATTERNS background elements, and that its elements match
15187 EXPECTED. */
15189 static void
15190 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15191 unsigned int npatterns)
15193 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15194 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15195 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15196 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15197 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15198 check_vector_cst (expected, actual);
15201 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15202 and that its elements match EXPECTED. */
15204 static void
15205 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15206 unsigned int npatterns)
15208 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15209 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15210 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15211 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15212 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15213 check_vector_cst (expected, actual);
15216 /* Test the creation of VECTOR_CSTs. */
15218 static void
15219 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15221 auto_vec<tree, 8> elements (8);
15222 elements.quick_grow (8);
15223 tree element_type = build_nonstandard_integer_type (16, true);
15224 tree vector_type = build_vector_type (element_type, 8);
15226 /* Test a simple linear series with a base of 0 and a step of 1:
15227 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15228 for (unsigned int i = 0; i < 8; ++i)
15229 elements[i] = build_int_cst (element_type, i);
15230 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15231 check_vector_cst_stepped (elements, vector, 1);
15233 /* Try the same with the first element replaced by 100:
15234 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15235 elements[0] = build_int_cst (element_type, 100);
15236 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15237 check_vector_cst_stepped (elements, vector, 1);
15239 /* Try a series that wraps around.
15240 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15241 for (unsigned int i = 1; i < 8; ++i)
15242 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15243 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15244 check_vector_cst_stepped (elements, vector, 1);
15246 /* Try a downward series:
15247 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15248 for (unsigned int i = 1; i < 8; ++i)
15249 elements[i] = build_int_cst (element_type, 80 - i);
15250 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15251 check_vector_cst_stepped (elements, vector, 1);
15253 /* Try two interleaved series with different bases and steps:
15254 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15255 elements[1] = build_int_cst (element_type, 53);
15256 for (unsigned int i = 2; i < 8; i += 2)
15258 elements[i] = build_int_cst (element_type, 70 - i * 2);
15259 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15261 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15262 check_vector_cst_stepped (elements, vector, 2);
15264 /* Try a duplicated value:
15265 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15266 for (unsigned int i = 1; i < 8; ++i)
15267 elements[i] = elements[0];
15268 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15269 check_vector_cst_duplicate (elements, vector, 1);
15271 /* Try an interleaved duplicated value:
15272 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15273 elements[1] = build_int_cst (element_type, 55);
15274 for (unsigned int i = 2; i < 8; ++i)
15275 elements[i] = elements[i - 2];
15276 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15277 check_vector_cst_duplicate (elements, vector, 2);
15279 /* Try a duplicated value with 2 exceptions
15280 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15281 elements[0] = build_int_cst (element_type, 41);
15282 elements[1] = build_int_cst (element_type, 97);
15283 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15284 check_vector_cst_fill (elements, vector, 2);
15286 /* Try with and without a step
15287 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15288 for (unsigned int i = 3; i < 8; i += 2)
15289 elements[i] = build_int_cst (element_type, i * 7);
15290 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15291 check_vector_cst_stepped (elements, vector, 2);
15293 /* Try a fully-general constant:
15294 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15295 elements[5] = build_int_cst (element_type, 9990);
15296 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15297 check_vector_cst_fill (elements, vector, 4);
15300 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15301 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15302 modifying its argument in-place. */
15304 static void
15305 check_strip_nops (tree node, tree expected)
15307 STRIP_NOPS (node);
15308 ASSERT_EQ (expected, node);
15311 /* Verify location wrappers. */
15313 static void
15314 test_location_wrappers ()
15316 location_t loc = BUILTINS_LOCATION;
15318 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15320 /* Wrapping a constant. */
15321 tree int_cst = build_int_cst (integer_type_node, 42);
15322 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15323 ASSERT_FALSE (location_wrapper_p (int_cst));
15325 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15326 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15327 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15328 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15330 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15331 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15333 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15334 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15335 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15336 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15338 /* Wrapping a STRING_CST. */
15339 tree string_cst = build_string (4, "foo");
15340 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15341 ASSERT_FALSE (location_wrapper_p (string_cst));
15343 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15344 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15345 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15346 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15347 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
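/* Note that the STRING_CST is wrapped in a VIEW_CONVERT_EXPR rather than
   a NON_LVALUE_EXPR, presumably so that the wrapper does not hide the
   lvalue-ness of the string.  */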
15350 /* Wrapping a variable. */
15351 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15352 get_identifier ("some_int_var"),
15353 integer_type_node);
15354 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15355 ASSERT_FALSE (location_wrapper_p (int_var));
15357 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15358 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15359 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15360 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15362 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15363 wrapper. */
15364 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15365 ASSERT_FALSE (location_wrapper_p (r_cast));
15366 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15368 /* Verify that STRIP_NOPS removes wrappers. */
15369 check_strip_nops (wrapped_int_cst, int_cst);
15370 check_strip_nops (wrapped_string_cst, string_cst);
15371 check_strip_nops (wrapped_int_var, int_var);
15372 }
15374 /* Test various tree predicates. Verify that location wrappers don't
15375 affect the results. */
15377 static void
15378 test_predicates ()
15379 {
15380 /* Build various constants and wrappers around them. */
15382 location_t loc = BUILTINS_LOCATION;
15384 tree i_0 = build_int_cst (integer_type_node, 0);
15385 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15387 tree i_1 = build_int_cst (integer_type_node, 1);
15388 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15390 tree i_m1 = build_int_cst (integer_type_node, -1);
15391 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15393 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15394 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15395 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15396 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15397 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15398 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15400 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15401 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15402 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15404 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15405 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15406 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
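/* No location wrappers are built for the complex constants, so the
   predicates below are only exercised on the bare COMPLEX_CSTs.  */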
15408 /* TODO: vector constants. */
15410 /* Test integer_onep. */
15411 ASSERT_FALSE (integer_onep (i_0));
15412 ASSERT_FALSE (integer_onep (wr_i_0));
15413 ASSERT_TRUE (integer_onep (i_1));
15414 ASSERT_TRUE (integer_onep (wr_i_1));
15415 ASSERT_FALSE (integer_onep (i_m1));
15416 ASSERT_FALSE (integer_onep (wr_i_m1));
15417 ASSERT_FALSE (integer_onep (f_0));
15418 ASSERT_FALSE (integer_onep (wr_f_0));
15419 ASSERT_FALSE (integer_onep (f_1));
15420 ASSERT_FALSE (integer_onep (wr_f_1));
15421 ASSERT_FALSE (integer_onep (f_m1));
15422 ASSERT_FALSE (integer_onep (wr_f_m1));
15423 ASSERT_FALSE (integer_onep (c_i_0));
15424 ASSERT_TRUE (integer_onep (c_i_1));
15425 ASSERT_FALSE (integer_onep (c_i_m1));
15426 ASSERT_FALSE (integer_onep (c_f_0));
15427 ASSERT_FALSE (integer_onep (c_f_1));
15428 ASSERT_FALSE (integer_onep (c_f_m1));
15430 /* Test integer_zerop. */
15431 ASSERT_TRUE (integer_zerop (i_0));
15432 ASSERT_TRUE (integer_zerop (wr_i_0));
15433 ASSERT_FALSE (integer_zerop (i_1));
15434 ASSERT_FALSE (integer_zerop (wr_i_1));
15435 ASSERT_FALSE (integer_zerop (i_m1));
15436 ASSERT_FALSE (integer_zerop (wr_i_m1));
15437 ASSERT_FALSE (integer_zerop (f_0));
15438 ASSERT_FALSE (integer_zerop (wr_f_0));
15439 ASSERT_FALSE (integer_zerop (f_1));
15440 ASSERT_FALSE (integer_zerop (wr_f_1));
15441 ASSERT_FALSE (integer_zerop (f_m1));
15442 ASSERT_FALSE (integer_zerop (wr_f_m1));
15443 ASSERT_TRUE (integer_zerop (c_i_0));
15444 ASSERT_FALSE (integer_zerop (c_i_1));
15445 ASSERT_FALSE (integer_zerop (c_i_m1));
15446 ASSERT_FALSE (integer_zerop (c_f_0));
15447 ASSERT_FALSE (integer_zerop (c_f_1));
15448 ASSERT_FALSE (integer_zerop (c_f_m1));
15450 /* Test integer_all_onesp. */
15451 ASSERT_FALSE (integer_all_onesp (i_0));
15452 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15453 ASSERT_FALSE (integer_all_onesp (i_1));
15454 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15455 ASSERT_TRUE (integer_all_onesp (i_m1));
15456 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15457 ASSERT_FALSE (integer_all_onesp (f_0));
15458 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15459 ASSERT_FALSE (integer_all_onesp (f_1));
15460 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15461 ASSERT_FALSE (integer_all_onesp (f_m1));
15462 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15463 ASSERT_FALSE (integer_all_onesp (c_i_0));
15464 ASSERT_FALSE (integer_all_onesp (c_i_1));
15465 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15466 ASSERT_FALSE (integer_all_onesp (c_f_0));
15467 ASSERT_FALSE (integer_all_onesp (c_f_1));
15468 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15470 /* Test integer_minus_onep. */
15471 ASSERT_FALSE (integer_minus_onep (i_0));
15472 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15473 ASSERT_FALSE (integer_minus_onep (i_1));
15474 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15475 ASSERT_TRUE (integer_minus_onep (i_m1));
15476 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15477 ASSERT_FALSE (integer_minus_onep (f_0));
15478 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15479 ASSERT_FALSE (integer_minus_onep (f_1));
15480 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15481 ASSERT_FALSE (integer_minus_onep (f_m1));
15482 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15483 ASSERT_FALSE (integer_minus_onep (c_i_0));
15484 ASSERT_FALSE (integer_minus_onep (c_i_1));
15485 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15486 ASSERT_FALSE (integer_minus_onep (c_f_0));
15487 ASSERT_FALSE (integer_minus_onep (c_f_1));
15488 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15490 /* Test integer_each_onep. */
15491 ASSERT_FALSE (integer_each_onep (i_0));
15492 ASSERT_FALSE (integer_each_onep (wr_i_0));
15493 ASSERT_TRUE (integer_each_onep (i_1));
15494 ASSERT_TRUE (integer_each_onep (wr_i_1));
15495 ASSERT_FALSE (integer_each_onep (i_m1));
15496 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15497 ASSERT_FALSE (integer_each_onep (f_0));
15498 ASSERT_FALSE (integer_each_onep (wr_f_0));
15499 ASSERT_FALSE (integer_each_onep (f_1));
15500 ASSERT_FALSE (integer_each_onep (wr_f_1));
15501 ASSERT_FALSE (integer_each_onep (f_m1));
15502 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15503 ASSERT_FALSE (integer_each_onep (c_i_0));
15504 ASSERT_FALSE (integer_each_onep (c_i_1));
15505 ASSERT_FALSE (integer_each_onep (c_i_m1));
15506 ASSERT_FALSE (integer_each_onep (c_f_0));
15507 ASSERT_FALSE (integer_each_onep (c_f_1));
15508 ASSERT_FALSE (integer_each_onep (c_f_m1));
15510 /* Test integer_truep. */
15511 ASSERT_FALSE (integer_truep (i_0));
15512 ASSERT_FALSE (integer_truep (wr_i_0));
15513 ASSERT_TRUE (integer_truep (i_1));
15514 ASSERT_TRUE (integer_truep (wr_i_1));
15515 ASSERT_FALSE (integer_truep (i_m1));
15516 ASSERT_FALSE (integer_truep (wr_i_m1));
15517 ASSERT_FALSE (integer_truep (f_0));
15518 ASSERT_FALSE (integer_truep (wr_f_0));
15519 ASSERT_FALSE (integer_truep (f_1));
15520 ASSERT_FALSE (integer_truep (wr_f_1));
15521 ASSERT_FALSE (integer_truep (f_m1));
15522 ASSERT_FALSE (integer_truep (wr_f_m1));
15523 ASSERT_FALSE (integer_truep (c_i_0));
15524 ASSERT_TRUE (integer_truep (c_i_1));
15525 ASSERT_FALSE (integer_truep (c_i_m1));
15526 ASSERT_FALSE (integer_truep (c_f_0));
15527 ASSERT_FALSE (integer_truep (c_f_1));
15528 ASSERT_FALSE (integer_truep (c_f_m1));
15530 /* Test integer_nonzerop. */
15531 ASSERT_FALSE (integer_nonzerop (i_0));
15532 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15533 ASSERT_TRUE (integer_nonzerop (i_1));
15534 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15535 ASSERT_TRUE (integer_nonzerop (i_m1));
15536 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15537 ASSERT_FALSE (integer_nonzerop (f_0));
15538 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15539 ASSERT_FALSE (integer_nonzerop (f_1));
15540 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15541 ASSERT_FALSE (integer_nonzerop (f_m1));
15542 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15543 ASSERT_FALSE (integer_nonzerop (c_i_0));
15544 ASSERT_TRUE (integer_nonzerop (c_i_1));
15545 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15546 ASSERT_FALSE (integer_nonzerop (c_f_0));
15547 ASSERT_FALSE (integer_nonzerop (c_f_1));
15548 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15550 /* Test real_zerop. */
15551 ASSERT_FALSE (real_zerop (i_0));
15552 ASSERT_FALSE (real_zerop (wr_i_0));
15553 ASSERT_FALSE (real_zerop (i_1));
15554 ASSERT_FALSE (real_zerop (wr_i_1));
15555 ASSERT_FALSE (real_zerop (i_m1));
15556 ASSERT_FALSE (real_zerop (wr_i_m1));
15557 ASSERT_TRUE (real_zerop (f_0));
15558 ASSERT_TRUE (real_zerop (wr_f_0));
15559 ASSERT_FALSE (real_zerop (f_1));
15560 ASSERT_FALSE (real_zerop (wr_f_1));
15561 ASSERT_FALSE (real_zerop (f_m1));
15562 ASSERT_FALSE (real_zerop (wr_f_m1));
15563 ASSERT_FALSE (real_zerop (c_i_0));
15564 ASSERT_FALSE (real_zerop (c_i_1));
15565 ASSERT_FALSE (real_zerop (c_i_m1));
15566 ASSERT_TRUE (real_zerop (c_f_0));
15567 ASSERT_FALSE (real_zerop (c_f_1));
15568 ASSERT_FALSE (real_zerop (c_f_m1));
15570 /* Test real_onep. */
15571 ASSERT_FALSE (real_onep (i_0));
15572 ASSERT_FALSE (real_onep (wr_i_0));
15573 ASSERT_FALSE (real_onep (i_1));
15574 ASSERT_FALSE (real_onep (wr_i_1));
15575 ASSERT_FALSE (real_onep (i_m1));
15576 ASSERT_FALSE (real_onep (wr_i_m1));
15577 ASSERT_FALSE (real_onep (f_0));
15578 ASSERT_FALSE (real_onep (wr_f_0));
15579 ASSERT_TRUE (real_onep (f_1));
15580 ASSERT_TRUE (real_onep (wr_f_1));
15581 ASSERT_FALSE (real_onep (f_m1));
15582 ASSERT_FALSE (real_onep (wr_f_m1));
15583 ASSERT_FALSE (real_onep (c_i_0));
15584 ASSERT_FALSE (real_onep (c_i_1));
15585 ASSERT_FALSE (real_onep (c_i_m1));
15586 ASSERT_FALSE (real_onep (c_f_0));
15587 ASSERT_TRUE (real_onep (c_f_1));
15588 ASSERT_FALSE (real_onep (c_f_m1));
15590 /* Test real_minus_onep. */
15591 ASSERT_FALSE (real_minus_onep (i_0));
15592 ASSERT_FALSE (real_minus_onep (wr_i_0));
15593 ASSERT_FALSE (real_minus_onep (i_1));
15594 ASSERT_FALSE (real_minus_onep (wr_i_1));
15595 ASSERT_FALSE (real_minus_onep (i_m1));
15596 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15597 ASSERT_FALSE (real_minus_onep (f_0));
15598 ASSERT_FALSE (real_minus_onep (wr_f_0));
15599 ASSERT_FALSE (real_minus_onep (f_1));
15600 ASSERT_FALSE (real_minus_onep (wr_f_1));
15601 ASSERT_TRUE (real_minus_onep (f_m1));
15602 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15603 ASSERT_FALSE (real_minus_onep (c_i_0));
15604 ASSERT_FALSE (real_minus_onep (c_i_1));
15605 ASSERT_FALSE (real_minus_onep (c_i_m1));
15606 ASSERT_FALSE (real_minus_onep (c_f_0));
15607 ASSERT_FALSE (real_minus_onep (c_f_1));
15608 ASSERT_TRUE (real_minus_onep (c_f_m1));
15610 /* Test zerop. */
15611 ASSERT_TRUE (zerop (i_0));
15612 ASSERT_TRUE (zerop (wr_i_0));
15613 ASSERT_FALSE (zerop (i_1));
15614 ASSERT_FALSE (zerop (wr_i_1));
15615 ASSERT_FALSE (zerop (i_m1));
15616 ASSERT_FALSE (zerop (wr_i_m1));
15617 ASSERT_TRUE (zerop (f_0));
15618 ASSERT_TRUE (zerop (wr_f_0));
15619 ASSERT_FALSE (zerop (f_1));
15620 ASSERT_FALSE (zerop (wr_f_1));
15621 ASSERT_FALSE (zerop (f_m1));
15622 ASSERT_FALSE (zerop (wr_f_m1));
15623 ASSERT_TRUE (zerop (c_i_0));
15624 ASSERT_FALSE (zerop (c_i_1));
15625 ASSERT_FALSE (zerop (c_i_m1));
15626 ASSERT_TRUE (zerop (c_f_0));
15627 ASSERT_FALSE (zerop (c_f_1));
15628 ASSERT_FALSE (zerop (c_f_m1));
15630 /* Test tree_expr_nonnegative_p. */
15631 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15632 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15633 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15634 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15635 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15636 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15637 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15638 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15639 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15640 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15641 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15642 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15643 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15644 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15645 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15646 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15647 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15648 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15650 /* Test tree_expr_nonzero_p. */
15651 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15652 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15653 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15654 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15655 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15656 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15658 /* Test integer_valued_real_p. */
15659 ASSERT_FALSE (integer_valued_real_p (i_0));
15660 ASSERT_TRUE (integer_valued_real_p (f_0));
15661 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15662 ASSERT_TRUE (integer_valued_real_p (f_1));
15663 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15665 /* Test integer_pow2p. */
15666 ASSERT_FALSE (integer_pow2p (i_0));
15667 ASSERT_TRUE (integer_pow2p (i_1));
15668 ASSERT_TRUE (integer_pow2p (wr_i_1));
15670 /* Test uniform_integer_cst_p. */
15671 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15672 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15673 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15674 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15675 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15676 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15677 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15678 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15679 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15680 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15681 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15682 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15683 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15684 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15685 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15686 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15687 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15688 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15689 }
15691 /* Check that string escaping works correctly. */
15693 static void
15694 test_escaped_strings (void)
15695 {
15696 int saved_cutoff;
15697 escaped_string msg;
15699 msg.escape (NULL);
15700 /* ASSERT_STREQ does not accept NULL as a valid test
15701 result, so we have to use ASSERT_EQ instead. */
15702 ASSERT_EQ (NULL, (const char *) msg);
15704 msg.escape ("");
15705 ASSERT_STREQ ("", (const char *) msg);
15707 msg.escape ("foobar");
15708 ASSERT_STREQ ("foobar", (const char *) msg);
15710 /* Ensure that we have -fmessage-length set to 0. */
15711 saved_cutoff = pp_line_cutoff (global_dc->printer);
15712 pp_line_cutoff (global_dc->printer) = 0;
15714 msg.escape ("foo\nbar");
15715 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15717 msg.escape ("\a\b\f\n\r\t\v");
15718 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15720 /* Now repeat the tests with -fmessage-length set to 5. */
15721 pp_line_cutoff (global_dc->printer) = 5;
15723 /* Note that the newline is not translated into an escape. */
15724 msg.escape ("foo\nbar");
15725 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15727 msg.escape ("\a\b\f\n\r\t\v");
15728 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
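/* With a nonzero message length the embedded newline survives unescaped,
   presumably so that the pretty-printer can still wrap the message, while
   the remaining control characters are still escaped.  */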
15730 /* Restore the original message length setting. */
15731 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15732 }
15734 /* Run all of the selftests within this file. */
15736 void
15737 tree_cc_tests ()
15738 {
15739 test_integer_constants ();
15740 test_identifiers ();
15741 test_labels ();
15742 test_vector_cst_patterns ();
15743 test_location_wrappers ();
15744 test_predicates ();
15745 test_escaped_strings ();
15746 }
15748 } // namespace selftest
15750 #endif /* CHECKING_P */
15752 #include "gt-tree.h"