[official-gcc.git] / gcc / tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24     tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
 28     call language-dependent routines.  */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 /* Names of tree components.
78 Used for printing out the tree and error messages. */
79 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
80 #define END_OF_BASE_TREE_CODES "@dummy",
82 static const char *const tree_code_name[] = {
83 #include "all-tree.def"
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
89 /* Each tree code class has an associated string representation.
90 These must correspond to the tree_code_class entries. */
92 const char *const tree_code_class_strings[] =
94 "exceptional",
95 "constant",
96 "type",
97 "declaration",
98 "reference",
99 "comparison",
100 "unary",
101 "binary",
102 "statement",
103 "vl_exp",
104 "expression"
107 /* obstack.[ch] explicitly declined to prototype this. */
108 extern int _obstack_allocated_p (struct obstack *h, void *obj);
110 /* Statistics-gathering stuff. */
112 static uint64_t tree_code_counts[MAX_TREE_CODES];
113 uint64_t tree_node_counts[(int) all_kinds];
114 uint64_t tree_node_sizes[(int) all_kinds];
116 /* Keep in sync with tree.h:enum tree_node_kind. */
117 static const char * const tree_node_kind_names[] = {
118 "decls",
119 "types",
120 "blocks",
121 "stmts",
122 "refs",
123 "exprs",
124 "constants",
125 "identifiers",
126 "vecs",
127 "binfos",
128 "ssa names",
129 "constructors",
130 "random kinds",
131 "lang_decl kinds",
132 "lang_type kinds",
133 "omp clauses",
136 /* Unique id for next decl created. */
137 static GTY(()) int next_decl_uid;
138 /* Unique id for next type created. */
139 static GTY(()) unsigned next_type_uid = 1;
140 /* Unique id for next debug decl created. Use negative numbers,
141 to catch erroneous uses. */
142 static GTY(()) int next_debug_decl_uid;
144 /* Since we cannot rehash a type after it is in the table, we have to
145 keep the hash code. */
147 struct GTY((for_user)) type_hash {
148 unsigned long hash;
149 tree type;
152 /* Initial size of the hash table (rounded to next prime). */
153 #define TYPE_HASH_INITIAL_SIZE 1000
155 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
157 static hashval_t hash (type_hash *t) { return t->hash; }
158 static bool equal (type_hash *a, type_hash *b);
160 static int
161 keep_cache_entry (type_hash *&t)
163 return ggc_marked_p (t->type);
167 /* Now here is the hash table. When recording a type, it is added to
168 the slot whose index is the hash code. Note that the hash table is
169 used for several kinds of types (function types, array types and
170 array index range types, for now). While all these live in the
171 same table, they are completely independent, and the hash code is
172 computed differently for each of these. */
174 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
176 /* Hash table and temporary node for larger integer const values. */
177 static GTY (()) tree int_cst_node;
179 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
181 static hashval_t hash (tree t);
182 static bool equal (tree x, tree y);
185 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
187 /* Class and variable for making sure that there is a single POLY_INT_CST
188 for a given value. */
189 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
191 typedef std::pair<tree, const poly_wide_int *> compare_type;
192 static hashval_t hash (tree t);
193 static bool equal (tree x, const compare_type &y);
196 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
198 /* Hash table for optimization flags and target option flags. Use the same
199 hash table for both sets of options. Nodes for building the current
200 optimization and target option nodes. The assumption is most of the time
201 the options created will already be in the hash table, so we avoid
 202    allocating and freeing up a node repeatedly.  */
203 static GTY (()) tree cl_optimization_node;
204 static GTY (()) tree cl_target_option_node;
206 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
212 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
214 /* General tree->tree mapping structure for use in hash tables. */
217 static GTY ((cache))
218 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
223 static GTY ((cache))
224 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
226 static void set_type_quals (tree, int);
227 static void print_type_hash_statistics (void);
228 static void print_debug_expr_statistics (void);
229 static void print_value_expr_statistics (void);
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
234 bool int_n_enabled_p[NUM_INT_N_ENTS];
235 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
237 bool tree_contains_struct[MAX_TREE_CODES][64];
239 /* Number of operands for each OMP clause. */
240 unsigned const char omp_clause_num_ops[] =
242 0, /* OMP_CLAUSE_ERROR */
243 1, /* OMP_CLAUSE_PRIVATE */
244 1, /* OMP_CLAUSE_SHARED */
245 1, /* OMP_CLAUSE_FIRSTPRIVATE */
246 2, /* OMP_CLAUSE_LASTPRIVATE */
247 5, /* OMP_CLAUSE_REDUCTION */
248 5, /* OMP_CLAUSE_TASK_REDUCTION */
249 5, /* OMP_CLAUSE_IN_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 1, /* OMP_CLAUSE_AFFINITY */
254 2, /* OMP_CLAUSE_ALIGNED */
255 3, /* OMP_CLAUSE_ALLOCATE */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_NONTEMPORAL */
258 1, /* OMP_CLAUSE_UNIFORM */
259 1, /* OMP_CLAUSE_ENTER */
260 1, /* OMP_CLAUSE_LINK */
261 1, /* OMP_CLAUSE_DETACH */
262 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
263 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
264 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
265 1, /* OMP_CLAUSE_INCLUSIVE */
266 1, /* OMP_CLAUSE_EXCLUSIVE */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
271 1, /* OMP_CLAUSE_DOACROSS */
272 2, /* OMP_CLAUSE__CACHE_ */
273 2, /* OMP_CLAUSE_GANG */
274 1, /* OMP_CLAUSE_ASYNC */
275 1, /* OMP_CLAUSE_WAIT */
276 0, /* OMP_CLAUSE_AUTO */
277 0, /* OMP_CLAUSE_SEQ */
278 1, /* OMP_CLAUSE__LOOPTEMP_ */
279 1, /* OMP_CLAUSE__REDUCTEMP_ */
280 1, /* OMP_CLAUSE__CONDTEMP_ */
281 1, /* OMP_CLAUSE__SCANTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_NUM_THREADS */
284 1, /* OMP_CLAUSE_SCHEDULE */
285 0, /* OMP_CLAUSE_NOWAIT */
286 1, /* OMP_CLAUSE_ORDERED */
287 0, /* OMP_CLAUSE_DEFAULT */
288 3, /* OMP_CLAUSE_COLLAPSE */
289 0, /* OMP_CLAUSE_UNTIED */
290 1, /* OMP_CLAUSE_FINAL */
291 0, /* OMP_CLAUSE_MERGEABLE */
292 1, /* OMP_CLAUSE_DEVICE */
293 1, /* OMP_CLAUSE_DIST_SCHEDULE */
294 0, /* OMP_CLAUSE_INBRANCH */
295 0, /* OMP_CLAUSE_NOTINBRANCH */
296 2, /* OMP_CLAUSE_NUM_TEAMS */
297 1, /* OMP_CLAUSE_THREAD_LIMIT */
298 0, /* OMP_CLAUSE_PROC_BIND */
299 1, /* OMP_CLAUSE_SAFELEN */
300 1, /* OMP_CLAUSE_SIMDLEN */
301 0, /* OMP_CLAUSE_DEVICE_TYPE */
302 0, /* OMP_CLAUSE_FOR */
303 0, /* OMP_CLAUSE_PARALLEL */
304 0, /* OMP_CLAUSE_SECTIONS */
305 0, /* OMP_CLAUSE_TASKGROUP */
306 1, /* OMP_CLAUSE_PRIORITY */
307 1, /* OMP_CLAUSE_GRAINSIZE */
308 1, /* OMP_CLAUSE_NUM_TASKS */
309 0, /* OMP_CLAUSE_NOGROUP */
310 0, /* OMP_CLAUSE_THREADS */
311 0, /* OMP_CLAUSE_SIMD */
312 1, /* OMP_CLAUSE_HINT */
313 0, /* OMP_CLAUSE_DEFAULTMAP */
314 0, /* OMP_CLAUSE_ORDER */
315 0, /* OMP_CLAUSE_BIND */
316 1, /* OMP_CLAUSE_FILTER */
317 1, /* OMP_CLAUSE__SIMDUID_ */
318 0, /* OMP_CLAUSE__SIMT_ */
319 0, /* OMP_CLAUSE_INDEPENDENT */
320 1, /* OMP_CLAUSE_WORKER */
321 1, /* OMP_CLAUSE_VECTOR */
322 1, /* OMP_CLAUSE_NUM_GANGS */
323 1, /* OMP_CLAUSE_NUM_WORKERS */
324 1, /* OMP_CLAUSE_VECTOR_LENGTH */
325 3, /* OMP_CLAUSE_TILE */
326 0, /* OMP_CLAUSE_IF_PRESENT */
327 0, /* OMP_CLAUSE_FINALIZE */
328 0, /* OMP_CLAUSE_NOHOST */
331 const char * const omp_clause_code_name[] =
333 "error_clause",
334 "private",
335 "shared",
336 "firstprivate",
337 "lastprivate",
338 "reduction",
339 "task_reduction",
340 "in_reduction",
341 "copyin",
342 "copyprivate",
343 "linear",
344 "affinity",
345 "aligned",
346 "allocate",
347 "depend",
348 "nontemporal",
349 "uniform",
350 "enter",
351 "link",
352 "detach",
353 "use_device_ptr",
354 "use_device_addr",
355 "is_device_ptr",
356 "inclusive",
357 "exclusive",
358 "from",
359 "to",
360 "map",
361 "has_device_addr",
362 "doacross",
363 "_cache_",
364 "gang",
365 "async",
366 "wait",
367 "auto",
368 "seq",
369 "_looptemp_",
370 "_reductemp_",
371 "_condtemp_",
372 "_scantemp_",
373 "if",
374 "num_threads",
375 "schedule",
376 "nowait",
377 "ordered",
378 "default",
379 "collapse",
380 "untied",
381 "final",
382 "mergeable",
383 "device",
384 "dist_schedule",
385 "inbranch",
386 "notinbranch",
387 "num_teams",
388 "thread_limit",
389 "proc_bind",
390 "safelen",
391 "simdlen",
392 "device_type",
393 "for",
394 "parallel",
395 "sections",
396 "taskgroup",
397 "priority",
398 "grainsize",
399 "num_tasks",
400 "nogroup",
401 "threads",
402 "simd",
403 "hint",
404 "defaultmap",
405 "order",
406 "bind",
407 "filter",
408 "_simduid_",
409 "_simt_",
410 "independent",
411 "worker",
412 "vector",
413 "num_gangs",
414 "num_workers",
415 "vector_length",
416 "tile",
417 "if_present",
418 "finalize",
419 "nohost",
422 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
 423    clause names, but for diagnostics etc. we would like to use the "user"
424 clause names. */
426 const char *
427 user_omp_clause_code_name (tree clause, bool oacc)
429 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
430 distinguish clauses as seen by the user. See also where front ends do
431 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
432 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
433 switch (OMP_CLAUSE_MAP_KIND (clause))
435 case GOMP_MAP_FORCE_ALLOC:
436 case GOMP_MAP_ALLOC: return "create";
437 case GOMP_MAP_FORCE_TO:
438 case GOMP_MAP_TO: return "copyin";
439 case GOMP_MAP_FORCE_FROM:
440 case GOMP_MAP_FROM: return "copyout";
441 case GOMP_MAP_FORCE_TOFROM:
442 case GOMP_MAP_TOFROM: return "copy";
443 case GOMP_MAP_RELEASE: return "delete";
444 case GOMP_MAP_FORCE_PRESENT: return "present";
445 case GOMP_MAP_ATTACH: return "attach";
446 case GOMP_MAP_FORCE_DETACH:
447 case GOMP_MAP_DETACH: return "detach";
448 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
449 case GOMP_MAP_LINK: return "link";
450 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
451 default: break;
454 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
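/* Illustrative example (a sketch, not taken from the sources): for an
   OMP_CLAUSE_MAP clause whose map kind is GOMP_MAP_TOFROM,

     user_omp_clause_code_name (clause, true)   returns "copy"  (OpenACC)
     user_omp_clause_code_name (clause, false)  returns "map"   (OpenMP)

   since only the OpenACC path consults OMP_CLAUSE_MAP_KIND.  */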
458 /* Return the tree node structure used by tree code CODE. */
460 static inline enum tree_node_structure_enum
461 tree_node_structure_for_code (enum tree_code code)
463 switch (TREE_CODE_CLASS (code))
465 case tcc_declaration:
466 switch (code)
468 case CONST_DECL: return TS_CONST_DECL;
469 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
470 case FIELD_DECL: return TS_FIELD_DECL;
471 case FUNCTION_DECL: return TS_FUNCTION_DECL;
472 case LABEL_DECL: return TS_LABEL_DECL;
473 case PARM_DECL: return TS_PARM_DECL;
474 case RESULT_DECL: return TS_RESULT_DECL;
475 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
476 case TYPE_DECL: return TS_TYPE_DECL;
477 case VAR_DECL: return TS_VAR_DECL;
478 default: return TS_DECL_NON_COMMON;
481 case tcc_type: return TS_TYPE_NON_COMMON;
483 case tcc_binary:
484 case tcc_comparison:
485 case tcc_expression:
486 case tcc_reference:
487 case tcc_statement:
488 case tcc_unary:
489 case tcc_vl_exp: return TS_EXP;
491 default: /* tcc_constant and tcc_exceptional */
492 break;
495 switch (code)
497 /* tcc_constant cases. */
498 case COMPLEX_CST: return TS_COMPLEX;
499 case FIXED_CST: return TS_FIXED_CST;
500 case INTEGER_CST: return TS_INT_CST;
501 case POLY_INT_CST: return TS_POLY_INT_CST;
502 case REAL_CST: return TS_REAL_CST;
503 case STRING_CST: return TS_STRING;
504 case VECTOR_CST: return TS_VECTOR;
505 case VOID_CST: return TS_TYPED;
507 /* tcc_exceptional cases. */
508 case BLOCK: return TS_BLOCK;
509 case CONSTRUCTOR: return TS_CONSTRUCTOR;
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case OMP_CLAUSE: return TS_OMP_CLAUSE;
513 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
514 case PLACEHOLDER_EXPR: return TS_COMMON;
515 case SSA_NAME: return TS_SSA_NAME;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
518 case TREE_BINFO: return TS_BINFO;
519 case TREE_LIST: return TS_LIST;
520 case TREE_VEC: return TS_VEC;
522 default:
523 gcc_unreachable ();
528 /* Initialize tree_contains_struct to describe the hierarchy of tree
529 nodes. */
531 static void
532 initialize_tree_contains_struct (void)
534 unsigned i;
536 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
538 enum tree_code code;
539 enum tree_node_structure_enum ts_code;
541 code = (enum tree_code) i;
542 ts_code = tree_node_structure_for_code (code);
544 /* Mark the TS structure itself. */
545 tree_contains_struct[code][ts_code] = 1;
547 /* Mark all the structures that TS is derived from. */
548 switch (ts_code)
550 case TS_TYPED:
551 case TS_BLOCK:
552 case TS_OPTIMIZATION:
553 case TS_TARGET_OPTION:
554 MARK_TS_BASE (code);
555 break;
557 case TS_COMMON:
558 case TS_INT_CST:
559 case TS_POLY_INT_CST:
560 case TS_REAL_CST:
561 case TS_FIXED_CST:
562 case TS_VECTOR:
563 case TS_STRING:
564 case TS_COMPLEX:
565 case TS_SSA_NAME:
566 case TS_CONSTRUCTOR:
567 case TS_EXP:
568 case TS_STATEMENT_LIST:
569 MARK_TS_TYPED (code);
570 break;
572 case TS_IDENTIFIER:
573 case TS_DECL_MINIMAL:
574 case TS_TYPE_COMMON:
575 case TS_LIST:
576 case TS_VEC:
577 case TS_BINFO:
578 case TS_OMP_CLAUSE:
579 MARK_TS_COMMON (code);
580 break;
582 case TS_TYPE_WITH_LANG_SPECIFIC:
583 MARK_TS_TYPE_COMMON (code);
584 break;
586 case TS_TYPE_NON_COMMON:
587 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
588 break;
590 case TS_DECL_COMMON:
591 MARK_TS_DECL_MINIMAL (code);
592 break;
594 case TS_DECL_WRTL:
595 case TS_CONST_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 case TS_DECL_NON_COMMON:
600 MARK_TS_DECL_WITH_VIS (code);
601 break;
603 case TS_DECL_WITH_VIS:
604 case TS_PARM_DECL:
605 case TS_LABEL_DECL:
606 case TS_RESULT_DECL:
607 MARK_TS_DECL_WRTL (code);
608 break;
610 case TS_FIELD_DECL:
611 MARK_TS_DECL_COMMON (code);
612 break;
614 case TS_VAR_DECL:
615 MARK_TS_DECL_WITH_VIS (code);
616 break;
618 case TS_TYPE_DECL:
619 case TS_FUNCTION_DECL:
620 MARK_TS_DECL_NON_COMMON (code);
621 break;
623 case TS_TRANSLATION_UNIT_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
627 default:
628 gcc_unreachable ();
632 /* Basic consistency checks for attributes used in fold. */
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
634 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
646 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
662 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
663 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
665 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
666 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
667 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
669 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
670 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
676 /* Init tree.cc. */
678 void
679 init_ttree (void)
681 /* Initialize the hash table of types. */
682 type_hash_table
683 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
685 debug_expr_for_decl
686 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
688 value_expr_for_decl
689 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
691 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
693 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
695 int_cst_node = make_int_cst (1, 1);
697 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
699 cl_optimization_node = make_node (OPTIMIZATION_NODE);
700 cl_target_option_node = make_node (TARGET_OPTION_NODE);
702 /* Initialize the tree_contains_struct array. */
703 initialize_tree_contains_struct ();
704 lang_hooks.init_ts ();
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
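/* Usage sketch (illustrative only): the assembler name is computed lazily,
   so the first call may invoke the front end's mangler.

     tree asmname = decl_assembler_name (fndecl);     // fndecl: some FUNCTION_DECL
     const char *str = IDENTIFIER_POINTER (asmname);  // e.g. a C++ mangled name

   Later calls return the cached DECL_ASSEMBLER_NAME_RAW value.  */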
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE, if this changes the
721 name. */
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 /* Return true if DECL may need an assembler name to be set. */
732 static inline bool
733 need_assembler_name_p (tree decl)
735 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
 736    Rule merging.  This makes type_odr_p return true on those types during
 737    LTO, and by comparing the mangled names we can tell which types are intended
 738    to be equivalent across compilation units.
740 We do not store names of type_in_anonymous_namespace_p.
 742    Record, union and enumeration types have linkage that allows us
 743    to check type_in_anonymous_namespace_p.  We do not mangle compound types
 744    that can always be compared structurally.
746 Similarly for builtin types, we compare properties of their main variant.
 747    A special case is integer types, where mangling does distinguish
 748    between char/signed char/unsigned char etc.  Storing names for these lets
 749    e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
750 See cp/mangle.cc:write_builtin_type for details. */
752 if (TREE_CODE (decl) == TYPE_DECL)
754 if (DECL_NAME (decl)
755 && decl == TYPE_NAME (TREE_TYPE (decl))
756 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
757 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
758 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
759 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
760 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
761 && (type_with_linkage_p (TREE_TYPE (decl))
762 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
763 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
764 return !DECL_ASSEMBLER_NAME_SET_P (decl);
765 return false;
767 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
768 if (!VAR_OR_FUNCTION_DECL_P (decl))
769 return false;
771 /* If DECL already has its assembler name set, it does not need a
772 new one. */
773 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
774 || DECL_ASSEMBLER_NAME_SET_P (decl))
775 return false;
777 /* Abstract decls do not need an assembler name. */
778 if (DECL_ABSTRACT_P (decl))
779 return false;
781 /* For VAR_DECLs, only static, public and external symbols need an
782 assembler name. */
783 if (VAR_P (decl)
784 && !TREE_STATIC (decl)
785 && !TREE_PUBLIC (decl)
786 && !DECL_EXTERNAL (decl))
787 return false;
789 if (TREE_CODE (decl) == FUNCTION_DECL)
791 /* Do not set assembler name on builtins. Allow RTL expansion to
792 decide whether to expand inline or via a regular call. */
793 if (fndecl_built_in_p (decl)
794 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
795 return false;
797 /* Functions represented in the callgraph need an assembler name. */
798 if (cgraph_node::get (decl) != NULL)
799 return true;
801 /* Unused and not public functions don't need an assembler name. */
802 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
803 return false;
806 return true;
809 /* If T needs an assembler name, have one created for it. */
811 void
812 assign_assembler_name_if_needed (tree t)
814 if (need_assembler_name_p (t))
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
830 decl_assembler_name (t);
832 input_location = saved_location;
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
 858 /* When the target supports named sections, return the section name of NODE
 859    as a string, or NULL if it is in no section.  */
860 const char *
861 decl_section_name (const_tree node)
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
 869 /* Set the section name of NODE to the string VALUE; a NULL VALUE
 870    removes the section.  */
871 void
872 set_decl_section_name (tree node, const char *value)
874 struct symtab_node *snode;
876 if (value == NULL)
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
889 /* Set section name of NODE to match the section name of OTHER.
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
907 else
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
930 struct varpool_node *vnode;
932 if (model == TLS_MODEL_NONE)
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
943 /* Compute the number of bytes occupied by a tree with code CODE.
944 This function cannot be used for nodes that have variable sizes,
945 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
946 size_t
947 tree_code_size (enum tree_code code)
949 switch (TREE_CODE_CLASS (code))
951 case tcc_declaration: /* A decl node */
952 switch (code)
954 case FIELD_DECL: return sizeof (tree_field_decl);
955 case PARM_DECL: return sizeof (tree_parm_decl);
956 case VAR_DECL: return sizeof (tree_var_decl);
957 case LABEL_DECL: return sizeof (tree_label_decl);
958 case RESULT_DECL: return sizeof (tree_result_decl);
959 case CONST_DECL: return sizeof (tree_const_decl);
960 case TYPE_DECL: return sizeof (tree_type_decl);
961 case FUNCTION_DECL: return sizeof (tree_function_decl);
962 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
963 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
964 case NAMESPACE_DECL:
965 case IMPORTED_DECL:
966 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
967 default:
968 gcc_checking_assert (code >= NUM_TREE_CODES);
969 return lang_hooks.tree_size (code);
972 case tcc_type: /* a type node */
973 switch (code)
975 case OFFSET_TYPE:
976 case ENUMERAL_TYPE:
977 case BOOLEAN_TYPE:
978 case INTEGER_TYPE:
979 case REAL_TYPE:
980 case OPAQUE_TYPE:
981 case POINTER_TYPE:
982 case REFERENCE_TYPE:
983 case NULLPTR_TYPE:
984 case FIXED_POINT_TYPE:
985 case COMPLEX_TYPE:
986 case VECTOR_TYPE:
987 case ARRAY_TYPE:
988 case RECORD_TYPE:
989 case UNION_TYPE:
990 case QUAL_UNION_TYPE:
991 case VOID_TYPE:
992 case FUNCTION_TYPE:
993 case METHOD_TYPE:
994 case LANG_TYPE: return sizeof (tree_type_non_common);
995 default:
996 gcc_checking_assert (code >= NUM_TREE_CODES);
997 return lang_hooks.tree_size (code);
1000 case tcc_reference: /* a reference */
1001 case tcc_expression: /* an expression */
1002 case tcc_statement: /* an expression with side effects */
1003 case tcc_comparison: /* a comparison expression */
1004 case tcc_unary: /* a unary arithmetic expression */
1005 case tcc_binary: /* a binary arithmetic expression */
1006 return (sizeof (struct tree_exp)
1007 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1009 case tcc_constant: /* a constant */
1010 switch (code)
1012 case VOID_CST: return sizeof (tree_typed);
1013 case INTEGER_CST: gcc_unreachable ();
1014 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1015 case REAL_CST: return sizeof (tree_real_cst);
1016 case FIXED_CST: return sizeof (tree_fixed_cst);
1017 case COMPLEX_CST: return sizeof (tree_complex);
1018 case VECTOR_CST: gcc_unreachable ();
1019 case STRING_CST: gcc_unreachable ();
1020 default:
1021 gcc_checking_assert (code >= NUM_TREE_CODES);
1022 return lang_hooks.tree_size (code);
1025 case tcc_exceptional: /* something random, like an identifier. */
1026 switch (code)
1028 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1029 case TREE_LIST: return sizeof (tree_list);
1031 case ERROR_MARK:
1032 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1034 case TREE_VEC: gcc_unreachable ();
1035 case OMP_CLAUSE: gcc_unreachable ();
1037 case SSA_NAME: return sizeof (tree_ssa_name);
1039 case STATEMENT_LIST: return sizeof (tree_statement_list);
1040 case BLOCK: return sizeof (struct tree_block);
1041 case CONSTRUCTOR: return sizeof (tree_constructor);
1042 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1043 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1045 default:
1046 gcc_checking_assert (code >= NUM_TREE_CODES);
1047 return lang_hooks.tree_size (code);
1050 default:
1051 gcc_unreachable ();
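/* Worked example (illustrative): for a fixed-size expression code such as
   PLUS_EXPR (TREE_CODE_LENGTH == 2) the result is

     sizeof (struct tree_exp) + (2 - 1) * sizeof (tree)

   i.e. the base layout, which already embeds one operand slot, plus one
   additional operand pointer.  */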
1055 /* Compute the number of bytes occupied by NODE. This routine only
1056 looks at TREE_CODE, except for those nodes that have variable sizes. */
1057 size_t
1058 tree_size (const_tree node)
1060 const enum tree_code code = TREE_CODE (node);
1061 switch (code)
1063 case INTEGER_CST:
1064 return (sizeof (struct tree_int_cst)
1065 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1067 case TREE_BINFO:
1068 return (offsetof (struct tree_binfo, base_binfos)
1069 + vec<tree, va_gc>
1070 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1072 case TREE_VEC:
1073 return (sizeof (struct tree_vec)
1074 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1076 case VECTOR_CST:
1077 return (sizeof (struct tree_vector)
1078 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1080 case STRING_CST:
1081 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1083 case OMP_CLAUSE:
1084 return (sizeof (struct tree_omp_clause)
1085 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1086 * sizeof (tree));
1088 default:
1089 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1090 return (sizeof (struct tree_exp)
1091 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1092 else
1093 return tree_code_size (code);
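/* Worked example (illustrative): variable-sized nodes depend on the node
   itself, not just its code.  An INTEGER_CST with TREE_INT_CST_EXT_NUNITS
   of 2 occupies

     sizeof (struct tree_int_cst) + (2 - 1) * sizeof (HOST_WIDE_INT)

   and an OMP_CLAUSE's size is driven by omp_clause_num_ops[] for its
   clause code.  */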
1097 /* Return tree node kind based on tree CODE. */
1099 static tree_node_kind
1100 get_stats_node_kind (enum tree_code code)
1102 enum tree_code_class type = TREE_CODE_CLASS (code);
1104 switch (type)
1106 case tcc_declaration: /* A decl node */
1107 return d_kind;
1108 case tcc_type: /* a type node */
1109 return t_kind;
1110 case tcc_statement: /* an expression with side effects */
1111 return s_kind;
1112 case tcc_reference: /* a reference */
1113 return r_kind;
1114 case tcc_expression: /* an expression */
1115 case tcc_comparison: /* a comparison expression */
1116 case tcc_unary: /* a unary arithmetic expression */
1117 case tcc_binary: /* a binary arithmetic expression */
1118 return e_kind;
1119 case tcc_constant: /* a constant */
1120 return c_kind;
1121 case tcc_exceptional: /* something random, like an identifier. */
1122 switch (code)
1124 case IDENTIFIER_NODE:
1125 return id_kind;
1126 case TREE_VEC:
1127 return vec_kind;
1128 case TREE_BINFO:
1129 return binfo_kind;
1130 case SSA_NAME:
1131 return ssa_name_kind;
1132 case BLOCK:
1133 return b_kind;
1134 case CONSTRUCTOR:
1135 return constr_kind;
1136 case OMP_CLAUSE:
1137 return omp_clause_kind;
1138 default:
1139 return x_kind;
1141 break;
1142 case tcc_vl_exp:
1143 return e_kind;
1144 default:
1145 gcc_unreachable ();
1149 /* Record interesting allocation statistics for a tree node with CODE
1150 and LENGTH. */
1152 static void
1153 record_node_allocation_statistics (enum tree_code code, size_t length)
1155 if (!GATHER_STATISTICS)
1156 return;
1158 tree_node_kind kind = get_stats_node_kind (code);
1160 tree_code_counts[(int) code]++;
1161 tree_node_counts[(int) kind]++;
1162 tree_node_sizes[(int) kind] += length;
1165 /* Allocate and return a new UID from the DECL_UID namespace. */
1168 allocate_decl_uid (void)
1170 return next_decl_uid++;
1173 /* Return a newly allocated node of code CODE. For decl and type
1174 nodes, some other fields are initialized. The rest of the node is
1175 initialized to zero. This function cannot be used for TREE_VEC,
1176 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1177 tree_code_size.
1179 Achoo! I got a code in the node. */
1181 tree
1182 make_node (enum tree_code code MEM_STAT_DECL)
1184 tree t;
1185 enum tree_code_class type = TREE_CODE_CLASS (code);
1186 size_t length = tree_code_size (code);
1188 record_node_allocation_statistics (code, length);
1190 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1191 TREE_SET_CODE (t, code);
1193 switch (type)
1195 case tcc_statement:
1196 if (code != DEBUG_BEGIN_STMT)
1197 TREE_SIDE_EFFECTS (t) = 1;
1198 break;
1200 case tcc_declaration:
1201 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1203 if (code == FUNCTION_DECL)
1205 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1206 SET_DECL_MODE (t, FUNCTION_MODE);
1208 else
1209 SET_DECL_ALIGN (t, 1);
1211 DECL_SOURCE_LOCATION (t) = input_location;
1212 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1213 DECL_UID (t) = --next_debug_decl_uid;
1214 else
1216 DECL_UID (t) = allocate_decl_uid ();
1217 SET_DECL_PT_UID (t, -1);
1219 if (TREE_CODE (t) == LABEL_DECL)
1220 LABEL_DECL_UID (t) = -1;
1222 break;
1224 case tcc_type:
1225 TYPE_UID (t) = next_type_uid++;
1226 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1227 TYPE_USER_ALIGN (t) = 0;
1228 TYPE_MAIN_VARIANT (t) = t;
1229 TYPE_CANONICAL (t) = t;
1231 /* Default to no attributes for type, but let target change that. */
1232 TYPE_ATTRIBUTES (t) = NULL_TREE;
1233 targetm.set_default_type_attributes (t);
1235 /* We have not yet computed the alias set for this type. */
1236 TYPE_ALIAS_SET (t) = -1;
1237 break;
1239 case tcc_constant:
1240 TREE_CONSTANT (t) = 1;
1241 break;
1243 case tcc_expression:
1244 switch (code)
1246 case INIT_EXPR:
1247 case MODIFY_EXPR:
1248 case VA_ARG_EXPR:
1249 case PREDECREMENT_EXPR:
1250 case PREINCREMENT_EXPR:
1251 case POSTDECREMENT_EXPR:
1252 case POSTINCREMENT_EXPR:
1253 /* All of these have side-effects, no matter what their
1254 operands are. */
1255 TREE_SIDE_EFFECTS (t) = 1;
1256 break;
1258 default:
1259 break;
1261 break;
1263 case tcc_exceptional:
1264 switch (code)
1266 case TARGET_OPTION_NODE:
1267 TREE_TARGET_OPTION(t)
1268 = ggc_cleared_alloc<struct cl_target_option> ();
1269 break;
1271 case OPTIMIZATION_NODE:
1272 TREE_OPTIMIZATION (t)
1273 = ggc_cleared_alloc<struct cl_optimization> ();
1274 break;
1276 default:
1277 break;
1279 break;
1281 default:
1282 /* Other classes need no special treatment. */
1283 break;
1286 return t;
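/* Usage sketch (illustrative): a freshly made type node comes back with
   self-referential variant/canonical links and no alias set computed yet.

     tree t = make_node (RECORD_TYPE);
     // TYPE_MAIN_VARIANT (t) == t, TYPE_CANONICAL (t) == t,
     // TYPE_ALIGN (t) == BITS_PER_UNIT, TYPE_ALIAS_SET (t) == -1.  */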
1289 /* Free tree node. */
1291 void
1292 free_node (tree node)
1294 enum tree_code code = TREE_CODE (node);
1295 if (GATHER_STATISTICS)
1297 enum tree_node_kind kind = get_stats_node_kind (code);
1299 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1300 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1301 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1303 tree_code_counts[(int) TREE_CODE (node)]--;
1304 tree_node_counts[(int) kind]--;
1305 tree_node_sizes[(int) kind] -= tree_size (node);
1307 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1308 vec_free (CONSTRUCTOR_ELTS (node));
1309 else if (code == BLOCK)
1310 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1311 else if (code == TREE_BINFO)
1312 vec_free (BINFO_BASE_ACCESSES (node));
1313 else if (code == OPTIMIZATION_NODE)
1314 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1315 else if (code == TARGET_OPTION_NODE)
1316 cl_target_option_free (TREE_TARGET_OPTION (node));
1317 ggc_free (node);
1320 /* Return a new node with the same contents as NODE except that its
1321 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1323 tree
1324 copy_node (tree node MEM_STAT_DECL)
1326 tree t;
1327 enum tree_code code = TREE_CODE (node);
1328 size_t length;
1330 gcc_assert (code != STATEMENT_LIST);
1332 length = tree_size (node);
1333 record_node_allocation_statistics (code, length);
1334 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1335 memcpy (t, node, length);
1337 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1338 TREE_CHAIN (t) = 0;
1339 TREE_ASM_WRITTEN (t) = 0;
1340 TREE_VISITED (t) = 0;
1342 if (TREE_CODE_CLASS (code) == tcc_declaration)
1344 if (code == DEBUG_EXPR_DECL)
1345 DECL_UID (t) = --next_debug_decl_uid;
1346 else
1348 DECL_UID (t) = allocate_decl_uid ();
1349 if (DECL_PT_UID_SET_P (node))
1350 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1352 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1353 && DECL_HAS_VALUE_EXPR_P (node))
1355 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1356 DECL_HAS_VALUE_EXPR_P (t) = 1;
1358 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1359 if (VAR_P (node))
1361 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1362 t->decl_with_vis.symtab_node = NULL;
1364 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1366 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1367 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1369 if (TREE_CODE (node) == FUNCTION_DECL)
1371 DECL_STRUCT_FUNCTION (t) = NULL;
1372 t->decl_with_vis.symtab_node = NULL;
1375 else if (TREE_CODE_CLASS (code) == tcc_type)
1377 TYPE_UID (t) = next_type_uid++;
1378 /* The following is so that the debug code for
1379 the copy is different from the original type.
1380 The two statements usually duplicate each other
1381 (because they clear fields of the same union),
1382 but the optimizer should catch that. */
1383 TYPE_SYMTAB_ADDRESS (t) = 0;
1384 TYPE_SYMTAB_DIE (t) = 0;
1386 /* Do not copy the values cache. */
1387 if (TYPE_CACHED_VALUES_P (t))
1389 TYPE_CACHED_VALUES_P (t) = 0;
1390 TYPE_CACHED_VALUES (t) = NULL_TREE;
1393 else if (code == TARGET_OPTION_NODE)
1395 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1396 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1397 sizeof (struct cl_target_option));
1399 else if (code == OPTIMIZATION_NODE)
1401 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1402 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1403 sizeof (struct cl_optimization));
1406 return t;
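/* Usage sketch (illustrative): the copy is shallow and gets fresh identity.

     tree v2 = copy_node (v1);   // v1: some VAR_DECL
     // DECL_UID (v2) != DECL_UID (v1), TREE_CHAIN (v2) == NULL_TREE,
     // and v2's symtab_node pointer is cleared.  */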
1409 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1410 For example, this can copy a list made of TREE_LIST nodes. */
1412 tree
1413 copy_list (tree list)
1415 tree head;
1416 tree prev, next;
1418 if (list == 0)
1419 return 0;
1421 head = prev = copy_node (list);
1422 next = TREE_CHAIN (list);
1423 while (next)
1425 TREE_CHAIN (prev) = copy_node (next);
1426 prev = TREE_CHAIN (prev);
1427 next = TREE_CHAIN (next);
1429 return head;
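/* Usage sketch (illustrative): only the TREE_LIST cells are duplicated;
   the TREE_PURPOSE/TREE_VALUE operands remain shared with the original.

     tree orig = tree_cons (NULL_TREE, integer_zero_node, NULL_TREE);
     tree copy = copy_list (orig);
     // copy != orig, but TREE_VALUE (copy) == TREE_VALUE (orig).  */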
1433 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1434 INTEGER_CST with value CST and type TYPE. */
1436 static unsigned int
1437 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1439 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1440 /* We need extra HWIs if CST is an unsigned integer with its
1441 upper bit set. */
1442 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1443 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1444 return cst.get_len ();
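/* Worked example (illustrative): for a 64-bit unsigned type the value
   0xffffffffffffffff looks negative when viewed as a signed wide_int, so
   this returns 64 / HOST_BITS_PER_WIDE_INT + 1 == 2 on a 64-bit-HWI host;
   build_new_int_cst then stores an explicit zero high element to record
   the zero extension.  */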
1447 /* Return a new INTEGER_CST with value CST and type TYPE. */
1449 static tree
1450 build_new_int_cst (tree type, const wide_int &cst)
1452 unsigned int len = cst.get_len ();
1453 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1454 tree nt = make_int_cst (len, ext_len);
1456 if (len < ext_len)
1458 --ext_len;
1459 TREE_INT_CST_ELT (nt, ext_len)
1460 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1461 for (unsigned int i = len; i < ext_len; ++i)
1462 TREE_INT_CST_ELT (nt, i) = -1;
1464 else if (TYPE_UNSIGNED (type)
1465 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1467 len--;
1468 TREE_INT_CST_ELT (nt, len)
1469 = zext_hwi (cst.elt (len),
1470 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1473 for (unsigned int i = 0; i < len; i++)
1474 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1475 TREE_TYPE (nt) = type;
1476 return nt;
1479 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1481 static tree
1482 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1483 CXX_MEM_STAT_INFO)
1485 size_t length = sizeof (struct tree_poly_int_cst);
1486 record_node_allocation_statistics (POLY_INT_CST, length);
1488 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1490 TREE_SET_CODE (t, POLY_INT_CST);
1491 TREE_CONSTANT (t) = 1;
1492 TREE_TYPE (t) = type;
1493 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1494 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1495 return t;
1498 /* Create a constant tree that contains CST sign-extended to TYPE. */
1500 tree
1501 build_int_cst (tree type, poly_int64 cst)
1503 /* Support legacy code. */
1504 if (!type)
1505 type = integer_type_node;
1507 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1510 /* Create a constant tree that contains CST zero-extended to TYPE. */
1512 tree
1513 build_int_cstu (tree type, poly_uint64 cst)
1515 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1518 /* Create a constant tree that contains CST sign-extended to TYPE. */
1520 tree
1521 build_int_cst_type (tree type, poly_int64 cst)
1523 gcc_assert (type);
1524 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
 1527 /* Construct a tree of type TYPE with the value given by CST.  The signedness
 1528    of CST is assumed to be the same as the signedness of TYPE.  */
1530 tree
1531 double_int_to_tree (tree type, double_int cst)
1533 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1536 /* We force the wide_int CST to the range of the type TYPE by sign or
 1537    zero extending it.  OVERFLOWABLE indicates whether we are interested in
 1538    overflow of the value: when >0 we are only interested in signed
 1539    overflow, when <0 we are interested in any overflow.  OVERFLOWED
 1540    indicates whether overflow has already occurred.  We force
 1541    CST's value to be within the range of TYPE (by setting to 0 or 1 all
 1542    the bits outside the type's range).  We set TREE_OVERFLOW if
 1543      OVERFLOWED is nonzero,
 1544      or OVERFLOWABLE is >0 and signed overflow occurs,
 1545      or OVERFLOWABLE is <0 and any overflow occurs.
1547 We return a new tree node for the extended wide_int. The node
1548 is shared if no overflow flags are set. */
1551 tree
1552 force_fit_type (tree type, const poly_wide_int_ref &cst,
1553 int overflowable, bool overflowed)
1555 signop sign = TYPE_SIGN (type);
1557 /* If we need to set overflow flags, return a new unshared node. */
1558 if (overflowed || !wi::fits_to_tree_p (cst, type))
1560 if (overflowed
1561 || overflowable < 0
1562 || (overflowable > 0 && sign == SIGNED))
1564 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1565 sign);
1566 tree t;
1567 if (tmp.is_constant ())
1568 t = build_new_int_cst (type, tmp.coeffs[0]);
1569 else
1571 tree coeffs[NUM_POLY_INT_COEFFS];
1572 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1574 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1575 TREE_OVERFLOW (coeffs[i]) = 1;
1577 t = build_new_poly_int_cst (type, coeffs);
1579 TREE_OVERFLOW (t) = 1;
1580 return t;
1584 /* Else build a shared node. */
1585 return wide_int_to_tree (type, cst);
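/* Worked example (illustrative): fitting 300 into an 8-bit unsigned type.
   300 does not fit, so with OVERFLOWABLE > 0 (signed overflow only) the
   shared constant 44 (300 mod 256) is returned without TREE_OVERFLOW,
   whereas OVERFLOWED or OVERFLOWABLE < 0 yields a fresh, unshared node
   for 44 with TREE_OVERFLOW set.  */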
1588 /* These are the hash table functions for the hash table of INTEGER_CST
1589 nodes of a sizetype. */
1591 /* Return the hash code X, an INTEGER_CST. */
1593 hashval_t
1594 int_cst_hasher::hash (tree x)
1596 const_tree const t = x;
1597 hashval_t code = TYPE_UID (TREE_TYPE (t));
1598 int i;
1600 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1601 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1603 return code;
 1606 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
 1607    is the same as that represented by Y, which is also an INTEGER_CST.  */
1609 bool
1610 int_cst_hasher::equal (tree x, tree y)
1612 const_tree const xt = x;
1613 const_tree const yt = y;
1615 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1616 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1617 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1618 return false;
1620 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1621 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1622 return false;
1624 return true;
1627 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1628 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1629 number of slots that can be cached for the type. */
1631 static inline tree
1632 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1633 int slot, int max_slots)
1635 gcc_checking_assert (slot >= 0);
1636 /* Initialize cache. */
1637 if (!TYPE_CACHED_VALUES_P (type))
1639 TYPE_CACHED_VALUES_P (type) = 1;
1640 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1642 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1643 if (!t)
1645 /* Create a new shared int. */
1646 t = build_new_int_cst (type, cst);
1647 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1649 return t;
 1652 /* Create an INTEGER_CST node of TYPE and value CST.
1653 The returned node is always shared. For small integers we use a
1654 per-type vector cache, for larger ones we use a single hash table.
1655 The value is extended from its precision according to the sign of
1656 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1657 the upper bits and ensures that hashing and value equality based
1658 upon the underlying HOST_WIDE_INTs works without masking. */
1660 static tree
1661 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1663 tree t;
1664 int ix = -1;
1665 int limit = 0;
1667 gcc_assert (type);
1668 unsigned int prec = TYPE_PRECISION (type);
1669 signop sgn = TYPE_SIGN (type);
1671 /* Verify that everything is canonical. */
1672 int l = pcst.get_len ();
1673 if (l > 1)
1675 if (pcst.elt (l - 1) == 0)
1676 gcc_checking_assert (pcst.elt (l - 2) < 0);
1677 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1678 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1681 wide_int cst = wide_int::from (pcst, prec, sgn);
1682 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1684 enum tree_code code = TREE_CODE (type);
1685 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1687 /* Cache NULL pointer and zero bounds. */
1688 if (cst == 0)
1689 ix = 0;
1690 /* Cache upper bounds of pointers. */
1691 else if (cst == wi::max_value (prec, sgn))
1692 ix = 1;
1693 /* Cache 1 which is used for a non-zero range. */
1694 else if (cst == 1)
1695 ix = 2;
1697 if (ix >= 0)
1699 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1700 /* Make sure no one is clobbering the shared constant. */
1701 gcc_checking_assert (TREE_TYPE (t) == type
1702 && cst == wi::to_wide (t));
1703 return t;
1706 if (ext_len == 1)
1708 /* We just need to store a single HOST_WIDE_INT. */
1709 HOST_WIDE_INT hwi;
1710 if (TYPE_UNSIGNED (type))
1711 hwi = cst.to_uhwi ();
1712 else
1713 hwi = cst.to_shwi ();
1715 switch (code)
1717 case NULLPTR_TYPE:
1718 gcc_assert (hwi == 0);
1719 /* Fallthru. */
1721 case POINTER_TYPE:
1722 case REFERENCE_TYPE:
1723 /* Ignore pointers, as they were already handled above. */
1724 break;
1726 case BOOLEAN_TYPE:
1727 /* Cache false or true. */
1728 limit = 2;
1729 if (IN_RANGE (hwi, 0, 1))
1730 ix = hwi;
1731 break;
1733 case INTEGER_TYPE:
1734 case OFFSET_TYPE:
1735 if (TYPE_SIGN (type) == UNSIGNED)
1737 /* Cache [0, N). */
1738 limit = param_integer_share_limit;
1739 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1740 ix = hwi;
1742 else
1744 /* Cache [-1, N). */
1745 limit = param_integer_share_limit + 1;
1746 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1747 ix = hwi + 1;
1749 break;
1751 case ENUMERAL_TYPE:
1752 break;
1754 default:
1755 gcc_unreachable ();
1758 if (ix >= 0)
1760 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1761 /* Make sure no one is clobbering the shared constant. */
1762 gcc_checking_assert (TREE_TYPE (t) == type
1763 && TREE_INT_CST_NUNITS (t) == 1
1764 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1765 && TREE_INT_CST_EXT_NUNITS (t) == 1
1766 && TREE_INT_CST_ELT (t, 0) == hwi);
1767 return t;
1769 else
1771 /* Use the cache of larger shared ints, using int_cst_node as
1772 a temporary. */
1774 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1775 TREE_TYPE (int_cst_node) = type;
1777 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1778 t = *slot;
1779 if (!t)
1781 /* Insert this one into the hash table. */
1782 t = int_cst_node;
1783 *slot = t;
1784 /* Make a new node for next time round. */
1785 int_cst_node = make_int_cst (1, 1);
1789 else
1791 /* The value either hashes properly or we drop it on the floor
1792 for the gc to take care of. There will not be enough of them
1793 to worry about. */
1795 tree nt = build_new_int_cst (type, cst);
1796 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1797 t = *slot;
1798 if (!t)
1800 /* Insert this one into the hash table. */
1801 t = nt;
1802 *slot = t;
1804 else
1805 ggc_free (nt);
1808 return t;
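/* Usage sketch (illustrative): small values hit the per-type vector cache,
   so repeated requests return the identical shared node.

     tree a = build_int_cst (integer_type_node, 3);
     tree b = build_int_cst (integer_type_node, 3);
     // a == b: signed integer types cache [-1, param_integer_share_limit)
     // at slot value + 1; pointer types cache 0, their maximum value and 1.  */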
1811 hashval_t
1812 poly_int_cst_hasher::hash (tree t)
1814 inchash::hash hstate;
1816 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1817 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1818 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1820 return hstate.end ();
1823 bool
1824 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1826 if (TREE_TYPE (x) != y.first)
1827 return false;
1828 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1829 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1830 return false;
1831 return true;
1834 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1835 The elements must also have type TYPE. */
1837 tree
1838 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1840 unsigned int prec = TYPE_PRECISION (type);
1841 gcc_assert (prec <= values.coeffs[0].get_precision ());
1842 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1844 inchash::hash h;
1845 h.add_int (TYPE_UID (type));
1846 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1847 h.add_wide_int (c.coeffs[i]);
1848 poly_int_cst_hasher::compare_type comp (type, &c);
1849 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1850 INSERT);
1851 if (*slot == NULL_TREE)
1853 tree coeffs[NUM_POLY_INT_COEFFS];
1854 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1855 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1856 *slot = build_new_poly_int_cst (type, coeffs);
1858 return *slot;
1861 /* Create a constant tree with value VALUE in type TYPE. */
1863 tree
1864 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1866 if (value.is_constant ())
1867 return wide_int_to_tree_1 (type, value.coeffs[0]);
1868 return build_poly_int_cst (type, value);
1871 /* Insert INTEGER_CST T into a cache of integer constants. And return
1872 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1873 is false, and T falls into the type's 'smaller values' range, there
1874 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1875 or the value is large, should an existing entry exist, it is
1876 returned (rather than inserting T). */
1878 tree
1879 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1881 tree type = TREE_TYPE (t);
1882 int ix = -1;
1883 int limit = 0;
1884 int prec = TYPE_PRECISION (type);
1886 gcc_assert (!TREE_OVERFLOW (t));
1888 /* The caching indices here must match those in
 1889      wide_int_to_tree_1.  */
1890 switch (TREE_CODE (type))
1892 case NULLPTR_TYPE:
1893 gcc_checking_assert (integer_zerop (t));
1894 /* Fallthru. */
1896 case POINTER_TYPE:
1897 case REFERENCE_TYPE:
1899 if (integer_zerop (t))
1900 ix = 0;
1901 else if (integer_onep (t))
1902 ix = 2;
1904 if (ix >= 0)
1905 limit = 3;
1907 break;
1909 case BOOLEAN_TYPE:
1910 /* Cache false or true. */
1911 limit = 2;
1912 if (wi::ltu_p (wi::to_wide (t), 2))
1913 ix = TREE_INT_CST_ELT (t, 0);
1914 break;
1916 case INTEGER_TYPE:
1917 case OFFSET_TYPE:
1918 if (TYPE_UNSIGNED (type))
1920 /* Cache 0..N */
1921 limit = param_integer_share_limit;
 1923       /* This is a little hokey, but if the precision is smaller than
 1924          what is necessary to hold param_integer_share_limit, then the
 1925          obvious test will not get the correct answer.  */
1926 if (prec < HOST_BITS_PER_WIDE_INT)
1928 if (tree_to_uhwi (t)
1929 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1930 ix = tree_to_uhwi (t);
1932 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1933 ix = tree_to_uhwi (t);
1935 else
1937 /* Cache -1..N */
1938 limit = param_integer_share_limit + 1;
1940 if (integer_minus_onep (t))
1941 ix = 0;
1942 else if (!wi::neg_p (wi::to_wide (t)))
1944 if (prec < HOST_BITS_PER_WIDE_INT)
1946 if (tree_to_shwi (t) < param_integer_share_limit)
1947 ix = tree_to_shwi (t) + 1;
1949 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1950 ix = tree_to_shwi (t) + 1;
1953 break;
1955 case ENUMERAL_TYPE:
1956 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1957 members. */
1958 break;
1960 default:
1961 gcc_unreachable ();
1964 if (ix >= 0)
1966 /* Look for it in the type's vector of small shared ints. */
1967 if (!TYPE_CACHED_VALUES_P (type))
1969 TYPE_CACHED_VALUES_P (type) = 1;
1970 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1973 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1975 gcc_checking_assert (might_duplicate);
1976 t = r;
1978 else
1979 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1981 else
1983 /* Use the cache of larger shared ints. */
1984 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1985 if (tree r = *slot)
1987 /* If there is already an entry for the number verify it's the
1988 same value. */
1989 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1990 /* And return the cached value. */
1991 t = r;
1993 else
1994 /* Otherwise insert this one into the hash table. */
1995 *slot = t;
1998 return t;
 2002 /* Build an integer constant of type TYPE whose lowest BITS bits are ones
 2003    and whose remaining bits are zeros.  */
2005 tree
2006 build_low_bits_mask (tree type, unsigned bits)
2008 gcc_assert (bits <= TYPE_PRECISION (type));
2010 return wide_int_to_tree (type, wi::mask (bits, false,
2011 TYPE_PRECISION (type)));
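/* Illustrative sketch, not part of the original source: a mask of the
   low 3 bits of a 32-bit unsigned type.

     tree mask = build_low_bits_mask (unsigned_type_node, 3);

   MASK is the INTEGER_CST 7; passing BITS greater than the type's
   precision would trip the assert above.  */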
2014 /* Checks that X is an integer constant that can be expressed in (unsigned)
2015 HOST_WIDE_INT without loss of precision. */
2017 bool
2018 cst_and_fits_in_hwi (const_tree x)
2020 return (TREE_CODE (x) == INTEGER_CST
2021 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2024 /* Build a newly constructed VECTOR_CST with the given values of
2025 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2027 tree
2028 make_vector (unsigned log2_npatterns,
2029 unsigned int nelts_per_pattern MEM_STAT_DECL)
2031 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2032 tree t;
2033 unsigned npatterns = 1 << log2_npatterns;
2034 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2035 unsigned length = (sizeof (struct tree_vector)
2036 + (encoded_nelts - 1) * sizeof (tree));
2038 record_node_allocation_statistics (VECTOR_CST, length);
2040 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2042 TREE_SET_CODE (t, VECTOR_CST);
2043 TREE_CONSTANT (t) = 1;
2044 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2045 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2047 return t;
2050 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2051 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2053 tree
2054 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2056 if (vec_safe_length (v) == 0)
2057 return build_zero_cst (type);
2059 unsigned HOST_WIDE_INT idx, nelts;
2060 tree value;
2062 /* We can't construct a VECTOR_CST for a variable number of elements. */
2063 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2064 tree_vector_builder vec (type, nelts, 1);
2065 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2067 if (TREE_CODE (value) == VECTOR_CST)
2069 /* If NELTS is constant then this must be too. */
2070 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2071 for (unsigned i = 0; i < sub_nelts; ++i)
2072 vec.quick_push (VECTOR_CST_ELT (value, i));
2074 else
2075 vec.quick_push (value);
2077 while (vec.length () < nelts)
2078 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2080 return vec.build ();
2083 /* Build a vector of type VECTYPE where all the elements are SCs. */
2084 tree
2085 build_vector_from_val (tree vectype, tree sc)
2087 unsigned HOST_WIDE_INT i, nunits;
2089 if (sc == error_mark_node)
2090 return sc;
2092 /* Verify that the vector type is suitable for SC. Note that there
2093 is some inconsistency in the type-system with respect to restrict
2094 qualifications of pointers. Vector types always have a main-variant
2095 element type and the qualification is applied to the vector-type.
2096 So TREE_TYPE (vector-type) does not return a properly qualified
2097 vector element-type. */
2098 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2099 TREE_TYPE (vectype)));
2101 if (CONSTANT_CLASS_P (sc))
2103 tree_vector_builder v (vectype, 1, 1);
2104 v.quick_push (sc);
2105 return v.build ();
2107 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2108 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2109 else
2111 vec<constructor_elt, va_gc> *v;
2112 vec_alloc (v, nunits);
2113 for (i = 0; i < nunits; ++i)
2114 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2115 return build_constructor (vectype, v);
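/* Illustrative sketch, not part of the original source: splatting the
   scalar 5 across a four-element integer vector type V4SI_TYPE, which is
   assumed to have been created elsewhere, e.g. with
   build_vector_type (integer_type_node, 4).

     tree five = build_int_cst (integer_type_node, 5);
     tree splat = build_vector_from_val (v4si_type, five);

   Since FIVE is constant, SPLAT is a VECTOR_CST with a single duplicated
   pattern rather than a CONSTRUCTOR.  */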
2119 /* If TYPE is not a vector type, just return SC, otherwise return
2120 build_vector_from_val (TYPE, SC). */
2122 tree
2123 build_uniform_cst (tree type, tree sc)
2125 if (!VECTOR_TYPE_P (type))
2126 return sc;
2128 return build_vector_from_val (type, sc);
2131 /* Build a vector series of type TYPE in which element I has the value
2132 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2133 and a VEC_SERIES_EXPR otherwise. */
2135 tree
2136 build_vec_series (tree type, tree base, tree step)
2138 if (integer_zerop (step))
2139 return build_vector_from_val (type, base);
2140 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2142 tree_vector_builder builder (type, 1, 3);
2143 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2144 wi::to_wide (base) + wi::to_wide (step));
2145 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (elt1) + wi::to_wide (step));
2147 builder.quick_push (base);
2148 builder.quick_push (elt1);
2149 builder.quick_push (elt2);
2150 return builder.build ();
2152 return build2 (VEC_SERIES_EXPR, type, base, step);
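/* Illustrative sketch, not part of the original source: with constant
   BASE 0 and STEP 1 on a vector type VEC_INDEX_TYPE whose element type
   is INDEX_TYPE (both names assumed for the example), the result is the
   VECTOR_CST { 0, 1, 2, 3, ... }.

     tree series = build_vec_series (vec_index_type,
                                     build_int_cst (index_type, 0),
                                     build_int_cst (index_type, 1));

   Only the first three elements are encoded explicitly; the rest follow
   from the stepped pattern.  A non-constant BASE or STEP yields a
   VEC_SERIES_EXPR instead.  */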
2155 /* Return a vector with the same number of units and number of bits
2156 as VEC_TYPE, but in which the elements are a linear series of unsigned
2157 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2159 tree
2160 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2162 tree index_vec_type = vec_type;
2163 tree index_elt_type = TREE_TYPE (vec_type);
2164 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2165 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2167 index_elt_type = build_nonstandard_integer_type
2168 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2169 index_vec_type = build_vector_type (index_elt_type, nunits);
2172 tree_vector_builder v (index_vec_type, 1, 3);
2173 for (unsigned int i = 0; i < 3; ++i)
2174 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2175 return v.build ();
2178 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2179 elements are A and the rest are B. */
2181 tree
2182 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2184 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2185 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2186 /* Optimize the constant case. */
2187 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2188 count /= 2;
2189 tree_vector_builder builder (vec_type, count, 2);
2190 for (unsigned int i = 0; i < count * 2; ++i)
2191 builder.quick_push (i < num_a ? a : b);
2192 return builder.build ();
2195 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2196 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2198 void
2199 recompute_constructor_flags (tree c)
2201 unsigned int i;
2202 tree val;
2203 bool constant_p = true;
2204 bool side_effects_p = false;
2205 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2207 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2209 /* Mostly ctors will have elts that don't have side-effects, so
2210 the usual case is to scan all the elements. Hence a single
2211 loop for both const and side effects, rather than one loop
2212 each (with early outs). */
2213 if (!TREE_CONSTANT (val))
2214 constant_p = false;
2215 if (TREE_SIDE_EFFECTS (val))
2216 side_effects_p = true;
2219 TREE_SIDE_EFFECTS (c) = side_effects_p;
2220 TREE_CONSTANT (c) = constant_p;
2223 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2224 CONSTRUCTOR C. */
2226 void
2227 verify_constructor_flags (tree c)
2229 unsigned int i;
2230 tree val;
2231 bool constant_p = TREE_CONSTANT (c);
2232 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2233 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2235 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2237 if (constant_p && !TREE_CONSTANT (val))
2238 internal_error ("non-constant element in constant CONSTRUCTOR");
2239 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2240 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2244 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2245 are in the vec pointed to by VALS. */
2246 tree
2247 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2249 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2251 TREE_TYPE (c) = type;
2252 CONSTRUCTOR_ELTS (c) = vals;
2254 recompute_constructor_flags (c);
2256 return c;
2259 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2260 INDEX and VALUE. */
2261 tree
2262 build_constructor_single (tree type, tree index, tree value)
2264 vec<constructor_elt, va_gc> *v;
2265 constructor_elt elt = {index, value};
2267 vec_alloc (v, 1);
2268 v->quick_push (elt);
2270 return build_constructor (type, v);
2274 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2275 are in a list pointed to by VALS. */
2276 tree
2277 build_constructor_from_list (tree type, tree vals)
2279 tree t;
2280 vec<constructor_elt, va_gc> *v = NULL;
2282 if (vals)
2284 vec_alloc (v, list_length (vals));
2285 for (t = vals; t; t = TREE_CHAIN (t))
2286 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2289 return build_constructor (type, v);
2292 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2293 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2294 fields in the constructor remain null. */
2296 tree
2297 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2299 vec<constructor_elt, va_gc> *v = NULL;
2301 for (tree t : vals)
2302 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2304 return build_constructor (type, v);
2307 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2308 of elements, provided as index/value pairs. */
2310 tree
2311 build_constructor_va (tree type, int nelts, ...)
2313 vec<constructor_elt, va_gc> *v = NULL;
2314 va_list p;
2316 va_start (p, nelts);
2317 vec_alloc (v, nelts);
2318 while (nelts--)
2320 tree index = va_arg (p, tree);
2321 tree value = va_arg (p, tree);
2322 CONSTRUCTOR_APPEND_ELT (v, index, value);
2324 va_end (p);
2325 return build_constructor (type, v);
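/* Illustrative sketch, not part of the original source: building a
   two-element CONSTRUCTOR for a hypothetical RECORD_TYPE REC_TYPE whose
   FIELD_DECLs F1 and F2 were already looked up by the caller.

     tree init = build_constructor_va (rec_type, 2,
                                       f1, integer_zero_node,
                                       f2, integer_one_node);

   Index/value pairs are consumed from the variadic arguments in order.  */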
2328 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2330 tree
2331 build_clobber (tree type, enum clobber_kind kind)
2333 tree clobber = build_constructor (type, NULL);
2334 TREE_THIS_VOLATILE (clobber) = true;
2335 CLOBBER_KIND (clobber) = kind;
2336 return clobber;
2339 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2341 tree
2342 build_fixed (tree type, FIXED_VALUE_TYPE f)
2344 tree v;
2345 FIXED_VALUE_TYPE *fp;
2347 v = make_node (FIXED_CST);
2348 fp = ggc_alloc<fixed_value> ();
2349 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2351 TREE_TYPE (v) = type;
2352 TREE_FIXED_CST_PTR (v) = fp;
2353 return v;
2356 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2358 tree
2359 build_real (tree type, REAL_VALUE_TYPE d)
2361 tree v;
2362 int overflow = 0;
2364 /* dconst{0,1,2,m1,half} are used in various places in
2365 the middle-end and optimizers; allow them here
2366 even for decimal floating point types, as an exception,
2367 by converting them to decimal. */
2368 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2369 && (d.cl == rvc_normal || d.cl == rvc_zero)
2370 && !d.decimal)
2372 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2373 decimal_real_from_string (&d, "1");
2374 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2375 decimal_real_from_string (&d, "2");
2376 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2377 decimal_real_from_string (&d, "-1");
2378 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2379 decimal_real_from_string (&d, "0.5");
2380 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2382 /* Make sure to give zero the minimum quantum exponent for
2383 the type (which corresponds to all bits zero). */
2384 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2385 char buf[16];
2386 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2387 decimal_real_from_string (&d, buf);
2389 else
2390 gcc_unreachable ();
2393 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2394 Consider doing it via real_convert now. */
2396 v = make_node (REAL_CST);
2397 TREE_TYPE (v) = type;
2398 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2399 TREE_OVERFLOW (v) = overflow;
2400 return v;
2403 /* Like build_real, but first truncate D to the type. */
2405 tree
2406 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2408 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2411 /* Return a REAL_VALUE_TYPE whose value is the integer value of the
2412 INTEGER_CST node I, converted according to the mode of TYPE. */
2414 REAL_VALUE_TYPE
2415 real_value_from_int_cst (const_tree type, const_tree i)
2417 REAL_VALUE_TYPE d;
2419 /* Clear all bits of the real value type so that we can later do
2420 bitwise comparisons to see if two values are the same. */
2421 memset (&d, 0, sizeof d);
2423 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2424 TYPE_SIGN (TREE_TYPE (i)));
2425 return d;
2428 /* Given a tree representing an integer constant I, return a tree
2429 representing the same value as a floating-point constant of type TYPE. */
2431 tree
2432 build_real_from_int_cst (tree type, const_tree i)
2434 tree v;
2435 int overflow = TREE_OVERFLOW (i);
2437 v = build_real (type, real_value_from_int_cst (type, i));
2439 TREE_OVERFLOW (v) |= overflow;
2440 return v;
2443 /* Return a new REAL_CST node whose type is TYPE
2444 and whose value is the integer value I which has sign SGN. */
2446 tree
2447 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2449 REAL_VALUE_TYPE d;
2451 /* Clear all bits of the real value type so that we can later do
2452 bitwise comparisons to see if two values are the same. */
2453 memset (&d, 0, sizeof d);
2455 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2456 return build_real (type, d);
2459 /* Return a newly constructed STRING_CST node whose value is the LEN
2460 characters at STR when STR is nonnull, or all zeros otherwise.
2461 Note that for a C string literal, LEN should include the trailing NUL.
2462 The TREE_TYPE is not initialized. */
2464 tree
2465 build_string (unsigned len, const char *str /*= NULL */)
2467 /* Do not waste bytes provided by padding of struct tree_string. */
2468 unsigned size = len + offsetof (struct tree_string, str) + 1;
2470 record_node_allocation_statistics (STRING_CST, size);
2472 tree s = (tree) ggc_internal_alloc (size);
2474 memset (s, 0, sizeof (struct tree_typed));
2475 TREE_SET_CODE (s, STRING_CST);
2476 TREE_CONSTANT (s) = 1;
2477 TREE_STRING_LENGTH (s) = len;
2478 if (str)
2479 memcpy (s->string.str, str, len);
2480 else
2481 memset (s->string.str, 0, len);
2482 s->string.str[len] = '\0';
2484 return s;
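/* Illustrative sketch, not part of the original source: for the C literal
   "hi" the length passed in includes the trailing NUL, and the caller is
   responsible for giving the node a type afterwards, e.g.

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (2)));

   The array type shown is only one plausible choice; front ends normally
   compute the exact string type themselves.  */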
2487 /* Return a newly constructed COMPLEX_CST node whose value is
2488 specified by the real and imaginary parts REAL and IMAG.
2489 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2490 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2492 tree
2493 build_complex (tree type, tree real, tree imag)
2495 gcc_assert (CONSTANT_CLASS_P (real));
2496 gcc_assert (CONSTANT_CLASS_P (imag));
2498 tree t = make_node (COMPLEX_CST);
2500 TREE_REALPART (t) = real;
2501 TREE_IMAGPART (t) = imag;
2502 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2503 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2504 return t;
2507 /* Build a complex (inf +- 0i), such as for the result of cproj.
2508 TYPE is the complex tree type of the result. If NEG is true, the
2509 imaginary zero is negative. */
2511 tree
2512 build_complex_inf (tree type, bool neg)
2514 REAL_VALUE_TYPE rzero = dconst0;
2516 rzero.sign = neg;
2517 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2518 build_real (TREE_TYPE (type), rzero));
2521 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2522 element is set to 1. In particular, this is 1 + i for complex types. */
2524 tree
2525 build_each_one_cst (tree type)
2527 if (TREE_CODE (type) == COMPLEX_TYPE)
2529 tree scalar = build_one_cst (TREE_TYPE (type));
2530 return build_complex (type, scalar, scalar);
2532 else
2533 return build_one_cst (type);
2536 /* Return a constant of arithmetic type TYPE which is the
2537 multiplicative identity of the set TYPE. */
2539 tree
2540 build_one_cst (tree type)
2542 switch (TREE_CODE (type))
2544 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2545 case POINTER_TYPE: case REFERENCE_TYPE:
2546 case OFFSET_TYPE:
2547 return build_int_cst (type, 1);
2549 case REAL_TYPE:
2550 return build_real (type, dconst1);
2552 case FIXED_POINT_TYPE:
2553 /* We can only generate 1 for accum types. */
2554 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2555 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2557 case VECTOR_TYPE:
2559 tree scalar = build_one_cst (TREE_TYPE (type));
2561 return build_vector_from_val (type, scalar);
2564 case COMPLEX_TYPE:
2565 return build_complex (type,
2566 build_one_cst (TREE_TYPE (type)),
2567 build_zero_cst (TREE_TYPE (type)));
2569 default:
2570 gcc_unreachable ();
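/* Illustrative note, not part of the original source: the difference from
   build_each_one_cst above is only visible for complex types, e.g.

     tree c1 = build_one_cst (complex_double_type_node);       // 1 + 0i
     tree ce = build_each_one_cst (complex_double_type_node);  // 1 + 1i

   For vector types both functions splat the scalar one across every
   element.  */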
2574 /* Return an integer of type TYPE containing all 1's in as much precision as
2575 it contains, or a complex or vector whose subparts are such integers. */
2577 tree
2578 build_all_ones_cst (tree type)
2580 if (TREE_CODE (type) == COMPLEX_TYPE)
2582 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2583 return build_complex (type, scalar, scalar);
2585 else
2586 return build_minus_one_cst (type);
2589 /* Return a constant of arithmetic type TYPE which is the
2590 opposite of the multiplicative identity of the set TYPE. */
2592 tree
2593 build_minus_one_cst (tree type)
2595 switch (TREE_CODE (type))
2597 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2598 case POINTER_TYPE: case REFERENCE_TYPE:
2599 case OFFSET_TYPE:
2600 return build_int_cst (type, -1);
2602 case REAL_TYPE:
2603 return build_real (type, dconstm1);
2605 case FIXED_POINT_TYPE:
2606 /* We can only generate -1 for accum types. */
2607 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2608 return build_fixed (type,
2609 fixed_from_double_int (double_int_minus_one,
2610 SCALAR_TYPE_MODE (type)));
2612 case VECTOR_TYPE:
2614 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2616 return build_vector_from_val (type, scalar);
2619 case COMPLEX_TYPE:
2620 return build_complex (type,
2621 build_minus_one_cst (TREE_TYPE (type)),
2622 build_zero_cst (TREE_TYPE (type)));
2624 default:
2625 gcc_unreachable ();
2629 /* Build 0 constant of type TYPE. This is used by constructor folding
2630 and thus the constant should be represented in memory by
2631 zero(es). */
2633 tree
2634 build_zero_cst (tree type)
2636 switch (TREE_CODE (type))
2638 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2639 case POINTER_TYPE: case REFERENCE_TYPE:
2640 case OFFSET_TYPE: case NULLPTR_TYPE:
2641 return build_int_cst (type, 0);
2643 case REAL_TYPE:
2644 return build_real (type, dconst0);
2646 case FIXED_POINT_TYPE:
2647 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2649 case VECTOR_TYPE:
2651 tree scalar = build_zero_cst (TREE_TYPE (type));
2653 return build_vector_from_val (type, scalar);
2656 case COMPLEX_TYPE:
2658 tree zero = build_zero_cst (TREE_TYPE (type));
2660 return build_complex (type, zero, zero);
2663 default:
2664 if (!AGGREGATE_TYPE_P (type))
2665 return fold_convert (type, integer_zero_node);
2666 return build_constructor (type, NULL);
2670 /* Build a constant of integer type TYPE, made of VALUE's bits replicated
2671 every WIDTH bits to fit TYPE's precision. */
2673 tree
2674 build_replicated_int_cst (tree type, unsigned int width, HOST_WIDE_INT value)
2676 int n = (TYPE_PRECISION (type) + HOST_BITS_PER_WIDE_INT - 1)
2677 / HOST_BITS_PER_WIDE_INT;
2678 unsigned HOST_WIDE_INT low, mask;
2679 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
2680 int i;
2682 gcc_assert (n && n <= WIDE_INT_MAX_ELTS);
2684 if (width == HOST_BITS_PER_WIDE_INT)
2685 low = value;
2686 else
2688 mask = ((HOST_WIDE_INT)1 << width) - 1;
2689 low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask);
2692 for (i = 0; i < n; i++)
2693 a[i] = low;
2695 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
2696 return wide_int_to_tree
2697 (type, wide_int::from_array (a, n, TYPE_PRECISION (type)));
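/* Illustrative sketch, not part of the original source: replicating the
   byte 0x2a across a 32-bit unsigned type (assuming unsigned int is
   32 bits wide on the target).

     tree t = build_replicated_int_cst (unsigned_type_node, 8, 0x2a);

   T is the INTEGER_CST 0x2a2a2a2a: the low WIDTH bits of VALUE are
   repeated until the type's precision is filled.  */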
2700 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2701 unsigned constant in which only the sign bit is set. Return null
2702 otherwise. */
2704 tree
2705 sign_mask_for (tree type)
2707 /* Avoid having to choose between a real-only sign and a pair of signs.
2708 This could be relaxed if the choice becomes obvious later. */
2709 if (TREE_CODE (type) == COMPLEX_TYPE)
2710 return NULL_TREE;
2712 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2713 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2714 if (!bits || !pow2p_hwi (bits))
2715 return NULL_TREE;
2717 tree inttype = unsigned_type_for (type);
2718 if (!inttype)
2719 return NULL_TREE;
2721 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2722 if (VECTOR_TYPE_P (inttype))
2724 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2725 return build_vector_from_val (inttype, elt);
2727 return wide_int_to_tree (inttype, mask);
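/* Illustrative sketch, not part of the original source: for
   float_type_node (IEEE binary32 on most targets)

     tree mask = sign_mask_for (float_type_node);

   yields the 32-bit unsigned constant 0x80000000, i.e. only the sign bit
   set; for a vector of floats the same constant is splatted across an
   unsigned integer vector of matching shape.  Non-IEEE formats return
   NULL_TREE.  */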
2730 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2732 tree
2733 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2735 tree t;
2736 size_t length = (offsetof (struct tree_binfo, base_binfos)
2737 + vec<tree, va_gc>::embedded_size (base_binfos));
2739 record_node_allocation_statistics (TREE_BINFO, length);
2741 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2743 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2745 TREE_SET_CODE (t, TREE_BINFO);
2747 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2749 return t;
2752 /* Create a CASE_LABEL_EXPR tree node and return it. */
2754 tree
2755 build_case_label (tree low_value, tree high_value, tree label_decl)
2757 tree t = make_node (CASE_LABEL_EXPR);
2759 TREE_TYPE (t) = void_type_node;
2760 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2762 CASE_LOW (t) = low_value;
2763 CASE_HIGH (t) = high_value;
2764 CASE_LABEL (t) = label_decl;
2765 CASE_CHAIN (t) = NULL_TREE;
2767 return t;
2770 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2771 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2772 The latter determines the length of the HOST_WIDE_INT vector. */
2774 tree
2775 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2777 tree t;
2778 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2779 + sizeof (struct tree_int_cst));
2781 gcc_assert (len);
2782 record_node_allocation_statistics (INTEGER_CST, length);
2784 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2786 TREE_SET_CODE (t, INTEGER_CST);
2787 TREE_INT_CST_NUNITS (t) = len;
2788 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2789 /* to_offset can only be applied to trees that are offset_int-sized
2790 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2791 must be exactly the precision of offset_int and so LEN is correct. */
2792 if (ext_len <= OFFSET_INT_ELTS)
2793 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2794 else
2795 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2797 TREE_CONSTANT (t) = 1;
2799 return t;
2802 /* Build a newly constructed TREE_VEC node of length LEN. */
2804 tree
2805 make_tree_vec (int len MEM_STAT_DECL)
2807 tree t;
2808 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2810 record_node_allocation_statistics (TREE_VEC, length);
2812 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2814 TREE_SET_CODE (t, TREE_VEC);
2815 TREE_VEC_LENGTH (t) = len;
2817 return t;
2820 /* Grow a TREE_VEC node to new length LEN. */
2822 tree
2823 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2825 gcc_assert (TREE_CODE (v) == TREE_VEC);
2827 int oldlen = TREE_VEC_LENGTH (v);
2828 gcc_assert (len > oldlen);
2830 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2831 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2833 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2835 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2837 TREE_VEC_LENGTH (v) = len;
2839 return v;
2842 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2843 fixed, and scalar, complex or vector. */
2845 bool
2846 zerop (const_tree expr)
2848 return (integer_zerop (expr)
2849 || real_zerop (expr)
2850 || fixed_zerop (expr));
2853 /* Return 1 if EXPR is the integer constant zero or a complex constant
2854 of zero, or a location wrapper for such a constant. */
2856 bool
2857 integer_zerop (const_tree expr)
2859 STRIP_ANY_LOCATION_WRAPPER (expr);
2861 switch (TREE_CODE (expr))
2863 case INTEGER_CST:
2864 return wi::to_wide (expr) == 0;
2865 case COMPLEX_CST:
2866 return (integer_zerop (TREE_REALPART (expr))
2867 && integer_zerop (TREE_IMAGPART (expr)));
2868 case VECTOR_CST:
2869 return (VECTOR_CST_NPATTERNS (expr) == 1
2870 && VECTOR_CST_DUPLICATE_P (expr)
2871 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2872 default:
2873 return false;
2877 /* Return 1 if EXPR is the integer constant one or the corresponding
2878 complex constant, or a location wrapper for such a constant. */
2880 bool
2881 integer_onep (const_tree expr)
2883 STRIP_ANY_LOCATION_WRAPPER (expr);
2885 switch (TREE_CODE (expr))
2887 case INTEGER_CST:
2888 return wi::eq_p (wi::to_widest (expr), 1);
2889 case COMPLEX_CST:
2890 return (integer_onep (TREE_REALPART (expr))
2891 && integer_zerop (TREE_IMAGPART (expr)));
2892 case VECTOR_CST:
2893 return (VECTOR_CST_NPATTERNS (expr) == 1
2894 && VECTOR_CST_DUPLICATE_P (expr)
2895 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2896 default:
2897 return false;
2901 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2902 return 1 if every piece is the integer constant one.
2903 Also return 1 for location wrappers for such a constant. */
2905 bool
2906 integer_each_onep (const_tree expr)
2908 STRIP_ANY_LOCATION_WRAPPER (expr);
2910 if (TREE_CODE (expr) == COMPLEX_CST)
2911 return (integer_onep (TREE_REALPART (expr))
2912 && integer_onep (TREE_IMAGPART (expr)));
2913 else
2914 return integer_onep (expr);
2917 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2918 it contains, or a complex or vector whose subparts are such integers,
2919 or a location wrapper for such a constant. */
2921 bool
2922 integer_all_onesp (const_tree expr)
2924 STRIP_ANY_LOCATION_WRAPPER (expr);
2926 if (TREE_CODE (expr) == COMPLEX_CST
2927 && integer_all_onesp (TREE_REALPART (expr))
2928 && integer_all_onesp (TREE_IMAGPART (expr)))
2929 return true;
2931 else if (TREE_CODE (expr) == VECTOR_CST)
2932 return (VECTOR_CST_NPATTERNS (expr) == 1
2933 && VECTOR_CST_DUPLICATE_P (expr)
2934 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2936 else if (TREE_CODE (expr) != INTEGER_CST)
2937 return false;
2939 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2940 == wi::to_wide (expr));
2943 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2944 for such a constant. */
2946 bool
2947 integer_minus_onep (const_tree expr)
2949 STRIP_ANY_LOCATION_WRAPPER (expr);
2951 if (TREE_CODE (expr) == COMPLEX_CST)
2952 return (integer_all_onesp (TREE_REALPART (expr))
2953 && integer_zerop (TREE_IMAGPART (expr)));
2954 else
2955 return integer_all_onesp (expr);
2958 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2959 one bit on), or a location wrapper for such a constant. */
2961 bool
2962 integer_pow2p (const_tree expr)
2964 STRIP_ANY_LOCATION_WRAPPER (expr);
2966 if (TREE_CODE (expr) == COMPLEX_CST
2967 && integer_pow2p (TREE_REALPART (expr))
2968 && integer_zerop (TREE_IMAGPART (expr)))
2969 return true;
2971 if (TREE_CODE (expr) != INTEGER_CST)
2972 return false;
2974 return wi::popcount (wi::to_wide (expr)) == 1;
2977 /* Return 1 if EXPR is an integer constant other than zero or a
2978 complex constant other than zero, or a location wrapper for such a
2979 constant. */
2981 bool
2982 integer_nonzerop (const_tree expr)
2984 STRIP_ANY_LOCATION_WRAPPER (expr);
2986 return ((TREE_CODE (expr) == INTEGER_CST
2987 && wi::to_wide (expr) != 0)
2988 || (TREE_CODE (expr) == COMPLEX_CST
2989 && (integer_nonzerop (TREE_REALPART (expr))
2990 || integer_nonzerop (TREE_IMAGPART (expr)))));
2993 /* Return 1 if EXPR is the integer constant one. For vector,
2994 return 1 if every piece is the integer constant minus one
2995 (representing the value TRUE).
2996 Also return 1 for location wrappers for such a constant. */
2998 bool
2999 integer_truep (const_tree expr)
3001 STRIP_ANY_LOCATION_WRAPPER (expr);
3003 if (TREE_CODE (expr) == VECTOR_CST)
3004 return integer_all_onesp (expr);
3005 return integer_onep (expr);
3008 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
3009 for such a constant. */
3011 bool
3012 fixed_zerop (const_tree expr)
3014 STRIP_ANY_LOCATION_WRAPPER (expr);
3016 return (TREE_CODE (expr) == FIXED_CST
3017 && TREE_FIXED_CST (expr).data.is_zero ());
3020 /* Return the power of two represented by a tree node known to be a
3021 power of two. */
3024 tree_log2 (const_tree expr)
3026 if (TREE_CODE (expr) == COMPLEX_CST)
3027 return tree_log2 (TREE_REALPART (expr));
3029 return wi::exact_log2 (wi::to_wide (expr));
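/* Illustrative note, not part of the original source: for the INTEGER_CST
   8, integer_pow2p returns true and tree_log2 returns 3; for a value that
   is not a power of two, wi::exact_log2 (and hence tree_log2) returns -1,
   which is why callers are expected to check integer_pow2p first.  */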
3032 /* Similar, but return the largest integer Y such that 2 ** Y is less
3033 than or equal to EXPR. */
3036 tree_floor_log2 (const_tree expr)
3038 if (TREE_CODE (expr) == COMPLEX_CST)
3039 return tree_log2 (TREE_REALPART (expr));
3041 return wi::floor_log2 (wi::to_wide (expr));
3044 /* Return number of known trailing zero bits in EXPR, or, if the value of
3045 EXPR is known to be zero, the precision of its type. */
3047 unsigned int
3048 tree_ctz (const_tree expr)
3050 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3051 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3052 return 0;
3054 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3055 switch (TREE_CODE (expr))
3057 case INTEGER_CST:
3058 ret1 = wi::ctz (wi::to_wide (expr));
3059 return MIN (ret1, prec);
3060 case SSA_NAME:
3061 ret1 = wi::ctz (get_nonzero_bits (expr));
3062 return MIN (ret1, prec);
3063 case PLUS_EXPR:
3064 case MINUS_EXPR:
3065 case BIT_IOR_EXPR:
3066 case BIT_XOR_EXPR:
3067 case MIN_EXPR:
3068 case MAX_EXPR:
3069 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3070 if (ret1 == 0)
3071 return ret1;
3072 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3073 return MIN (ret1, ret2);
3074 case POINTER_PLUS_EXPR:
3075 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3076 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3077 /* Second operand is sizetype, which could be in theory
3078 wider than pointer's precision. Make sure we never
3079 return more than prec. */
3080 ret2 = MIN (ret2, prec);
3081 return MIN (ret1, ret2);
3082 case BIT_AND_EXPR:
3083 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3084 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3085 return MAX (ret1, ret2);
3086 case MULT_EXPR:
3087 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3088 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3089 return MIN (ret1 + ret2, prec);
3090 case LSHIFT_EXPR:
3091 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3092 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3093 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3095 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3096 return MIN (ret1 + ret2, prec);
3098 return ret1;
3099 case RSHIFT_EXPR:
3100 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3101 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3103 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3104 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3105 if (ret1 > ret2)
3106 return ret1 - ret2;
3108 return 0;
3109 case TRUNC_DIV_EXPR:
3110 case CEIL_DIV_EXPR:
3111 case FLOOR_DIV_EXPR:
3112 case ROUND_DIV_EXPR:
3113 case EXACT_DIV_EXPR:
3114 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3115 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3117 int l = tree_log2 (TREE_OPERAND (expr, 1));
3118 if (l >= 0)
3120 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3121 ret2 = l;
3122 if (ret1 > ret2)
3123 return ret1 - ret2;
3126 return 0;
3127 CASE_CONVERT:
3128 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3129 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3130 ret1 = prec;
3131 return MIN (ret1, prec);
3132 case SAVE_EXPR:
3133 return tree_ctz (TREE_OPERAND (expr, 0));
3134 case COND_EXPR:
3135 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3136 if (ret1 == 0)
3137 return 0;
3138 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3139 return MIN (ret1, ret2);
3140 case COMPOUND_EXPR:
3141 return tree_ctz (TREE_OPERAND (expr, 1));
3142 case ADDR_EXPR:
3143 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3144 if (ret1 > BITS_PER_UNIT)
3146 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3147 return MIN (ret1, prec);
3149 return 0;
3150 default:
3151 return 0;
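/* Illustrative note, not part of the original source: a few sample results
   of the recursion above, assuming a 32-bit int:

     tree_ctz (build_int_cst (integer_type_node, 40)) == 3   // 0b101000
     tree_ctz of (x * 8)  is at least tree_ctz (x) + 3       // MULT_EXPR
     tree_ctz of (x & y)  is at least the MAX of the operands' counts

   Except for constants, the value is a conservative lower bound, never an
   exact count.  */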
3155 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3156 decimal float constants, so don't return 1 for them.
3157 Also return 1 for location wrappers around such a constant. */
3159 bool
3160 real_zerop (const_tree expr)
3162 STRIP_ANY_LOCATION_WRAPPER (expr);
3164 switch (TREE_CODE (expr))
3166 case REAL_CST:
3167 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3168 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3169 case COMPLEX_CST:
3170 return real_zerop (TREE_REALPART (expr))
3171 && real_zerop (TREE_IMAGPART (expr));
3172 case VECTOR_CST:
3174 /* Don't simply check for a duplicate because the predicate
3175 accepts both +0.0 and -0.0. */
3176 unsigned count = vector_cst_encoded_nelts (expr);
3177 for (unsigned int i = 0; i < count; ++i)
3178 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3179 return false;
3180 return true;
3182 default:
3183 return false;
3187 /* Return 1 if EXPR is the real constant one in real or complex form.
3188 Trailing zeroes matter for decimal float constants, so don't return
3189 1 for them.
3190 Also return 1 for location wrappers around such a constant. */
3192 bool
3193 real_onep (const_tree expr)
3195 STRIP_ANY_LOCATION_WRAPPER (expr);
3197 switch (TREE_CODE (expr))
3199 case REAL_CST:
3200 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3201 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3202 case COMPLEX_CST:
3203 return real_onep (TREE_REALPART (expr))
3204 && real_zerop (TREE_IMAGPART (expr));
3205 case VECTOR_CST:
3206 return (VECTOR_CST_NPATTERNS (expr) == 1
3207 && VECTOR_CST_DUPLICATE_P (expr)
3208 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3209 default:
3210 return false;
3214 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3215 matter for decimal float constants, so don't return 1 for them.
3216 Also return 1 for location wrappers around such a constant. */
3218 bool
3219 real_minus_onep (const_tree expr)
3221 STRIP_ANY_LOCATION_WRAPPER (expr);
3223 switch (TREE_CODE (expr))
3225 case REAL_CST:
3226 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3227 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3228 case COMPLEX_CST:
3229 return real_minus_onep (TREE_REALPART (expr))
3230 && real_zerop (TREE_IMAGPART (expr));
3231 case VECTOR_CST:
3232 return (VECTOR_CST_NPATTERNS (expr) == 1
3233 && VECTOR_CST_DUPLICATE_P (expr)
3234 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3235 default:
3236 return false;
3240 /* Return true if T could be a floating point zero. */
3242 bool
3243 real_maybe_zerop (const_tree expr)
3245 switch (TREE_CODE (expr))
3247 case REAL_CST:
3248 /* Can't use real_zerop here, as it always returns false for decimal
3249 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3250 either, as decimal zeros are rvc_normal. */
3251 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3252 case COMPLEX_CST:
3253 return (real_maybe_zerop (TREE_REALPART (expr))
3254 || real_maybe_zerop (TREE_IMAGPART (expr)));
3255 case VECTOR_CST:
3257 unsigned count = vector_cst_encoded_nelts (expr);
3258 for (unsigned int i = 0; i < count; ++i)
3259 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3260 return true;
3261 return false;
3263 default:
3264 /* Perhaps for SSA_NAMEs we could query frange. */
3265 return true;
3269 /* Nonzero if EXP is a constant or a cast of a constant. */
3271 bool
3272 really_constant_p (const_tree exp)
3274 /* This is not quite the same as STRIP_NOPS. It does more. */
3275 while (CONVERT_EXPR_P (exp)
3276 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3277 exp = TREE_OPERAND (exp, 0);
3278 return TREE_CONSTANT (exp);
3281 /* Return true if T holds a polynomial pointer difference, storing it in
3282 *VALUE if so. A true return means that T's precision is no greater
3283 than 64 bits, which is the largest address space we support, so *VALUE
3284 never loses precision. However, the signedness of the result does
3285 not necessarily match the signedness of T: sometimes an unsigned type
3286 like sizetype is used to encode a value that is actually negative. */
3288 bool
3289 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3291 if (!t)
3292 return false;
3293 if (TREE_CODE (t) == INTEGER_CST)
3295 if (!cst_and_fits_in_hwi (t))
3296 return false;
3297 *value = int_cst_value (t);
3298 return true;
3300 if (POLY_INT_CST_P (t))
3302 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3303 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3304 return false;
3305 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3306 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3307 return true;
3309 return false;
3312 poly_int64
3313 tree_to_poly_int64 (const_tree t)
3315 gcc_assert (tree_fits_poly_int64_p (t));
3316 if (POLY_INT_CST_P (t))
3317 return poly_int_cst_value (t).force_shwi ();
3318 return TREE_INT_CST_LOW (t);
3321 poly_uint64
3322 tree_to_poly_uint64 (const_tree t)
3324 gcc_assert (tree_fits_poly_uint64_p (t));
3325 if (POLY_INT_CST_P (t))
3326 return poly_int_cst_value (t).force_uhwi ();
3327 return TREE_INT_CST_LOW (t);
3330 /* Return first list element whose TREE_VALUE is ELEM.
3331 Return 0 if ELEM is not in LIST. */
3333 tree
3334 value_member (tree elem, tree list)
3336 while (list)
3338 if (elem == TREE_VALUE (list))
3339 return list;
3340 list = TREE_CHAIN (list);
3342 return NULL_TREE;
3345 /* Return first list element whose TREE_PURPOSE is ELEM.
3346 Return 0 if ELEM is not in LIST. */
3348 tree
3349 purpose_member (const_tree elem, tree list)
3351 while (list)
3353 if (elem == TREE_PURPOSE (list))
3354 return list;
3355 list = TREE_CHAIN (list);
3357 return NULL_TREE;
3360 /* Return true if ELEM is in V. */
3362 bool
3363 vec_member (const_tree elem, vec<tree, va_gc> *v)
3365 unsigned ix;
3366 tree t;
3367 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3368 if (elem == t)
3369 return true;
3370 return false;
3373 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3374 NULL_TREE. */
3376 tree
3377 chain_index (int idx, tree chain)
3379 for (; chain && idx > 0; --idx)
3380 chain = TREE_CHAIN (chain);
3381 return chain;
3384 /* Return nonzero if ELEM is part of the chain CHAIN. */
3386 bool
3387 chain_member (const_tree elem, const_tree chain)
3389 while (chain)
3391 if (elem == chain)
3392 return true;
3393 chain = DECL_CHAIN (chain);
3396 return false;
3399 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3400 We expect a null pointer to mark the end of the chain.
3401 This is the Lisp primitive `length'. */
3404 list_length (const_tree t)
3406 const_tree p = t;
3407 #ifdef ENABLE_TREE_CHECKING
3408 const_tree q = t;
3409 #endif
3410 int len = 0;
3412 while (p)
3414 p = TREE_CHAIN (p);
3415 #ifdef ENABLE_TREE_CHECKING
3416 if (len % 2)
3417 q = TREE_CHAIN (q);
3418 gcc_assert (p != q);
3419 #endif
3420 len++;
3423 return len;
3426 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3427 UNION_TYPE TYPE, or NULL_TREE if none. */
3429 tree
3430 first_field (const_tree type)
3432 tree t = TYPE_FIELDS (type);
3433 while (t && TREE_CODE (t) != FIELD_DECL)
3434 t = TREE_CHAIN (t);
3435 return t;
3438 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3439 UNION_TYPE TYPE, or NULL_TREE if none. */
3441 tree
3442 last_field (const_tree type)
3444 tree last = NULL_TREE;
3446 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3448 if (TREE_CODE (fld) != FIELD_DECL)
3449 continue;
3451 last = fld;
3454 return last;
3457 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3458 by modifying the last node in chain 1 to point to chain 2.
3459 This is the Lisp primitive `nconc'. */
3461 tree
3462 chainon (tree op1, tree op2)
3464 tree t1;
3466 if (!op1)
3467 return op2;
3468 if (!op2)
3469 return op1;
3471 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3472 continue;
3473 TREE_CHAIN (t1) = op2;
3475 #ifdef ENABLE_TREE_CHECKING
3477 tree t2;
3478 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3479 gcc_assert (t2 != t1);
3481 #endif
3483 return op1;
3486 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3488 tree
3489 tree_last (tree chain)
3491 tree next;
3492 if (chain)
3493 while ((next = TREE_CHAIN (chain)))
3494 chain = next;
3495 return chain;
3498 /* Reverse the order of elements in the chain T,
3499 and return the new head of the chain (old last element). */
3501 tree
3502 nreverse (tree t)
3504 tree prev = 0, decl, next;
3505 for (decl = t; decl; decl = next)
3507 /* We shouldn't be using this function to reverse BLOCK chains; we
3508 have blocks_nreverse for that. */
3509 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3510 next = TREE_CHAIN (decl);
3511 TREE_CHAIN (decl) = prev;
3512 prev = decl;
3514 return prev;
3517 /* Return a newly created TREE_LIST node whose
3518 purpose and value fields are PARM and VALUE. */
3520 tree
3521 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3523 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3524 TREE_PURPOSE (t) = parm;
3525 TREE_VALUE (t) = value;
3526 return t;
3529 /* Build a chain of TREE_LIST nodes from a vector. */
3531 tree
3532 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3534 tree ret = NULL_TREE;
3535 tree *pp = &ret;
3536 unsigned int i;
3537 tree t;
3538 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3540 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3541 pp = &TREE_CHAIN (*pp);
3543 return ret;
3546 /* Return a newly created TREE_LIST node whose
3547 purpose and value fields are PURPOSE and VALUE
3548 and whose TREE_CHAIN is CHAIN. */
3550 tree
3551 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3553 tree node;
3555 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3556 memset (node, 0, sizeof (struct tree_common));
3558 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3560 TREE_SET_CODE (node, TREE_LIST);
3561 TREE_CHAIN (node) = chain;
3562 TREE_PURPOSE (node) = purpose;
3563 TREE_VALUE (node) = value;
3564 return node;
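/* Illustrative sketch, not part of the original source: building a
   two-element TREE_LIST by consing onto NULL_TREE, analogous to Lisp.
   FIRST_VAL and SECOND_VAL stand for arbitrary trees supplied by the
   caller.

     tree lst = tree_cons (NULL_TREE, first_val,
                           tree_cons (NULL_TREE, second_val, NULL_TREE));
     gcc_checking_assert (list_length (lst) == 2);
*/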
3567 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3568 trees. */
3570 vec<tree, va_gc> *
3571 ctor_to_vec (tree ctor)
3573 vec<tree, va_gc> *vec;
3574 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3575 unsigned int ix;
3576 tree val;
3578 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3579 vec->quick_push (val);
3581 return vec;
3584 /* Return the size nominally occupied by an object of type TYPE
3585 when it resides in memory. The value is measured in units of bytes,
3586 and its data type is that normally used for type sizes
3587 (which is the first type created by make_signed_type or
3588 make_unsigned_type). */
3590 tree
3591 size_in_bytes_loc (location_t loc, const_tree type)
3593 tree t;
3595 if (type == error_mark_node)
3596 return integer_zero_node;
3598 type = TYPE_MAIN_VARIANT (type);
3599 t = TYPE_SIZE_UNIT (type);
3601 if (t == 0)
3603 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3604 return size_zero_node;
3607 return t;
3610 /* Return the size of TYPE (in bytes) as a wide integer
3611 or return -1 if the size can vary or is larger than an integer. */
3613 HOST_WIDE_INT
3614 int_size_in_bytes (const_tree type)
3616 tree t;
3618 if (type == error_mark_node)
3619 return 0;
3621 type = TYPE_MAIN_VARIANT (type);
3622 t = TYPE_SIZE_UNIT (type);
3624 if (t && tree_fits_uhwi_p (t))
3625 return TREE_INT_CST_LOW (t);
3626 else
3627 return -1;
3630 /* Return the maximum size of TYPE (in bytes) as a wide integer
3631 or return -1 if the size can vary or is larger than an integer. */
3633 HOST_WIDE_INT
3634 max_int_size_in_bytes (const_tree type)
3636 HOST_WIDE_INT size = -1;
3637 tree size_tree;
3639 /* If this is an array type, check for a possible MAX_SIZE attached. */
3641 if (TREE_CODE (type) == ARRAY_TYPE)
3643 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3645 if (size_tree && tree_fits_uhwi_p (size_tree))
3646 size = tree_to_uhwi (size_tree);
3649 /* If we still haven't been able to get a size, see if the language
3650 can compute a maximum size. */
3652 if (size == -1)
3654 size_tree = lang_hooks.types.max_size (type);
3656 if (size_tree && tree_fits_uhwi_p (size_tree))
3657 size = tree_to_uhwi (size_tree);
3660 return size;
3663 /* Return the bit position of FIELD, in bits from the start of the record.
3664 This is a tree of type bitsizetype. */
3666 tree
3667 bit_position (const_tree field)
3669 return bit_from_pos (DECL_FIELD_OFFSET (field),
3670 DECL_FIELD_BIT_OFFSET (field));
3673 /* Return the byte position of FIELD, in bytes from the start of the record.
3674 This is a tree of type sizetype. */
3676 tree
3677 byte_position (const_tree field)
3679 return byte_from_pos (DECL_FIELD_OFFSET (field),
3680 DECL_FIELD_BIT_OFFSET (field));
3683 /* Likewise, but return as an integer. It must be representable in
3684 that way (since it could be a signed value, we don't have the
3685 option of returning -1 like int_size_in_bytes can). */
3687 HOST_WIDE_INT
3688 int_byte_position (const_tree field)
3690 return tree_to_shwi (byte_position (field));
3693 /* Return, as a tree node, the number of elements for TYPE (which is an
3694 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3696 tree
3697 array_type_nelts (const_tree type)
3699 tree index_type, min, max;
3701 /* If they did it with unspecified bounds, then we should have already
3702 given an error about it before we got here. */
3703 if (! TYPE_DOMAIN (type))
3704 return error_mark_node;
3706 index_type = TYPE_DOMAIN (type);
3707 min = TYPE_MIN_VALUE (index_type);
3708 max = TYPE_MAX_VALUE (index_type);
3710 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3711 if (!max)
3713 /* Zero-sized arrays are represented by the C FE as complete types with
3714 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3715 them as min 0, max -1. */
3716 if (COMPLETE_TYPE_P (type)
3717 && integer_zerop (TYPE_SIZE (type))
3718 && integer_zerop (min))
3719 return build_int_cst (TREE_TYPE (min), -1);
3721 return error_mark_node;
3724 return (integer_zerop (min)
3725 ? max
3726 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
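/* Illustrative note, not part of the original source: for the type int[10]
   the domain is [0, 9], so array_type_nelts returns the INTEGER_CST 9;
   for a C zero-length array it returns -1, matching the C++ representation
   described in the comment above.  */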
3729 /* If arg is static -- a reference to an object in static storage -- then
3730 return the object. This is not the same as the C meaning of `static'.
3731 If arg isn't static, return NULL. */
3733 tree
3734 staticp (tree arg)
3736 switch (TREE_CODE (arg))
3738 case FUNCTION_DECL:
3739 /* Nested functions are static, even though taking their address will
3740 involve a trampoline as we unnest the nested function and create
3741 the trampoline on the tree level. */
3742 return arg;
3744 case VAR_DECL:
3745 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3746 && ! DECL_THREAD_LOCAL_P (arg)
3747 && ! DECL_DLLIMPORT_P (arg)
3748 ? arg : NULL);
3750 case CONST_DECL:
3751 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3752 ? arg : NULL);
3754 case CONSTRUCTOR:
3755 return TREE_STATIC (arg) ? arg : NULL;
3757 case LABEL_DECL:
3758 case STRING_CST:
3759 return arg;
3761 case COMPONENT_REF:
3762 /* If the thing being referenced is not a field, then it is
3763 something language specific. */
3764 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3766 /* If we are referencing a bitfield, we can't evaluate an
3767 ADDR_EXPR at compile time and so it isn't a constant. */
3768 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3769 return NULL;
3771 return staticp (TREE_OPERAND (arg, 0));
3773 case BIT_FIELD_REF:
3774 return NULL;
3776 case INDIRECT_REF:
3777 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3779 case ARRAY_REF:
3780 case ARRAY_RANGE_REF:
3781 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3782 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3783 return staticp (TREE_OPERAND (arg, 0));
3784 else
3785 return NULL;
3787 case COMPOUND_LITERAL_EXPR:
3788 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3790 default:
3791 return NULL;
3798 /* Return whether OP is a DECL whose address is function-invariant. */
3800 bool
3801 decl_address_invariant_p (const_tree op)
3803 /* The conditions below are slightly less strict than the one in
3804 staticp. */
3806 switch (TREE_CODE (op))
3808 case PARM_DECL:
3809 case RESULT_DECL:
3810 case LABEL_DECL:
3811 case FUNCTION_DECL:
3812 return true;
3814 case VAR_DECL:
3815 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3816 || DECL_THREAD_LOCAL_P (op)
3817 || DECL_CONTEXT (op) == current_function_decl
3818 || decl_function_context (op) == current_function_decl)
3819 return true;
3820 break;
3822 case CONST_DECL:
3823 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3824 || decl_function_context (op) == current_function_decl)
3825 return true;
3826 break;
3828 default:
3829 break;
3832 return false;
3835 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3837 bool
3838 decl_address_ip_invariant_p (const_tree op)
3840 /* The conditions below are slightly less strict than the one in
3841 staticp. */
3843 switch (TREE_CODE (op))
3845 case LABEL_DECL:
3846 case FUNCTION_DECL:
3847 case STRING_CST:
3848 return true;
3850 case VAR_DECL:
3851 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3852 && !DECL_DLLIMPORT_P (op))
3853 || DECL_THREAD_LOCAL_P (op))
3854 return true;
3855 break;
3857 case CONST_DECL:
3858 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3859 return true;
3860 break;
3862 default:
3863 break;
3866 return false;
3870 /* Return true if T is function-invariant (internal function, does
3871 not handle arithmetic; that's handled in skip_simple_arithmetic and
3872 tree_invariant_p). */
3874 static bool
3875 tree_invariant_p_1 (tree t)
3877 tree op;
3879 if (TREE_CONSTANT (t)
3880 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3881 return true;
3883 switch (TREE_CODE (t))
3885 case SAVE_EXPR:
3886 return true;
3888 case ADDR_EXPR:
3889 op = TREE_OPERAND (t, 0);
3890 while (handled_component_p (op))
3892 switch (TREE_CODE (op))
3894 case ARRAY_REF:
3895 case ARRAY_RANGE_REF:
3896 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3897 || TREE_OPERAND (op, 2) != NULL_TREE
3898 || TREE_OPERAND (op, 3) != NULL_TREE)
3899 return false;
3900 break;
3902 case COMPONENT_REF:
3903 if (TREE_OPERAND (op, 2) != NULL_TREE)
3904 return false;
3905 break;
3907 default:;
3909 op = TREE_OPERAND (op, 0);
3912 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3914 default:
3915 break;
3918 return false;
3921 /* Return true if T is function-invariant. */
3923 bool
3924 tree_invariant_p (tree t)
3926 tree inner = skip_simple_arithmetic (t);
3927 return tree_invariant_p_1 (inner);
3930 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3931 Do this to any expression which may be used in more than one place,
3932 but must be evaluated only once.
3934 Normally, expand_expr would reevaluate the expression each time.
3935 Calling save_expr produces something that is evaluated and recorded
3936 the first time expand_expr is called on it. Subsequent calls to
3937 expand_expr just reuse the recorded value.
3939 The call to expand_expr that generates code that actually computes
3940 the value is the first call *at compile time*. Subsequent calls
3941 *at compile time* generate code to use the saved value.
3942 This produces the correct result provided that *at run time* control
3943 always flows through the insns made by the first expand_expr
3944 before reaching the other places where the save_expr was evaluated.
3945 You, the caller of save_expr, must make sure this is so.
3947 Constants, and certain read-only nodes, are returned with no
3948 SAVE_EXPR because that is safe. Expressions containing placeholders
3949 are not touched; see tree.def for an explanation of what these
3950 are used for. */
3952 tree
3953 save_expr (tree expr)
3955 tree inner;
3957 /* If the tree evaluates to a constant, then we don't want to hide that
3958 fact (i.e. this allows further folding, and direct checks for constants).
3959 However, a read-only object that has side effects cannot be bypassed.
3960 Since it is no problem to reevaluate literals, we just return the
3961 literal node. */
3962 inner = skip_simple_arithmetic (expr);
3963 if (TREE_CODE (inner) == ERROR_MARK)
3964 return inner;
3966 if (tree_invariant_p_1 (inner))
3967 return expr;
3969 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3970 it means that the size or offset of some field of an object depends on
3971 the value within another field.
3973 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3974 and some variable since it would then need to be both evaluated once and
3975 evaluated more than once. Front-ends must ensure this case cannot
3976 happen by surrounding any such subexpressions in their own SAVE_EXPR
3977 and forcing evaluation at the proper time. */
3978 if (contains_placeholder_p (inner))
3979 return expr;
3981 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3983 /* This expression might be placed ahead of a jump to ensure that the
3984 value was computed on both sides of the jump. So make sure it isn't
3985 eliminated as dead. */
3986 TREE_SIDE_EFFECTS (expr) = 1;
3987 return expr;
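/* Illustrative sketch, not part of the original source: a front end that
   wants an expression using the same call twice, evaluated only once,
   might write

     tree call = save_expr (build_call_expr (fndecl, 0));
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (call), call, call);

   FNDECL stands for some FUNCTION_DECL; the SAVE_EXPR guarantees that the
   call is expanded once even though CALL appears in two operands.  */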
3990 /* Look inside EXPR into any simple arithmetic operations. Return the
3991 outermost non-arithmetic or non-invariant node. */
3993 tree
3994 skip_simple_arithmetic (tree expr)
3996 /* We don't care about whether this can be used as an lvalue in this
3997 context. */
3998 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3999 expr = TREE_OPERAND (expr, 0);
4001 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
4002 a constant, it will be more efficient to not make another SAVE_EXPR since
4003 it will allow better simplification and GCSE will be able to merge the
4004 computations if they actually occur. */
4005 while (true)
4007 if (UNARY_CLASS_P (expr))
4008 expr = TREE_OPERAND (expr, 0);
4009 else if (BINARY_CLASS_P (expr))
4011 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
4012 expr = TREE_OPERAND (expr, 0);
4013 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
4014 expr = TREE_OPERAND (expr, 1);
4015 else
4016 break;
4018 else
4019 break;
4022 return expr;
4025 /* Look inside EXPR into simple arithmetic operations involving constants.
4026 Return the outermost non-arithmetic or non-constant node. */
4028 tree
4029 skip_simple_constant_arithmetic (tree expr)
4031 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4032 expr = TREE_OPERAND (expr, 0);
4034 while (true)
4036 if (UNARY_CLASS_P (expr))
4037 expr = TREE_OPERAND (expr, 0);
4038 else if (BINARY_CLASS_P (expr))
4040 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4041 expr = TREE_OPERAND (expr, 0);
4042 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4043 expr = TREE_OPERAND (expr, 1);
4044 else
4045 break;
4047 else
4048 break;
4051 return expr;
4054 /* Return which tree structure is used by T. */
4056 enum tree_node_structure_enum
4057 tree_node_structure (const_tree t)
4059 const enum tree_code code = TREE_CODE (t);
4060 return tree_node_structure_for_code (code);
4063 /* Set various status flags when building a CALL_EXPR object T. */
4065 static void
4066 process_call_operands (tree t)
4068 bool side_effects = TREE_SIDE_EFFECTS (t);
4069 bool read_only = false;
4070 int i = call_expr_flags (t);
4072 /* Calls have side-effects, except those to const or pure functions. */
4073 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4074 side_effects = true;
4075 /* Propagate TREE_READONLY of arguments for const functions. */
4076 if (i & ECF_CONST)
4077 read_only = true;
4079 if (!side_effects || read_only)
4080 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4082 tree op = TREE_OPERAND (t, i);
4083 if (op && TREE_SIDE_EFFECTS (op))
4084 side_effects = true;
4085 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4086 read_only = false;
4089 TREE_SIDE_EFFECTS (t) = side_effects;
4090 TREE_READONLY (t) = read_only;
4093 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4094 size or offset that depends on a field within a record. */
4096 bool
4097 contains_placeholder_p (const_tree exp)
4099 enum tree_code code;
4101 if (!exp)
4102 return 0;
4104 code = TREE_CODE (exp);
4105 if (code == PLACEHOLDER_EXPR)
4106 return 1;
4108 switch (TREE_CODE_CLASS (code))
4110 case tcc_reference:
4111 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4112 position computations since they will be converted into a
4113 WITH_RECORD_EXPR involving the reference, which we assume
4114 here will be valid. */
4115 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4117 case tcc_exceptional:
4118 if (code == TREE_LIST)
4119 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4120 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4121 break;
4123 case tcc_unary:
4124 case tcc_binary:
4125 case tcc_comparison:
4126 case tcc_expression:
4127 switch (code)
4129 case COMPOUND_EXPR:
4130 /* Ignoring the first operand isn't quite right, but works best. */
4131 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4133 case COND_EXPR:
4134 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4135 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4136 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4138 case SAVE_EXPR:
4139 /* The save_expr function never wraps anything containing
4140 a PLACEHOLDER_EXPR. */
4141 return 0;
4143 default:
4144 break;
4147 switch (TREE_CODE_LENGTH (code))
4149 case 1:
4150 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4151 case 2:
4152 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4153 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4154 default:
4155 return 0;
4158 case tcc_vl_exp:
4159 switch (code)
4161 case CALL_EXPR:
4163 const_tree arg;
4164 const_call_expr_arg_iterator iter;
4165 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4166 if (CONTAINS_PLACEHOLDER_P (arg))
4167 return 1;
4168 return 0;
4170 default:
4171 return 0;
4174 default:
4175 return 0;
4177 return 0;
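/* Hypothetical example: the size of a variable-length field in an Ada
   discriminated record is typically something like
   MULT_EXPR <COMPONENT_REF <PLACEHOLDER_EXPR, n_elems>, 8>; this
   function returns true for such an expression, which is what tells
   save_expr above to leave it alone.  */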
4180 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4181 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4182 field positions. */
4184 static bool
4185 type_contains_placeholder_1 (const_tree type)
4187 /* If the size contains a placeholder or the parent type (component type in
4188 the case of arrays) type involves a placeholder, this type does. */
4189 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4190 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4191 || (!POINTER_TYPE_P (type)
4192 && TREE_TYPE (type)
4193 && type_contains_placeholder_p (TREE_TYPE (type))))
4194 return true;
4196 /* Now do type-specific checks. Note that the last part of the check above
4197 greatly limits what we have to do below. */
4198 switch (TREE_CODE (type))
4200 case VOID_TYPE:
4201 case OPAQUE_TYPE:
4202 case COMPLEX_TYPE:
4203 case ENUMERAL_TYPE:
4204 case BOOLEAN_TYPE:
4205 case POINTER_TYPE:
4206 case OFFSET_TYPE:
4207 case REFERENCE_TYPE:
4208 case METHOD_TYPE:
4209 case FUNCTION_TYPE:
4210 case VECTOR_TYPE:
4211 case NULLPTR_TYPE:
4212 return false;
4214 case INTEGER_TYPE:
4215 case REAL_TYPE:
4216 case FIXED_POINT_TYPE:
4217 /* Here we just check the bounds. */
4218 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4219 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4221 case ARRAY_TYPE:
4222 /* We have already checked the component type above, so just check
4223 the domain type. Flexible array members have a null domain. */
4224 return TYPE_DOMAIN (type) ?
4225 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4227 case RECORD_TYPE:
4228 case UNION_TYPE:
4229 case QUAL_UNION_TYPE:
4231 tree field;
4233 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4234 if (TREE_CODE (field) == FIELD_DECL
4235 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4236 || (TREE_CODE (type) == QUAL_UNION_TYPE
4237 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4238 || type_contains_placeholder_p (TREE_TYPE (field))))
4239 return true;
4241 return false;
4244 default:
4245 gcc_unreachable ();
4249 /* Wrapper around above function used to cache its result. */
4251 bool
4252 type_contains_placeholder_p (tree type)
4254 bool result;
4256 /* If the contains_placeholder_bits field has been initialized,
4257 then we know the answer. */
4258 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4259 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4261 /* Indicate that we've seen this type node, and the answer is false.
4262 This is what we want to return if we run into recursion via fields. */
4263 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4265 /* Compute the real value. */
4266 result = type_contains_placeholder_1 (type);
4268 /* Store the real value. */
4269 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4271 return result;
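/* The cache is a two-bit encoding: 0 means not computed yet, 1 means
   computed and false, 2 means computed and true, hence the +1/-1
   adjustments above.  Seeding the field with 1 before recursing is what
   breaks cycles through self-referential field types.  */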
4274 /* Push tree EXP onto vector QUEUE if it is not already present. */
4276 static void
4277 push_without_duplicates (tree exp, vec<tree> *queue)
4279 unsigned int i;
4280 tree iter;
4282 FOR_EACH_VEC_ELT (*queue, i, iter)
4283 if (simple_cst_equal (iter, exp) == 1)
4284 break;
4286 if (!iter)
4287 queue->safe_push (exp);
4290 /* Given a tree EXP, find all occurrences of references to fields
4291 in a PLACEHOLDER_EXPR and place them in vector REFS without
4292 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4293 we assume here that EXP contains only arithmetic expressions
4294 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4295 argument list. */
4297 void
4298 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4300 enum tree_code code = TREE_CODE (exp);
4301 tree inner;
4302 int i;
4304 /* We handle TREE_LIST and COMPONENT_REF separately. */
4305 if (code == TREE_LIST)
4307 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4308 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4310 else if (code == COMPONENT_REF)
4312 for (inner = TREE_OPERAND (exp, 0);
4313 REFERENCE_CLASS_P (inner);
4314 inner = TREE_OPERAND (inner, 0))
4317 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4318 push_without_duplicates (exp, refs);
4319 else
4320 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4322 else
4323 switch (TREE_CODE_CLASS (code))
4325 case tcc_constant:
4326 break;
4328 case tcc_declaration:
4329 /* Variables allocated to static storage can stay. */
4330 if (!TREE_STATIC (exp))
4331 push_without_duplicates (exp, refs);
4332 break;
4334 case tcc_expression:
4335 /* This is the pattern built in ada/make_aligning_type. */
4336 if (code == ADDR_EXPR
4337 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4339 push_without_duplicates (exp, refs);
4340 break;
4343 /* Fall through. */
4345 case tcc_exceptional:
4346 case tcc_unary:
4347 case tcc_binary:
4348 case tcc_comparison:
4349 case tcc_reference:
4350 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4351 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4352 break;
4354 case tcc_vl_exp:
4355 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4356 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4357 break;
4359 default:
4360 gcc_unreachable ();
4364 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4365 return a tree with all occurrences of references to F in a
4366 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4367 CONST_DECLs. Note that we assume here that EXP contains only
4368 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4369 occurring only in their argument list. */
4371 tree
4372 substitute_in_expr (tree exp, tree f, tree r)
4374 enum tree_code code = TREE_CODE (exp);
4375 tree op0, op1, op2, op3;
4376 tree new_tree;
4378 /* We handle TREE_LIST and COMPONENT_REF separately. */
4379 if (code == TREE_LIST)
4381 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4382 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4383 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4384 return exp;
4386 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4388 else if (code == COMPONENT_REF)
4390 tree inner;
4392 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4393 and it is the right field, replace it with R. */
4394 for (inner = TREE_OPERAND (exp, 0);
4395 REFERENCE_CLASS_P (inner);
4396 inner = TREE_OPERAND (inner, 0))
4399 /* The field. */
4400 op1 = TREE_OPERAND (exp, 1);
4402 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4403 return r;
4405 /* If this expression hasn't been completed yet, leave it alone. */
4406 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4407 return exp;
4409 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4410 if (op0 == TREE_OPERAND (exp, 0))
4411 return exp;
4413 new_tree
4414 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4416 else
4417 switch (TREE_CODE_CLASS (code))
4419 case tcc_constant:
4420 return exp;
4422 case tcc_declaration:
4423 if (exp == f)
4424 return r;
4425 else
4426 return exp;
4428 case tcc_expression:
4429 if (exp == f)
4430 return r;
4432 /* Fall through. */
4434 case tcc_exceptional:
4435 case tcc_unary:
4436 case tcc_binary:
4437 case tcc_comparison:
4438 case tcc_reference:
4439 switch (TREE_CODE_LENGTH (code))
4441 case 0:
4442 return exp;
4444 case 1:
4445 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4446 if (op0 == TREE_OPERAND (exp, 0))
4447 return exp;
4449 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4450 break;
4452 case 2:
4453 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4454 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4456 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4457 return exp;
4459 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4460 break;
4462 case 3:
4463 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4464 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4465 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4467 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4468 && op2 == TREE_OPERAND (exp, 2))
4469 return exp;
4471 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4472 break;
4474 case 4:
4475 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4476 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4477 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4478 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4480 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4481 && op2 == TREE_OPERAND (exp, 2)
4482 && op3 == TREE_OPERAND (exp, 3))
4483 return exp;
4485 new_tree
4486 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4487 break;
4489 default:
4490 gcc_unreachable ();
4492 break;
4494 case tcc_vl_exp:
4496 int i;
4498 new_tree = NULL_TREE;
4500 /* If we are trying to replace F with a constant or with another
4501 instance of one of the arguments of the call, inline back
4502 functions which do nothing else than computing a value from
4503 the arguments they are passed. This makes it possible to
4504 fold partially or entirely the replacement expression. */
4505 if (code == CALL_EXPR)
4507 bool maybe_inline = false;
4508 if (CONSTANT_CLASS_P (r))
4509 maybe_inline = true;
4510 else
4511 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4512 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4514 maybe_inline = true;
4515 break;
4517 if (maybe_inline)
4519 tree t = maybe_inline_call_in_expr (exp);
4520 if (t)
4521 return SUBSTITUTE_IN_EXPR (t, f, r);
4525 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4527 tree op = TREE_OPERAND (exp, i);
4528 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4529 if (new_op != op)
4531 if (!new_tree)
4532 new_tree = copy_node (exp);
4533 TREE_OPERAND (new_tree, i) = new_op;
4537 if (new_tree)
4539 new_tree = fold (new_tree);
4540 if (TREE_CODE (new_tree) == CALL_EXPR)
4541 process_call_operands (new_tree);
4543 else
4544 return exp;
4546 break;
4548 default:
4549 gcc_unreachable ();
4552 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4554 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4555 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4557 return new_tree;
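/* Hypothetical usage sketch: given a FIELD_DECL whose DECL_SIZE refers
   to discriminant field F through a PLACEHOLDER_EXPR, the size for a
   concrete discriminant value R can be obtained with

     tree size = substitute_in_expr (DECL_SIZE (field), f, r);

   Because the rebuild goes through fold_build*, a constant R usually
   collapses the result to an INTEGER_CST.  */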
4560 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4561 for it within OBJ, a tree that is an object or a chain of references. */
4563 tree
4564 substitute_placeholder_in_expr (tree exp, tree obj)
4566 enum tree_code code = TREE_CODE (exp);
4567 tree op0, op1, op2, op3;
4568 tree new_tree;
4570 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4571 in the chain of OBJ. */
4572 if (code == PLACEHOLDER_EXPR)
4574 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4575 tree elt;
4577 for (elt = obj; elt != 0;
4578 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4579 || TREE_CODE (elt) == COND_EXPR)
4580 ? TREE_OPERAND (elt, 1)
4581 : (REFERENCE_CLASS_P (elt)
4582 || UNARY_CLASS_P (elt)
4583 || BINARY_CLASS_P (elt)
4584 || VL_EXP_CLASS_P (elt)
4585 || EXPRESSION_CLASS_P (elt))
4586 ? TREE_OPERAND (elt, 0) : 0))
4587 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4588 return elt;
4590 for (elt = obj; elt != 0;
4591 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4592 || TREE_CODE (elt) == COND_EXPR)
4593 ? TREE_OPERAND (elt, 1)
4594 : (REFERENCE_CLASS_P (elt)
4595 || UNARY_CLASS_P (elt)
4596 || BINARY_CLASS_P (elt)
4597 || VL_EXP_CLASS_P (elt)
4598 || EXPRESSION_CLASS_P (elt))
4599 ? TREE_OPERAND (elt, 0) : 0))
4600 if (POINTER_TYPE_P (TREE_TYPE (elt))
4601 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4602 == need_type))
4603 return fold_build1 (INDIRECT_REF, need_type, elt);
4605 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4606 survives until RTL generation, there will be an error. */
4607 return exp;
4610 /* TREE_LIST is special because we need to look at TREE_VALUE
4611 and TREE_CHAIN, not TREE_OPERANDS. */
4612 else if (code == TREE_LIST)
4614 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4615 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4616 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4617 return exp;
4619 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4621 else
4622 switch (TREE_CODE_CLASS (code))
4624 case tcc_constant:
4625 case tcc_declaration:
4626 return exp;
4628 case tcc_exceptional:
4629 case tcc_unary:
4630 case tcc_binary:
4631 case tcc_comparison:
4632 case tcc_expression:
4633 case tcc_reference:
4634 case tcc_statement:
4635 switch (TREE_CODE_LENGTH (code))
4637 case 0:
4638 return exp;
4640 case 1:
4641 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4642 if (op0 == TREE_OPERAND (exp, 0))
4643 return exp;
4645 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4646 break;
4648 case 2:
4649 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4650 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4652 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4653 return exp;
4655 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4656 break;
4658 case 3:
4659 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4660 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4661 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4663 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4664 && op2 == TREE_OPERAND (exp, 2))
4665 return exp;
4667 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4668 break;
4670 case 4:
4671 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4672 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4673 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4674 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4676 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4677 && op2 == TREE_OPERAND (exp, 2)
4678 && op3 == TREE_OPERAND (exp, 3))
4679 return exp;
4681 new_tree
4682 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4683 break;
4685 default:
4686 gcc_unreachable ();
4688 break;
4690 case tcc_vl_exp:
4692 int i;
4694 new_tree = NULL_TREE;
4696 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4698 tree op = TREE_OPERAND (exp, i);
4699 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4700 if (new_op != op)
4702 if (!new_tree)
4703 new_tree = copy_node (exp);
4704 TREE_OPERAND (new_tree, i) = new_op;
4708 if (new_tree)
4710 new_tree = fold (new_tree);
4711 if (TREE_CODE (new_tree) == CALL_EXPR)
4712 process_call_operands (new_tree);
4714 else
4715 return exp;
4717 break;
4719 default:
4720 gcc_unreachable ();
4723 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4725 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4726 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4728 return new_tree;
4732 /* Subroutine of stabilize_reference; this is called for subtrees of
4733 references. Any expression with side-effects must be put in a SAVE_EXPR
4734 to ensure that it is only evaluated once.
4736 We don't put SAVE_EXPR nodes around everything, because assigning very
4737 simple expressions to temporaries causes us to miss good opportunities
4738 for optimizations. Among other things, the opportunity to fold in the
4739 addition of a constant into an addressing mode often gets lost, e.g.
4740 "y[i+1] += x;". In general, we take the approach that we should not make
4741 an assignment unless we are forced into it - i.e., that any non-side effect
4742 operator should be allowed, and that cse should take care of coalescing
4743 multiple utterances of the same expression should that prove fruitful. */
4745 static tree
4746 stabilize_reference_1 (tree e)
4748 tree result;
4749 enum tree_code code = TREE_CODE (e);
4751 /* We cannot ignore const expressions because it might be a reference
4752 to a const array but whose index contains side-effects. But we can
4753 ignore things that are actual constant or that already have been
4754 handled by this function. */
4756 if (tree_invariant_p (e))
4757 return e;
4759 switch (TREE_CODE_CLASS (code))
4761 case tcc_exceptional:
4762 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4763 have side-effects. */
4764 if (code == STATEMENT_LIST)
4765 return save_expr (e);
4766 /* FALLTHRU */
4767 case tcc_type:
4768 case tcc_declaration:
4769 case tcc_comparison:
4770 case tcc_statement:
4771 case tcc_expression:
4772 case tcc_reference:
4773 case tcc_vl_exp:
4774 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4775 so that it will only be evaluated once. */
4776 /* The reference (r) and comparison (<) classes could be handled as
4777 below, but it is generally faster to only evaluate them once. */
4778 if (TREE_SIDE_EFFECTS (e))
4779 return save_expr (e);
4780 return e;
4782 case tcc_constant:
4783 /* Constants need no processing. In fact, we should never reach
4784 here. */
4785 return e;
4787 case tcc_binary:
4788 /* Division is slow and tends to be compiled with jumps,
4789 especially the division by powers of 2 that is often
4790 found inside of an array reference. So do it just once. */
4791 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4792 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4793 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4794 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4795 return save_expr (e);
4796 /* Recursively stabilize each operand. */
4797 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4798 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4799 break;
4801 case tcc_unary:
4802 /* Recursively stabilize each operand. */
4803 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4804 break;
4806 default:
4807 gcc_unreachable ();
4810 TREE_TYPE (result) = TREE_TYPE (e);
4811 TREE_READONLY (result) = TREE_READONLY (e);
4812 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4813 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4815 return result;
4818 /* Stabilize a reference so that we can use it any number of times
4819 without causing its operands to be evaluated more than once.
4820 Returns the stabilized reference. This works by means of save_expr,
4821 so see the caveats in the comments about save_expr.
4823 Also allows conversion expressions whose operands are references.
4824 Any other kind of expression is returned unchanged. */
4826 tree
4827 stabilize_reference (tree ref)
4829 tree result;
4830 enum tree_code code = TREE_CODE (ref);
4832 switch (code)
4834 case VAR_DECL:
4835 case PARM_DECL:
4836 case RESULT_DECL:
4837 /* No action is needed in this case. */
4838 return ref;
4840 CASE_CONVERT:
4841 case FLOAT_EXPR:
4842 case FIX_TRUNC_EXPR:
4843 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4844 break;
4846 case INDIRECT_REF:
4847 result = build_nt (INDIRECT_REF,
4848 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4849 break;
4851 case COMPONENT_REF:
4852 result = build_nt (COMPONENT_REF,
4853 stabilize_reference (TREE_OPERAND (ref, 0)),
4854 TREE_OPERAND (ref, 1), NULL_TREE);
4855 break;
4857 case BIT_FIELD_REF:
4858 result = build_nt (BIT_FIELD_REF,
4859 stabilize_reference (TREE_OPERAND (ref, 0)),
4860 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4861 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4862 break;
4864 case ARRAY_REF:
4865 result = build_nt (ARRAY_REF,
4866 stabilize_reference (TREE_OPERAND (ref, 0)),
4867 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4868 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4869 break;
4871 case ARRAY_RANGE_REF:
4872 result = build_nt (ARRAY_RANGE_REF,
4873 stabilize_reference (TREE_OPERAND (ref, 0)),
4874 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4875 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4876 break;
4878 case COMPOUND_EXPR:
4879 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4880 it wouldn't be ignored. This matters when dealing with
4881 volatiles. */
4882 return stabilize_reference_1 (ref);
4884 /* If arg isn't a kind of lvalue we recognize, make no change.
4885 Caller should recognize the error for an invalid lvalue. */
4886 default:
4887 return ref;
4889 case ERROR_MARK:
4890 return error_mark_node;
4893 TREE_TYPE (result) = TREE_TYPE (ref);
4894 TREE_READONLY (result) = TREE_READONLY (ref);
4895 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4896 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4897 protected_set_expr_location (result, EXPR_LOCATION (ref));
4899 return result;
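/* For example: stabilizing a[i++] yields an ARRAY_REF whose index
   operand is wrapped in a SAVE_EXPR, so the reference can appear on both
   sides of an assignment without incrementing i twice, while the base a
   is left untouched because it has no side effects.  (Sketch only.)  */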
4902 /* Low-level constructors for expressions. */
4904 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4905 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4907 void
4908 recompute_tree_invariant_for_addr_expr (tree t)
4910 tree node;
4911 bool tc = true, se = false;
4913 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4915 /* We started out assuming this address is both invariant and constant, but
4916 does not have side effects. Now go down any handled components and see if
4917 any of them involve offsets that are either non-constant or non-invariant.
4918 Also check for side-effects.
4920 ??? Note that this code makes no attempt to deal with the case where
4921 taking the address of something causes a copy due to misalignment. */
4923 #define UPDATE_FLAGS(NODE) \
4924 do { tree _node = (NODE); \
4925 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4926 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4928 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4929 node = TREE_OPERAND (node, 0))
4931 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4932 array reference (probably made temporarily by the G++ front end),
4933 so ignore all the operands. */
4934 if ((TREE_CODE (node) == ARRAY_REF
4935 || TREE_CODE (node) == ARRAY_RANGE_REF)
4936 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4938 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4939 if (TREE_OPERAND (node, 2))
4940 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4941 if (TREE_OPERAND (node, 3))
4942 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4944 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4945 FIELD_DECL, apparently. The G++ front end can put something else
4946 there, at least temporarily. */
4947 else if (TREE_CODE (node) == COMPONENT_REF
4948 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4950 if (TREE_OPERAND (node, 2))
4951 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4955 node = lang_hooks.expr_to_decl (node, &tc, &se);
4957 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4958 the address, since &(*a)->b is a form of addition. If it's a constant, the
4959 address is constant too. If it's a decl, its address is constant if the
4960 decl is static. Everything else is not constant and, furthermore,
4961 taking the address of a volatile variable is not volatile. */
4962 if (INDIRECT_REF_P (node)
4963 || TREE_CODE (node) == MEM_REF)
4964 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4965 else if (CONSTANT_CLASS_P (node))
4967 else if (DECL_P (node))
4968 tc &= (staticp (node) != NULL_TREE);
4969 else
4971 tc = false;
4972 se |= TREE_SIDE_EFFECTS (node);
4976 TREE_CONSTANT (t) = tc;
4977 TREE_SIDE_EFFECTS (t) = se;
4978 #undef UPDATE_FLAGS
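/* Illustrative cases: &static_var and &static_array[3] come out with
   TREE_CONSTANT set; &static_array[i] for a non-constant i does not; and
   &p->field for a pointer p is really an addition, so it inherits its
   flags from p via the INDIRECT_REF/MEM_REF case below.  */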
4981 /* Build an expression of code CODE, data type TYPE, and operands as
4982 specified. Expressions and reference nodes can be created this way.
4983 Constants, decls, types and misc nodes cannot be.
4985 We define six non-variadic functions, from 0 to 5 arguments. This is
4986 enough for all extant tree codes. */
4988 tree
4989 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4991 tree t;
4993 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4995 t = make_node (code PASS_MEM_STAT);
4996 TREE_TYPE (t) = tt;
4998 return t;
5001 tree
5002 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
5004 int length = sizeof (struct tree_exp);
5005 tree t;
5007 record_node_allocation_statistics (code, length);
5009 gcc_assert (TREE_CODE_LENGTH (code) == 1);
5011 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
5013 memset (t, 0, sizeof (struct tree_common));
5015 TREE_SET_CODE (t, code);
5017 TREE_TYPE (t) = type;
5018 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
5019 TREE_OPERAND (t, 0) = node;
5020 if (node && !TYPE_P (node))
5022 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
5023 TREE_READONLY (t) = TREE_READONLY (node);
5026 if (TREE_CODE_CLASS (code) == tcc_statement)
5028 if (code != DEBUG_BEGIN_STMT)
5029 TREE_SIDE_EFFECTS (t) = 1;
5031 else switch (code)
5033 case VA_ARG_EXPR:
5034 /* All of these have side-effects, no matter what their
5035 operands are. */
5036 TREE_SIDE_EFFECTS (t) = 1;
5037 TREE_READONLY (t) = 0;
5038 break;
5040 case INDIRECT_REF:
5041 /* Whether a dereference is readonly has nothing to do with whether
5042 its operand is readonly. */
5043 TREE_READONLY (t) = 0;
5044 break;
5046 case ADDR_EXPR:
5047 if (node)
5048 recompute_tree_invariant_for_addr_expr (t);
5049 break;
5051 default:
5052 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5053 && node && !TYPE_P (node)
5054 && TREE_CONSTANT (node))
5055 TREE_CONSTANT (t) = 1;
5056 if (TREE_CODE_CLASS (code) == tcc_reference
5057 && node && TREE_THIS_VOLATILE (node))
5058 TREE_THIS_VOLATILE (t) = 1;
5059 break;
5062 return t;
5065 #define PROCESS_ARG(N) \
5066 do { \
5067 TREE_OPERAND (t, N) = arg##N; \
5068 if (arg##N &&!TYPE_P (arg##N)) \
5070 if (TREE_SIDE_EFFECTS (arg##N)) \
5071 side_effects = 1; \
5072 if (!TREE_READONLY (arg##N) \
5073 && !CONSTANT_CLASS_P (arg##N)) \
5074 (void) (read_only = 0); \
5075 if (!TREE_CONSTANT (arg##N)) \
5076 (void) (constant = 0); \
5078 } while (0)
5080 tree
5081 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5083 bool constant, read_only, side_effects, div_by_zero;
5084 tree t;
5086 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5088 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5089 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5090 /* When sizetype precision doesn't match that of pointers
5091 we need to be able to build explicit extensions or truncations
5092 of the offset argument. */
5093 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5094 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5095 && TREE_CODE (arg1) == INTEGER_CST);
5097 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5098 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5099 && ptrofftype_p (TREE_TYPE (arg1)));
5101 t = make_node (code PASS_MEM_STAT);
5102 TREE_TYPE (t) = tt;
5104 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5105 result based on those same flags for the arguments. But if the
5106 arguments aren't really even `tree' expressions, we shouldn't be trying
5107 to do this. */
5109 /* Expressions without side effects may be constant if their
5110 arguments are as well. */
5111 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5112 || TREE_CODE_CLASS (code) == tcc_binary);
5113 read_only = 1;
5114 side_effects = TREE_SIDE_EFFECTS (t);
5116 switch (code)
5118 case TRUNC_DIV_EXPR:
5119 case CEIL_DIV_EXPR:
5120 case FLOOR_DIV_EXPR:
5121 case ROUND_DIV_EXPR:
5122 case EXACT_DIV_EXPR:
5123 case CEIL_MOD_EXPR:
5124 case FLOOR_MOD_EXPR:
5125 case ROUND_MOD_EXPR:
5126 case TRUNC_MOD_EXPR:
5127 div_by_zero = integer_zerop (arg1);
5128 break;
5129 default:
5130 div_by_zero = false;
5133 PROCESS_ARG (0);
5134 PROCESS_ARG (1);
5136 TREE_SIDE_EFFECTS (t) = side_effects;
5137 if (code == MEM_REF)
5139 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5141 tree o = TREE_OPERAND (arg0, 0);
5142 TREE_READONLY (t) = TREE_READONLY (o);
5143 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5146 else
5148 TREE_READONLY (t) = read_only;
5149 /* Don't mark X / 0 as constant. */
5150 TREE_CONSTANT (t) = constant && !div_by_zero;
5151 TREE_THIS_VOLATILE (t)
5152 = (TREE_CODE_CLASS (code) == tcc_reference
5153 && arg0 && TREE_THIS_VOLATILE (arg0));
5156 return t;
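/* A minimal sketch of the POINTER_PLUS_EXPR rule asserted above: the
   offset operand must have pointer-offset type (sizetype), never the
   pointer type itself, e.g.

     tree off = fold_convert (sizetype, idx);
     tree addr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   where ptr and idx stand for previously built trees.  */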
5160 tree
5161 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5162 tree arg2 MEM_STAT_DECL)
5164 bool constant, read_only, side_effects;
5165 tree t;
5167 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5168 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5170 t = make_node (code PASS_MEM_STAT);
5171 TREE_TYPE (t) = tt;
5173 read_only = 1;
5175 /* As a special exception, if COND_EXPR has NULL branches, we
5176 assume that it is a gimple statement and always consider
5177 it to have side effects. */
5178 if (code == COND_EXPR
5179 && tt == void_type_node
5180 && arg1 == NULL_TREE
5181 && arg2 == NULL_TREE)
5182 side_effects = true;
5183 else
5184 side_effects = TREE_SIDE_EFFECTS (t);
5186 PROCESS_ARG (0);
5187 PROCESS_ARG (1);
5188 PROCESS_ARG (2);
5190 if (code == COND_EXPR)
5191 TREE_READONLY (t) = read_only;
5193 TREE_SIDE_EFFECTS (t) = side_effects;
5194 TREE_THIS_VOLATILE (t)
5195 = (TREE_CODE_CLASS (code) == tcc_reference
5196 && arg0 && TREE_THIS_VOLATILE (arg0));
5198 return t;
5201 tree
5202 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5203 tree arg2, tree arg3 MEM_STAT_DECL)
5205 bool constant, read_only, side_effects;
5206 tree t;
5208 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5210 t = make_node (code PASS_MEM_STAT);
5211 TREE_TYPE (t) = tt;
5213 side_effects = TREE_SIDE_EFFECTS (t);
5215 PROCESS_ARG (0);
5216 PROCESS_ARG (1);
5217 PROCESS_ARG (2);
5218 PROCESS_ARG (3);
5220 TREE_SIDE_EFFECTS (t) = side_effects;
5221 TREE_THIS_VOLATILE (t)
5222 = (TREE_CODE_CLASS (code) == tcc_reference
5223 && arg0 && TREE_THIS_VOLATILE (arg0));
5225 return t;
5228 tree
5229 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5230 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5232 bool constant, read_only, side_effects;
5233 tree t;
5235 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5237 t = make_node (code PASS_MEM_STAT);
5238 TREE_TYPE (t) = tt;
5240 side_effects = TREE_SIDE_EFFECTS (t);
5242 PROCESS_ARG (0);
5243 PROCESS_ARG (1);
5244 PROCESS_ARG (2);
5245 PROCESS_ARG (3);
5246 PROCESS_ARG (4);
5248 TREE_SIDE_EFFECTS (t) = side_effects;
5249 if (code == TARGET_MEM_REF)
5251 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5253 tree o = TREE_OPERAND (arg0, 0);
5254 TREE_READONLY (t) = TREE_READONLY (o);
5255 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5258 else
5259 TREE_THIS_VOLATILE (t)
5260 = (TREE_CODE_CLASS (code) == tcc_reference
5261 && arg0 && TREE_THIS_VOLATILE (arg0));
5263 return t;
5266 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5267 on the pointer PTR. */
5269 tree
5270 build_simple_mem_ref_loc (location_t loc, tree ptr)
5272 poly_int64 offset = 0;
5273 tree ptype = TREE_TYPE (ptr);
5274 tree tem;
5275 /* For convenience allow addresses that collapse to a simple base
5276 and offset. */
5277 if (TREE_CODE (ptr) == ADDR_EXPR
5278 && (handled_component_p (TREE_OPERAND (ptr, 0))
5279 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5281 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5282 gcc_assert (ptr);
5283 if (TREE_CODE (ptr) == MEM_REF)
5285 offset += mem_ref_offset (ptr).force_shwi ();
5286 ptr = TREE_OPERAND (ptr, 0);
5288 else
5289 ptr = build_fold_addr_expr (ptr);
5290 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5292 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5293 ptr, build_int_cst (ptype, offset));
5294 SET_EXPR_LOCATION (tem, loc);
5295 return tem;
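/* Usage sketch: for a pointer SSA name or invariant address PTR,
   build_simple_mem_ref (ptr) (the tree.h wrapper that passes
   UNKNOWN_LOCATION here) yields a MEM_REF equivalent to *ptr with a
   zero offset.  */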
5298 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5300 poly_offset_int
5301 mem_ref_offset (const_tree t)
5303 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5304 SIGNED);
5307 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5308 offsetted by OFFSET units. */
5310 tree
5311 build_invariant_address (tree type, tree base, poly_int64 offset)
5313 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5314 build_fold_addr_expr (base),
5315 build_int_cst (ptr_type_node, offset));
5316 tree addr = build1 (ADDR_EXPR, type, ref);
5317 recompute_tree_invariant_for_addr_expr (addr);
5318 return addr;
5321 /* Similar except don't specify the TREE_TYPE
5322 and leave the TREE_SIDE_EFFECTS as 0.
5323 It is permissible for arguments to be null,
5324 or even garbage if their values do not matter. */
5326 tree
5327 build_nt (enum tree_code code, ...)
5329 tree t;
5330 int length;
5331 int i;
5332 va_list p;
5334 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5336 va_start (p, code);
5338 t = make_node (code);
5339 length = TREE_CODE_LENGTH (code);
5341 for (i = 0; i < length; i++)
5342 TREE_OPERAND (t, i) = va_arg (p, tree);
5344 va_end (p);
5345 return t;
5348 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5349 tree vec. */
5351 tree
5352 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5354 tree ret, t;
5355 unsigned int ix;
5357 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5358 CALL_EXPR_FN (ret) = fn;
5359 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5360 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5361 CALL_EXPR_ARG (ret, ix) = t;
5362 return ret;
5365 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5366 and data type TYPE.
5367 We do NOT enter this node in any sort of symbol table.
5369 LOC is the location of the decl.
5371 layout_decl is used to set up the decl's storage layout.
5372 Other slots are initialized to 0 or null pointers. */
5374 tree
5375 build_decl (location_t loc, enum tree_code code, tree name,
5376 tree type MEM_STAT_DECL)
5378 tree t;
5380 t = make_node (code PASS_MEM_STAT);
5381 DECL_SOURCE_LOCATION (t) = loc;
5383 /* if (type == error_mark_node)
5384 type = integer_type_node; */
5385 /* That is not done, deliberately, so that having error_mark_node
5386 as the type can suppress useless errors in the use of this variable. */
5388 DECL_NAME (t) = name;
5389 TREE_TYPE (t) = type;
5391 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5392 layout_decl (t, 0);
5394 return t;
5397 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5399 tree
5400 build_debug_expr_decl (tree type)
5402 tree vexpr = make_node (DEBUG_EXPR_DECL);
5403 DECL_ARTIFICIAL (vexpr) = 1;
5404 TREE_TYPE (vexpr) = type;
5405 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5406 return vexpr;
5409 /* Builds and returns function declaration with NAME and TYPE. */
5411 tree
5412 build_fn_decl (const char *name, tree type)
5414 tree id = get_identifier (name);
5415 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5417 DECL_EXTERNAL (decl) = 1;
5418 TREE_PUBLIC (decl) = 1;
5419 DECL_ARTIFICIAL (decl) = 1;
5420 TREE_NOTHROW (decl) = 1;
5422 return decl;
5425 vec<tree, va_gc> *all_translation_units;
5427 /* Builds a new translation-unit decl with name NAME, queues it in the
5428 global list of translation-unit decls and returns it. */
5430 tree
5431 build_translation_unit_decl (tree name)
5433 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5434 name, NULL_TREE);
5435 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5436 vec_safe_push (all_translation_units, tu);
5437 return tu;
5441 /* BLOCK nodes are used to represent the structure of binding contours
5442 and declarations, once those contours have been exited and their contents
5443 compiled. This information is used for outputting debugging info. */
5445 tree
5446 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5448 tree block = make_node (BLOCK);
5450 BLOCK_VARS (block) = vars;
5451 BLOCK_SUBBLOCKS (block) = subblocks;
5452 BLOCK_SUPERCONTEXT (block) = supercontext;
5453 BLOCK_CHAIN (block) = chain;
5454 return block;
5458 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5460 LOC is the location to use in tree T. */
5462 void
5463 protected_set_expr_location (tree t, location_t loc)
5465 if (CAN_HAVE_LOCATION_P (t))
5466 SET_EXPR_LOCATION (t, loc);
5467 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5469 t = expr_single (t);
5470 if (t && CAN_HAVE_LOCATION_P (t))
5471 SET_EXPR_LOCATION (t, loc);
5475 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5476 UNKNOWN_LOCATION. */
5478 void
5479 protected_set_expr_location_if_unset (tree t, location_t loc)
5481 t = expr_single (t);
5482 if (t && !EXPR_HAS_LOCATION (t))
5483 protected_set_expr_location (t, loc);
5486 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5487 of the various TYPE_QUAL values. */
5489 static void
5490 set_type_quals (tree type, int type_quals)
5492 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5493 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5494 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5495 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5496 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5499 /* Returns true iff CAND and BASE have equivalent language-specific
5500 qualifiers. */
5502 bool
5503 check_lang_type (const_tree cand, const_tree base)
5505 if (lang_hooks.types.type_hash_eq == NULL)
5506 return true;
5507 /* type_hash_eq currently only applies to these types. */
5508 if (TREE_CODE (cand) != FUNCTION_TYPE
5509 && TREE_CODE (cand) != METHOD_TYPE)
5510 return true;
5511 return lang_hooks.types.type_hash_eq (cand, base);
5514 /* This function checks to see if TYPE matches the size of one of the
5515 built-in atomic types, and returns that core atomic type. */
5517 static tree
5518 find_atomic_core_type (const_tree type)
5520 tree base_atomic_type;
5522 /* Only handle complete types. */
5523 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5524 return NULL_TREE;
5526 switch (tree_to_uhwi (TYPE_SIZE (type)))
5528 case 8:
5529 base_atomic_type = atomicQI_type_node;
5530 break;
5532 case 16:
5533 base_atomic_type = atomicHI_type_node;
5534 break;
5536 case 32:
5537 base_atomic_type = atomicSI_type_node;
5538 break;
5540 case 64:
5541 base_atomic_type = atomicDI_type_node;
5542 break;
5544 case 128:
5545 base_atomic_type = atomicTI_type_node;
5546 break;
5548 default:
5549 base_atomic_type = NULL_TREE;
5552 return base_atomic_type;
5555 /* Returns true iff unqualified CAND and BASE are equivalent. */
5557 bool
5558 check_base_type (const_tree cand, const_tree base)
5560 if (TYPE_NAME (cand) != TYPE_NAME (base)
5561 /* Apparently this is needed for Objective-C. */
5562 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5563 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5564 TYPE_ATTRIBUTES (base)))
5565 return false;
5566 /* Check alignment. */
5567 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5568 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5569 return true;
5570 /* Atomic types increase minimal alignment. We must do so as well
5571 or we get duplicated canonical types. See PR88686. */
5572 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5574 /* See if this object can map to a basic atomic type. */
5575 tree atomic_type = find_atomic_core_type (cand);
5576 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5577 return true;
5579 return false;
5582 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5584 bool
5585 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5587 return (TYPE_QUALS (cand) == type_quals
5588 && check_base_type (cand, base)
5589 && check_lang_type (cand, base));
5592 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5594 static bool
5595 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5597 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5598 && TYPE_NAME (cand) == TYPE_NAME (base)
5599 /* Apparently this is needed for Objective-C. */
5600 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5601 /* Check alignment. */
5602 && TYPE_ALIGN (cand) == align
5603 /* Check this is a user-aligned type as build_aligned_type
5604 would create. */
5605 && TYPE_USER_ALIGN (cand)
5606 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5607 TYPE_ATTRIBUTES (base))
5608 && check_lang_type (cand, base));
5611 /* Return a version of the TYPE, qualified as indicated by the
5612 TYPE_QUALS, if one exists. If no qualified version exists yet,
5613 return NULL_TREE. */
5615 tree
5616 get_qualified_type (tree type, int type_quals)
5618 if (TYPE_QUALS (type) == type_quals)
5619 return type;
5621 tree mv = TYPE_MAIN_VARIANT (type);
5622 if (check_qualified_type (mv, type, type_quals))
5623 return mv;
5625 /* Search the chain of variants to see if there is already one there just
5626 like the one we need to have. If so, use that existing one. We must
5627 preserve the TYPE_NAME, since there is code that depends on this. */
5628 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5629 if (check_qualified_type (*tp, type, type_quals))
5631 /* Put the found variant at the head of the variant list so
5632 frequently searched variants get found faster. The C++ FE
5633 benefits greatly from this. */
5634 tree t = *tp;
5635 *tp = TYPE_NEXT_VARIANT (t);
5636 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5637 TYPE_NEXT_VARIANT (mv) = t;
5638 return t;
5641 return NULL_TREE;
5644 /* Like get_qualified_type, but creates the type if it does not
5645 exist. This function never returns NULL_TREE. */
5647 tree
5648 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5650 tree t;
5652 /* See if we already have the appropriate qualified variant. */
5653 t = get_qualified_type (type, type_quals);
5655 /* If not, build it. */
5656 if (!t)
5658 t = build_variant_type_copy (type PASS_MEM_STAT);
5659 set_type_quals (t, type_quals);
5661 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5663 /* See if this object can map to a basic atomic type. */
5664 tree atomic_type = find_atomic_core_type (type);
5665 if (atomic_type)
5667 /* Ensure the alignment of this type is compatible with
5668 the required alignment of the atomic type. */
5669 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5670 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5674 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5675 /* Propagate structural equality. */
5676 SET_TYPE_STRUCTURAL_EQUALITY (t);
5677 else if (TYPE_CANONICAL (type) != type)
5678 /* Build the underlying canonical type, since it is different
5679 from TYPE. */
5681 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5682 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5684 else
5685 /* T is its own canonical type. */
5686 TYPE_CANONICAL (t) = t;
5690 return t;
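/* For example, the type for "const volatile int" can be obtained with

     tree t = build_qualified_type (integer_type_node,
                                    TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   and a second call with the same arguments returns the very same node,
   because get_qualified_type finds it on the variant chain.  */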
5693 /* Create a variant of type T with alignment ALIGN. */
5695 tree
5696 build_aligned_type (tree type, unsigned int align)
5698 tree t;
5700 if (TYPE_PACKED (type)
5701 || TYPE_ALIGN (type) == align)
5702 return type;
5704 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5705 if (check_aligned_type (t, type, align))
5706 return t;
5708 t = build_variant_type_copy (type);
5709 SET_TYPE_ALIGN (t, align);
5710 TYPE_USER_ALIGN (t) = 1;
5712 return t;
5715 /* Create a new distinct copy of TYPE. The new type is made its own
5716 MAIN_VARIANT. If TYPE requires structural equality checks, the
5717 resulting type requires structural equality checks; otherwise, its
5718 TYPE_CANONICAL points to itself. */
5720 tree
5721 build_distinct_type_copy (tree type MEM_STAT_DECL)
5723 tree t = copy_node (type PASS_MEM_STAT);
5725 TYPE_POINTER_TO (t) = 0;
5726 TYPE_REFERENCE_TO (t) = 0;
5728 /* Set the canonical type either to a new equivalence class, or
5729 propagate the need for structural equality checks. */
5730 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5731 SET_TYPE_STRUCTURAL_EQUALITY (t);
5732 else
5733 TYPE_CANONICAL (t) = t;
5735 /* Make it its own variant. */
5736 TYPE_MAIN_VARIANT (t) = t;
5737 TYPE_NEXT_VARIANT (t) = 0;
5739 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5740 whose TREE_TYPE is not t. This can also happen in the Ada
5741 frontend when using subtypes. */
5743 return t;
5746 /* Create a new variant of TYPE, equivalent but distinct. This is so
5747 the caller can modify it. TYPE_CANONICAL for the return type will
5748 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5749 are considered equal by the language itself (or that both types
5750 require structural equality checks). */
5752 tree
5753 build_variant_type_copy (tree type MEM_STAT_DECL)
5755 tree t, m = TYPE_MAIN_VARIANT (type);
5757 t = build_distinct_type_copy (type PASS_MEM_STAT);
5759 /* Since we're building a variant, assume that it is a non-semantic
5760 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5761 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5762 /* Type variants have no alias set defined. */
5763 TYPE_ALIAS_SET (t) = -1;
5765 /* Add the new type to the chain of variants of TYPE. */
5766 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5767 TYPE_NEXT_VARIANT (m) = t;
5768 TYPE_MAIN_VARIANT (t) = m;
5770 return t;
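/* In short, the difference between the two copies above: a distinct copy
   starts a new equivalence class (it becomes its own TYPE_MAIN_VARIANT
   and, unless structural equality is required, its own TYPE_CANONICAL),
   while a variant copy stays on the original's variant chain and shares
   its TYPE_CANONICAL, so the middle end still treats the two as the same
   type.  */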
5773 /* Return true if the FROM trees in both tree maps are equal. */
5776 tree_map_base_eq (const void *va, const void *vb)
5778 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5779 *const b = (const struct tree_map_base *) vb;
5780 return (a->from == b->from);
5783 /* Hash a from tree in a tree_base_map. */
5785 unsigned int
5786 tree_map_base_hash (const void *item)
5788 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5791 /* Return true if this tree map structure is marked for garbage collection
5792 purposes. We simply return true if the from tree is marked, so that this
5793 structure goes away when the from tree goes away. */
5796 tree_map_base_marked_p (const void *p)
5798 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5801 /* Hash a from tree in a tree_map. */
5803 unsigned int
5804 tree_map_hash (const void *item)
5806 return (((const struct tree_map *) item)->hash);
5809 /* Hash a from tree in a tree_decl_map. */
5811 unsigned int
5812 tree_decl_map_hash (const void *item)
5814 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5817 /* Return the initialization priority for DECL. */
5819 priority_type
5820 decl_init_priority_lookup (tree decl)
5822 symtab_node *snode = symtab_node::get (decl);
5824 if (!snode)
5825 return DEFAULT_INIT_PRIORITY;
5826 return
5827 snode->get_init_priority ();
5830 /* Return the finalization priority for DECL. */
5832 priority_type
5833 decl_fini_priority_lookup (tree decl)
5835 cgraph_node *node = cgraph_node::get (decl);
5837 if (!node)
5838 return DEFAULT_INIT_PRIORITY;
5839 return
5840 node->get_fini_priority ();
5843 /* Set the initialization priority for DECL to PRIORITY. */
5845 void
5846 decl_init_priority_insert (tree decl, priority_type priority)
5848 struct symtab_node *snode;
5850 if (priority == DEFAULT_INIT_PRIORITY)
5852 snode = symtab_node::get (decl);
5853 if (!snode)
5854 return;
5856 else if (VAR_P (decl))
5857 snode = varpool_node::get_create (decl);
5858 else
5859 snode = cgraph_node::get_create (decl);
5860 snode->set_init_priority (priority);
5863 /* Set the finalization priority for DECL to PRIORITY. */
5865 void
5866 decl_fini_priority_insert (tree decl, priority_type priority)
5868 struct cgraph_node *node;
5870 if (priority == DEFAULT_INIT_PRIORITY)
5872 node = cgraph_node::get (decl);
5873 if (!node)
5874 return;
5876 else
5877 node = cgraph_node::get_create (decl);
5878 node->set_fini_priority (priority);
5881 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5883 static void
5884 print_debug_expr_statistics (void)
5886 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5887 (long) debug_expr_for_decl->size (),
5888 (long) debug_expr_for_decl->elements (),
5889 debug_expr_for_decl->collisions ());
5892 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5894 static void
5895 print_value_expr_statistics (void)
5897 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5898 (long) value_expr_for_decl->size (),
5899 (long) value_expr_for_decl->elements (),
5900 value_expr_for_decl->collisions ());
5903 /* Lookup a debug expression for FROM, and return it if we find one. */
5905 tree
5906 decl_debug_expr_lookup (tree from)
5908 struct tree_decl_map *h, in;
5909 in.base.from = from;
5911 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5912 if (h)
5913 return h->to;
5914 return NULL_TREE;
5917 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5919 void
5920 decl_debug_expr_insert (tree from, tree to)
5922 struct tree_decl_map *h;
5924 h = ggc_alloc<tree_decl_map> ();
5925 h->base.from = from;
5926 h->to = to;
5927 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5930 /* Lookup a value expression for FROM, and return it if we find one. */
5932 tree
5933 decl_value_expr_lookup (tree from)
5935 struct tree_decl_map *h, in;
5936 in.base.from = from;
5938 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5939 if (h)
5940 return h->to;
5941 return NULL_TREE;
5944 /* Insert a mapping FROM->TO in the value expression hashtable. */
5946 void
5947 decl_value_expr_insert (tree from, tree to)
5949 struct tree_decl_map *h;
5951 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5952 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5954 h = ggc_alloc<tree_decl_map> ();
5955 h->base.from = from;
5956 h->to = to;
5957 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5960 /* Lookup a vector of debug arguments for FROM, and return it if we
5961 find one. */
5963 vec<tree, va_gc> **
5964 decl_debug_args_lookup (tree from)
5966 struct tree_vec_map *h, in;
5968 if (!DECL_HAS_DEBUG_ARGS_P (from))
5969 return NULL;
5970 gcc_checking_assert (debug_args_for_decl != NULL);
5971 in.base.from = from;
5972 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5973 if (h)
5974 return &h->to;
5975 return NULL;
5978 /* Insert a mapping FROM->empty vector of debug arguments in the
5979 debug arguments hashtable. */
5981 vec<tree, va_gc> **
5982 decl_debug_args_insert (tree from)
5984 struct tree_vec_map *h;
5985 tree_vec_map **loc;
5987 if (DECL_HAS_DEBUG_ARGS_P (from))
5988 return decl_debug_args_lookup (from);
5989 if (debug_args_for_decl == NULL)
5990 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5991 h = ggc_alloc<tree_vec_map> ();
5992 h->base.from = from;
5993 h->to = NULL;
5994 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5995 *loc = h;
5996 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5997 return &h->to;
6000 /* Hashing of types so that we don't make duplicates.
6001 The entry point is `type_hash_canon'. */
6003 /* Generate the default hash code for TYPE. This is designed for
6004 speed, rather than maximum entropy. */
6006 hashval_t
6007 type_hash_canon_hash (tree type)
6009 inchash::hash hstate;
6011 hstate.add_int (TREE_CODE (type));
6013 if (TREE_TYPE (type))
6014 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6016 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6017 /* Just the identifier is adequate to distinguish. */
6018 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6020 switch (TREE_CODE (type))
6022 case METHOD_TYPE:
6023 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6024 /* FALLTHROUGH. */
6025 case FUNCTION_TYPE:
6026 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6027 if (TREE_VALUE (t) != error_mark_node)
6028 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6029 break;
6031 case OFFSET_TYPE:
6032 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6033 break;
6035 case ARRAY_TYPE:
6037 if (TYPE_DOMAIN (type))
6038 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6039 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6041 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6042 hstate.add_object (typeless);
6045 break;
6047 case INTEGER_TYPE:
6049 tree t = TYPE_MAX_VALUE (type);
6050 if (!t)
6051 t = TYPE_MIN_VALUE (type);
6052 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6053 hstate.add_object (TREE_INT_CST_ELT (t, i));
6054 break;
6057 case REAL_TYPE:
6058 case FIXED_POINT_TYPE:
6060 unsigned prec = TYPE_PRECISION (type);
6061 hstate.add_object (prec);
6062 break;
6065 case VECTOR_TYPE:
6066 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6067 break;
6069 default:
6070 break;
6073 return hstate.end ();
6076 /* These are the Hashtable callback functions. */
6078 /* Returns true iff the types are equivalent. */
6080 bool
6081 type_cache_hasher::equal (type_hash *a, type_hash *b)
6083 /* First test the things that are the same for all types. */
6084 if (a->hash != b->hash
6085 || TREE_CODE (a->type) != TREE_CODE (b->type)
6086 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6087 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6088 TYPE_ATTRIBUTES (b->type))
6089 || (TREE_CODE (a->type) != COMPLEX_TYPE
6090 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6091 return 0;
6093 /* Be careful about comparing arrays before and after the element type
6094 has been completed; don't compare TYPE_ALIGN unless both types are
6095 complete. */
6096 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6097 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6098 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6099 return 0;
6101 switch (TREE_CODE (a->type))
6103 case VOID_TYPE:
6104 case OPAQUE_TYPE:
6105 case COMPLEX_TYPE:
6106 case POINTER_TYPE:
6107 case REFERENCE_TYPE:
6108 case NULLPTR_TYPE:
6109 return 1;
6111 case VECTOR_TYPE:
6112 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6113 TYPE_VECTOR_SUBPARTS (b->type));
6115 case ENUMERAL_TYPE:
6116 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6117 && !(TYPE_VALUES (a->type)
6118 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6119 && TYPE_VALUES (b->type)
6120 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6121 && type_list_equal (TYPE_VALUES (a->type),
6122 TYPE_VALUES (b->type))))
6123 return 0;
6125 /* fall through */
6127 case INTEGER_TYPE:
6128 case REAL_TYPE:
6129 case BOOLEAN_TYPE:
6130 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6131 return false;
6132 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6133 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6134 TYPE_MAX_VALUE (b->type)))
6135 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6136 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6137 TYPE_MIN_VALUE (b->type))));
6139 case FIXED_POINT_TYPE:
6140 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6142 case OFFSET_TYPE:
6143 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6145 case METHOD_TYPE:
6146 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6147 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6148 || (TYPE_ARG_TYPES (a->type)
6149 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6150 && TYPE_ARG_TYPES (b->type)
6151 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6152 && type_list_equal (TYPE_ARG_TYPES (a->type),
6153 TYPE_ARG_TYPES (b->type)))))
6154 break;
6155 return 0;
6156 case ARRAY_TYPE:
6157 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6158 where the flag should be inherited from the element type
6159 and can change after ARRAY_TYPEs are created; on non-aggregates
6160 compare and hash it: scalars will never have that flag set,
6161 and we need to differentiate between arrays created by different
6162 front ends and middle-end-created arrays. */
6163 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6164 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6165 || (TYPE_TYPELESS_STORAGE (a->type)
6166 == TYPE_TYPELESS_STORAGE (b->type))));
6168 case RECORD_TYPE:
6169 case UNION_TYPE:
6170 case QUAL_UNION_TYPE:
6171 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6172 || (TYPE_FIELDS (a->type)
6173 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6174 && TYPE_FIELDS (b->type)
6175 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6176 && type_list_equal (TYPE_FIELDS (a->type),
6177 TYPE_FIELDS (b->type))));
6179 case FUNCTION_TYPE:
6180 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6181 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6182 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6183 || (TYPE_ARG_TYPES (a->type)
6184 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6185 && TYPE_ARG_TYPES (b->type)
6186 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6187 && type_list_equal (TYPE_ARG_TYPES (a->type),
6188 TYPE_ARG_TYPES (b->type))))
6189 break;
6190 return 0;
6192 default:
6193 return 0;
6196 if (lang_hooks.types.type_hash_eq != NULL)
6197 return lang_hooks.types.type_hash_eq (a->type, b->type);
6199 return 1;
6202 /* Given TYPE, and HASHCODE its hash code, return the canonical
6203 object for an identical type if one already exists.
6204 Otherwise, return TYPE, and record it as the canonical object.
6206 To use this function, first create a type of the sort you want.
6207 Then compute its hash code from the fields of the type that
6208 make it different from other similar types.
6209 Then call this function and use the value. */
6211 tree
6212 type_hash_canon (unsigned int hashcode, tree type)
6214 type_hash in;
6215 type_hash **loc;
6217 /* The hash table only contains main variants, so ensure that's what we're
6218 being passed. */
6219 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6221 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6222 must call that routine before comparing TYPE_ALIGNs. */
6223 layout_type (type);
6225 in.hash = hashcode;
6226 in.type = type;
6228 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6229 if (*loc)
6231 tree t1 = ((type_hash *) *loc)->type;
6232 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6233 && t1 != type);
6234 if (TYPE_UID (type) + 1 == next_type_uid)
6235 --next_type_uid;
6236 /* Also free the min/max values and the cached values for integer
6237 types. This can't be done in free_node, as LTO frees
6238 those on its own. */
6239 if (TREE_CODE (type) == INTEGER_TYPE)
6241 if (TYPE_MIN_VALUE (type)
6242 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6244 /* Zero is always in TYPE_CACHED_VALUES. */
6245 if (! TYPE_UNSIGNED (type))
6246 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6247 ggc_free (TYPE_MIN_VALUE (type));
6249 if (TYPE_MAX_VALUE (type)
6250 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6252 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6253 ggc_free (TYPE_MAX_VALUE (type));
6255 if (TYPE_CACHED_VALUES_P (type))
6256 ggc_free (TYPE_CACHED_VALUES (type));
6258 free_node (type);
6259 return t1;
6261 else
6263 struct type_hash *h;
6265 h = ggc_alloc<type_hash> ();
6266 h->hash = hashcode;
6267 h->type = type;
6268 *loc = h;
6270 return type;
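/* An illustrative sketch (not taken from this file) of the pattern the
   comment above describes: build a node, hash its distinguishing fields,
   then let type_hash_canon either return an existing identical type or
   intern the new one.

       tree t = make_node (INTEGER_TYPE);
       TYPE_PRECISION (t) = 24;
       fixup_unsigned_type (t);
       inchash::hash hstate;
       inchash::add_expr (TYPE_MAX_VALUE (t), hstate);
       t = type_hash_canon (hstate.end (), t);

   This is essentially what build_nonstandard_integer_type below does;
   the 24-bit precision is just an example value.  */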
6274 static void
6275 print_type_hash_statistics (void)
6277 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6278 (long) type_hash_table->size (),
6279 (long) type_hash_table->elements (),
6280 type_hash_table->collisions ());
6283 /* Given two lists of types
6284 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6285 return 1 if the lists contain the same types in the same order.
6286 Also, the TREE_PURPOSEs must match. */
6288 bool
6289 type_list_equal (const_tree l1, const_tree l2)
6291 const_tree t1, t2;
6293 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6294 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6295 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6296 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6297 && (TREE_TYPE (TREE_PURPOSE (t1))
6298 == TREE_TYPE (TREE_PURPOSE (t2))))))
6299 return false;
6301 return t1 == t2;
6304 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6305 given by FNTYPE. If the argument list accepts variable arguments,
6306 then this function counts only the ordinary arguments. */
6308 int
6309 type_num_arguments (const_tree fntype)
6311 int i = 0;
6313 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6314 /* If the function does not take a variable number of arguments,
6315 the last element in the list will have type `void'. */
6316 if (VOID_TYPE_P (TREE_VALUE (t)))
6317 break;
6318 else
6319 ++i;
6321 return i;
6324 /* Return the type of the function FNTYPE's argument ARGNO if known.
6325 For vararg functions where ARGNO refers to one of the variadic
6326 arguments return null. Otherwise, return void_type_node for an
6327 out-of-bounds ARGNO. */
6329 tree
6330 type_argument_type (const_tree fntype, unsigned argno)
6332 /* Treat zero the same as an out-of-bounds argument number. */
6333 if (!argno)
6334 return void_type_node;
6336 function_args_iterator iter;
6338 tree argtype;
6339 unsigned i = 1;
6340 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6342 /* A vararg function's argument list ends in a null. Otherwise,
6343 an ordinary function's argument list ends with void. Return
6344 null if ARGNO refers to a vararg argument, void_type_node if
6345 it's out of bounds, and the formal argument type otherwise. */
6346 if (!argtype)
6347 break;
6349 if (i == argno || VOID_TYPE_P (argtype))
6350 return argtype;
6352 ++i;
6355 return NULL_TREE;
6358 /* Nonzero if integer constants T1 and T2
6359 represent the same constant value. */
6361 int
6362 tree_int_cst_equal (const_tree t1, const_tree t2)
6364 if (t1 == t2)
6365 return 1;
6367 if (t1 == 0 || t2 == 0)
6368 return 0;
6370 STRIP_ANY_LOCATION_WRAPPER (t1);
6371 STRIP_ANY_LOCATION_WRAPPER (t2);
6373 if (TREE_CODE (t1) == INTEGER_CST
6374 && TREE_CODE (t2) == INTEGER_CST
6375 && wi::to_widest (t1) == wi::to_widest (t2))
6376 return 1;
6378 return 0;
6381 /* Return true if T is an INTEGER_CST whose numerical value (extended
6382 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6384 bool
6385 tree_fits_shwi_p (const_tree t)
6387 return (t != NULL_TREE
6388 && TREE_CODE (t) == INTEGER_CST
6389 && wi::fits_shwi_p (wi::to_widest (t)));
6392 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6393 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6395 bool
6396 tree_fits_poly_int64_p (const_tree t)
6398 if (t == NULL_TREE)
6399 return false;
6400 if (POLY_INT_CST_P (t))
6402 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6403 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6404 return false;
6405 return true;
6407 return (TREE_CODE (t) == INTEGER_CST
6408 && wi::fits_shwi_p (wi::to_widest (t)));
6411 /* Return true if T is an INTEGER_CST whose numerical value (extended
6412 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6414 bool
6415 tree_fits_uhwi_p (const_tree t)
6417 return (t != NULL_TREE
6418 && TREE_CODE (t) == INTEGER_CST
6419 && wi::fits_uhwi_p (wi::to_widest (t)));
6422 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6423 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6425 bool
6426 tree_fits_poly_uint64_p (const_tree t)
6428 if (t == NULL_TREE)
6429 return false;
6430 if (POLY_INT_CST_P (t))
6432 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6433 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6434 return false;
6435 return true;
6437 return (TREE_CODE (t) == INTEGER_CST
6438 && wi::fits_uhwi_p (wi::to_widest (t)));
6441 /* T is an INTEGER_CST whose numerical value (extended according to
6442 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6443 HOST_WIDE_INT. */
6445 HOST_WIDE_INT
6446 tree_to_shwi (const_tree t)
6448 gcc_assert (tree_fits_shwi_p (t));
6449 return TREE_INT_CST_LOW (t);
6452 /* T is an INTEGER_CST whose numerical value (extended according to
6453 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6454 HOST_WIDE_INT. */
6456 unsigned HOST_WIDE_INT
6457 tree_to_uhwi (const_tree t)
6459 gcc_assert (tree_fits_uhwi_p (t));
6460 return TREE_INT_CST_LOW (t);
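/* Callers are expected to pair the predicate with the accessor, since the
   accessors assert that the value fits.  An illustrative fragment, where
   LEN is assumed to be an INTEGER_CST such as an array length:

       if (tree_fits_uhwi_p (len))
         {
           unsigned HOST_WIDE_INT n = tree_to_uhwi (len);
           ...
         }  */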
6463 /* Return the most significant (sign) bit of T. */
6465 int
6466 tree_int_cst_sign_bit (const_tree t)
6468 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6470 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6473 /* Return an indication of the sign of the integer constant T.
6474 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6475 Note that -1 will never be returned if T's type is unsigned. */
6477 int
6478 tree_int_cst_sgn (const_tree t)
6480 if (wi::to_wide (t) == 0)
6481 return 0;
6482 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6483 return 1;
6484 else if (wi::neg_p (wi::to_wide (t)))
6485 return -1;
6486 else
6487 return 1;
6490 /* Return the minimum number of bits needed to represent VALUE in a
6491 signed or unsigned type; SGN says which. */
6493 unsigned int
6494 tree_int_cst_min_precision (tree value, signop sgn)
6496 /* If the value is negative, compute its negative minus 1. The latter
6497 adjustment is because the absolute value of the largest negative value
6498 is one larger than the largest positive value. This is equivalent to
6499 a bit-wise negation, so use that operation instead. */
6501 if (tree_int_cst_sgn (value) < 0)
6502 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6504 /* Return the number of bits needed, taking into account the fact
6505 that we need one more bit for a signed type than for an unsigned one.
6506 If the value is 0 or -1, the minimum precision is 1 no matter
6507 whether SGN is SIGNED or UNSIGNED. */
6509 if (integer_zerop (value))
6510 return 1;
6511 else
6512 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
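/* Worked example (illustrative): for VALUE == 5, tree_floor_log2 returns 2,
   so the result is 3 bits when SGN is UNSIGNED and 4 bits when SGN is
   SIGNED (one extra bit for the sign).  For VALUE == -3 the bit-wise
   negation yields 2, giving 1 + 1 + 1 = 3 bits, enough for the signed
   representation 101.  */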
6515 /* Return a truthvalue indicating whether T1 is the same tree structure as T2.
6516 Return 1 if they are the same.
6517 Return 0 if they are known to be different.
6518 Return -1 if either contains tree structure not understood by
6519 this function. */
6521 int
6522 simple_cst_equal (const_tree t1, const_tree t2)
6524 enum tree_code code1, code2;
6525 int cmp;
6526 int i;
6528 if (t1 == t2)
6529 return 1;
6530 if (t1 == 0 || t2 == 0)
6531 return 0;
6533 /* For location wrappers to be the same, they must be at the same
6534 source location (and wrap the same thing). */
6535 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6537 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6538 return 0;
6539 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6542 code1 = TREE_CODE (t1);
6543 code2 = TREE_CODE (t2);
6545 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6547 if (CONVERT_EXPR_CODE_P (code2)
6548 || code2 == NON_LVALUE_EXPR)
6549 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6550 else
6551 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6554 else if (CONVERT_EXPR_CODE_P (code2)
6555 || code2 == NON_LVALUE_EXPR)
6556 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6558 if (code1 != code2)
6559 return 0;
6561 switch (code1)
6563 case INTEGER_CST:
6564 return wi::to_widest (t1) == wi::to_widest (t2);
6566 case REAL_CST:
6567 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6569 case FIXED_CST:
6570 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6572 case STRING_CST:
6573 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6574 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6575 TREE_STRING_LENGTH (t1)));
6577 case CONSTRUCTOR:
6579 unsigned HOST_WIDE_INT idx;
6580 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6581 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6583 if (vec_safe_length (v1) != vec_safe_length (v2))
6584 return false;
6586 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6587 /* ??? Should we also handle fields here? */
6588 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6589 return false;
6590 return true;
6593 case SAVE_EXPR:
6594 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6596 case CALL_EXPR:
6597 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6598 if (cmp <= 0)
6599 return cmp;
6600 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6601 return 0;
6603 const_tree arg1, arg2;
6604 const_call_expr_arg_iterator iter1, iter2;
6605 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6606 arg2 = first_const_call_expr_arg (t2, &iter2);
6607 arg1 && arg2;
6608 arg1 = next_const_call_expr_arg (&iter1),
6609 arg2 = next_const_call_expr_arg (&iter2))
6611 cmp = simple_cst_equal (arg1, arg2);
6612 if (cmp <= 0)
6613 return cmp;
6615 return arg1 == arg2;
6618 case TARGET_EXPR:
6619 /* Special case: if either target is an unallocated VAR_DECL,
6620 it means that it's going to be unified with whatever the
6621 TARGET_EXPR is really supposed to initialize, so treat it
6622 as being equivalent to anything. */
6623 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6624 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6625 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6626 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6627 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6628 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6629 cmp = 1;
6630 else
6631 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6633 if (cmp <= 0)
6634 return cmp;
6636 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6638 case WITH_CLEANUP_EXPR:
6639 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6640 if (cmp <= 0)
6641 return cmp;
6643 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6645 case COMPONENT_REF:
6646 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6647 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6649 return 0;
6651 case VAR_DECL:
6652 case PARM_DECL:
6653 case CONST_DECL:
6654 case FUNCTION_DECL:
6655 return 0;
6657 default:
6658 if (POLY_INT_CST_P (t1))
6659 /* A false return means maybe_ne rather than known_ne. */
6660 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6661 TYPE_SIGN (TREE_TYPE (t1))),
6662 poly_widest_int::from (poly_int_cst_value (t2),
6663 TYPE_SIGN (TREE_TYPE (t2))));
6664 break;
6667 /* This general rule works for most tree codes. All exceptions should be
6668 handled above. If this is a language-specific tree code, we can't
6669 trust what might be in the operand, so say we don't know
6670 the situation. */
6671 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6672 return -1;
6674 switch (TREE_CODE_CLASS (code1))
6676 case tcc_unary:
6677 case tcc_binary:
6678 case tcc_comparison:
6679 case tcc_expression:
6680 case tcc_reference:
6681 case tcc_statement:
6682 cmp = 1;
6683 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6685 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6686 if (cmp <= 0)
6687 return cmp;
6690 return cmp;
6692 default:
6693 return -1;
6697 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6698 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6699 than U, respectively. */
6701 int
6702 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6704 if (tree_int_cst_sgn (t) < 0)
6705 return -1;
6706 else if (!tree_fits_uhwi_p (t))
6707 return 1;
6708 else if (TREE_INT_CST_LOW (t) == u)
6709 return 0;
6710 else if (TREE_INT_CST_LOW (t) < u)
6711 return -1;
6712 else
6713 return 1;
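/* A typical use (illustrative) is a bound check that avoids building a tree
   constant, e.g. "if (compare_tree_int (len, 4096) > 0)" to test whether
   the INTEGER_CST LEN exceeds 4096.  */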
6716 /* Return true if SIZE represents a constant size that is in bounds of
6717 what the middle end and the back end accept (covering not more than
6718 half of the address space).
6719 When PERR is non-null, set *PERR on failure to the description of
6720 why SIZE is not valid. */
6722 bool
6723 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6725 if (POLY_INT_CST_P (size))
6727 if (TREE_OVERFLOW (size))
6728 return false;
6729 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6730 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6731 return false;
6732 return true;
6735 cst_size_error error;
6736 if (!perr)
6737 perr = &error;
6739 if (TREE_CODE (size) != INTEGER_CST)
6741 *perr = cst_size_not_constant;
6742 return false;
6745 if (TREE_OVERFLOW_P (size))
6747 *perr = cst_size_overflow;
6748 return false;
6751 if (tree_int_cst_sgn (size) < 0)
6753 *perr = cst_size_negative;
6754 return false;
6756 if (!tree_fits_uhwi_p (size)
6757 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6758 < wi::to_widest (size) * 2))
6760 *perr = cst_size_too_big;
6761 return false;
6764 return true;
6767 /* Return the precision of the type, or for a complex or vector type the
6768 precision of the type of its elements. */
6770 unsigned int
6771 element_precision (const_tree type)
6773 if (!TYPE_P (type))
6774 type = TREE_TYPE (type);
6775 enum tree_code code = TREE_CODE (type);
6776 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6777 type = TREE_TYPE (type);
6779 return TYPE_PRECISION (type);
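/* For example (illustrative), element_precision (complex_double_type_node)
   yields TYPE_PRECISION (double_type_node), and passing a non-type operand
   uses the precision of the operand's type.  */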
6782 /* Return true if CODE represents an associative tree code. Otherwise
6783 return false. */
6784 bool
6785 associative_tree_code (enum tree_code code)
6787 switch (code)
6789 case BIT_IOR_EXPR:
6790 case BIT_AND_EXPR:
6791 case BIT_XOR_EXPR:
6792 case PLUS_EXPR:
6793 case MULT_EXPR:
6794 case MIN_EXPR:
6795 case MAX_EXPR:
6796 return true;
6798 default:
6799 break;
6801 return false;
6804 /* Return true if CODE represents a commutative tree code. Otherwise
6805 return false. */
6806 bool
6807 commutative_tree_code (enum tree_code code)
6809 switch (code)
6811 case PLUS_EXPR:
6812 case MULT_EXPR:
6813 case MULT_HIGHPART_EXPR:
6814 case MIN_EXPR:
6815 case MAX_EXPR:
6816 case BIT_IOR_EXPR:
6817 case BIT_XOR_EXPR:
6818 case BIT_AND_EXPR:
6819 case NE_EXPR:
6820 case EQ_EXPR:
6821 case UNORDERED_EXPR:
6822 case ORDERED_EXPR:
6823 case UNEQ_EXPR:
6824 case LTGT_EXPR:
6825 case TRUTH_AND_EXPR:
6826 case TRUTH_XOR_EXPR:
6827 case TRUTH_OR_EXPR:
6828 case WIDEN_MULT_EXPR:
6829 case VEC_WIDEN_MULT_HI_EXPR:
6830 case VEC_WIDEN_MULT_LO_EXPR:
6831 case VEC_WIDEN_MULT_EVEN_EXPR:
6832 case VEC_WIDEN_MULT_ODD_EXPR:
6833 return true;
6835 default:
6836 break;
6838 return false;
6841 /* Return true if CODE represents a ternary tree code for which the
6842 first two operands are commutative. Otherwise return false. */
6843 bool
6844 commutative_ternary_tree_code (enum tree_code code)
6846 switch (code)
6848 case WIDEN_MULT_PLUS_EXPR:
6849 case WIDEN_MULT_MINUS_EXPR:
6850 case DOT_PROD_EXPR:
6851 return true;
6853 default:
6854 break;
6856 return false;
6859 /* Returns true if CODE can overflow. */
6861 bool
6862 operation_can_overflow (enum tree_code code)
6864 switch (code)
6866 case PLUS_EXPR:
6867 case MINUS_EXPR:
6868 case MULT_EXPR:
6869 case LSHIFT_EXPR:
6870 /* Can overflow in various ways. */
6871 return true;
6872 case TRUNC_DIV_EXPR:
6873 case EXACT_DIV_EXPR:
6874 case FLOOR_DIV_EXPR:
6875 case CEIL_DIV_EXPR:
6876 /* For INT_MIN / -1. */
6877 return true;
6878 case NEGATE_EXPR:
6879 case ABS_EXPR:
6880 /* For -INT_MIN. */
6881 return true;
6882 default:
6883 /* These operators cannot overflow. */
6884 return false;
6888 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6889 if -ftrapv doesn't generate trapping insns for CODE. */
6891 bool
6892 operation_no_trapping_overflow (tree type, enum tree_code code)
6894 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6896 /* We don't generate instructions that trap on overflow for complex or vector
6897 types. */
6898 if (!INTEGRAL_TYPE_P (type))
6899 return true;
6901 if (!TYPE_OVERFLOW_TRAPS (type))
6902 return true;
6904 switch (code)
6906 case PLUS_EXPR:
6907 case MINUS_EXPR:
6908 case MULT_EXPR:
6909 case NEGATE_EXPR:
6910 case ABS_EXPR:
6911 /* These operators can overflow, and -ftrapv generates trapping code for
6912 these. */
6913 return false;
6914 case TRUNC_DIV_EXPR:
6915 case EXACT_DIV_EXPR:
6916 case FLOOR_DIV_EXPR:
6917 case CEIL_DIV_EXPR:
6918 case LSHIFT_EXPR:
6919 /* These operators can overflow, but -ftrapv does not generate trapping
6920 code for these. */
6921 return true;
6922 default:
6923 /* These operators cannot overflow. */
6924 return true;
6928 /* Constructors for pointer, array and function types.
6929 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6930 constructed by language-dependent code, not here.) */
6932 /* Construct, lay out and return the type of pointers to TO_TYPE with
6933 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6934 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6935 indicate this type can reference all of memory. If such a type has
6936 already been constructed, reuse it. */
6938 tree
6939 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6940 bool can_alias_all)
6942 tree t;
6943 bool could_alias = can_alias_all;
6945 if (to_type == error_mark_node)
6946 return error_mark_node;
6948 if (mode == VOIDmode)
6950 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6951 mode = targetm.addr_space.pointer_mode (as);
6954 /* If the pointed-to type has the may_alias attribute set, force
6955 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6956 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6957 can_alias_all = true;
6959 /* In some cases, languages will have things that aren't a POINTER_TYPE
6960 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6961 In that case, return that type without regard to the rest of our
6962 operands.
6964 ??? This is a kludge, but consistent with the way this function has
6965 always operated and there doesn't seem to be a good way to avoid this
6966 at the moment. */
6967 if (TYPE_POINTER_TO (to_type) != 0
6968 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6969 return TYPE_POINTER_TO (to_type);
6971 /* First, if we already have a type for pointers to TO_TYPE and it's
6972 the proper mode, use it. */
6973 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6974 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6975 return t;
6977 t = make_node (POINTER_TYPE);
6979 TREE_TYPE (t) = to_type;
6980 SET_TYPE_MODE (t, mode);
6981 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6982 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6983 TYPE_POINTER_TO (to_type) = t;
6985 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6986 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6987 SET_TYPE_STRUCTURAL_EQUALITY (t);
6988 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6989 TYPE_CANONICAL (t)
6990 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6991 mode, false);
6993 /* Lay out the type. This function has many callers that are concerned
6994 with expression-construction, and this simplifies them all. */
6995 layout_type (t);
6997 return t;
7000 /* By default build pointers in ptr_mode. */
7002 tree
7003 build_pointer_type (tree to_type)
7005 return build_pointer_type_for_mode (to_type, VOIDmode, false);
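/* Usage sketch (illustrative): build_pointer_type (char_type_node) returns
   the POINTER_TYPE for "char *", and a second call with the same argument
   returns the same node, because the chain hanging off TYPE_POINTER_TO is
   searched before a new node is made.  */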
7008 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7010 tree
7011 build_reference_type_for_mode (tree to_type, machine_mode mode,
7012 bool can_alias_all)
7014 tree t;
7015 bool could_alias = can_alias_all;
7017 if (to_type == error_mark_node)
7018 return error_mark_node;
7020 if (mode == VOIDmode)
7022 addr_space_t as = TYPE_ADDR_SPACE (to_type);
7023 mode = targetm.addr_space.pointer_mode (as);
7026 /* If the pointed-to type has the may_alias attribute set, force
7027 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7028 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7029 can_alias_all = true;
7031 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7032 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7033 In that case, return that type without regard to the rest of our
7034 operands.
7036 ??? This is a kludge, but consistent with the way this function has
7037 always operated and there doesn't seem to be a good way to avoid this
7038 at the moment. */
7039 if (TYPE_REFERENCE_TO (to_type) != 0
7040 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7041 return TYPE_REFERENCE_TO (to_type);
7043 /* First, if we already have a type for references to TO_TYPE and it's
7044 the proper mode, use it. */
7045 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7046 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7047 return t;
7049 t = make_node (REFERENCE_TYPE);
7051 TREE_TYPE (t) = to_type;
7052 SET_TYPE_MODE (t, mode);
7053 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7054 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7055 TYPE_REFERENCE_TO (to_type) = t;
7057 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7058 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7059 SET_TYPE_STRUCTURAL_EQUALITY (t);
7060 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7061 TYPE_CANONICAL (t)
7062 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7063 mode, false);
7065 layout_type (t);
7067 return t;
7071 /* Build the node for the type of references-to-TO_TYPE by default
7072 in ptr_mode. */
7074 tree
7075 build_reference_type (tree to_type)
7077 return build_reference_type_for_mode (to_type, VOIDmode, false);
7080 #define MAX_INT_CACHED_PREC \
7081 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7082 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7084 static void
7085 clear_nonstandard_integer_type_cache (void)
7087 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7089 nonstandard_integer_type_cache[i] = NULL;
7093 /* Builds a signed or unsigned integer type of precision PRECISION.
7094 Used for C bitfields whose precision does not match that of
7095 built-in target types. */
7096 tree
7097 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7098 int unsignedp)
7100 tree itype, ret;
7102 if (unsignedp)
7103 unsignedp = MAX_INT_CACHED_PREC + 1;
7105 if (precision <= MAX_INT_CACHED_PREC)
7107 itype = nonstandard_integer_type_cache[precision + unsignedp];
7108 if (itype)
7109 return itype;
7112 itype = make_node (INTEGER_TYPE);
7113 TYPE_PRECISION (itype) = precision;
7115 if (unsignedp)
7116 fixup_unsigned_type (itype);
7117 else
7118 fixup_signed_type (itype);
7120 inchash::hash hstate;
7121 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7122 ret = type_hash_canon (hstate.end (), itype);
7123 if (precision <= MAX_INT_CACHED_PREC)
7124 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7126 return ret;
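/* Usage sketch (illustrative): a front end lowering a bit-field such as
   "unsigned int x : 24;" could ask for

       tree t = build_nonstandard_integer_type (24, 1);

   and repeated calls with the same precision and signedness return the
   cached node.  */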
7129 #define MAX_BOOL_CACHED_PREC \
7130 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7131 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7133 /* Builds a boolean type of precision PRECISION.
7134 Used for boolean vectors to choose proper vector element size. */
7135 tree
7136 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7138 tree type;
7140 if (precision <= MAX_BOOL_CACHED_PREC)
7142 type = nonstandard_boolean_type_cache[precision];
7143 if (type)
7144 return type;
7147 type = make_node (BOOLEAN_TYPE);
7148 TYPE_PRECISION (type) = precision;
7149 fixup_signed_type (type);
7151 if (precision <= MAX_BOOL_CACHED_PREC)
7152 nonstandard_boolean_type_cache[precision] = type;
7154 return type;
7157 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7158 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7159 is true, reuse such a type that has already been constructed. */
7161 static tree
7162 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7164 tree itype = make_node (INTEGER_TYPE);
7166 TREE_TYPE (itype) = type;
7168 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7169 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7171 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7172 SET_TYPE_MODE (itype, TYPE_MODE (type));
7173 TYPE_SIZE (itype) = TYPE_SIZE (type);
7174 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7175 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7176 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7177 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7179 if (!shared)
7180 return itype;
7182 if ((TYPE_MIN_VALUE (itype)
7183 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7184 || (TYPE_MAX_VALUE (itype)
7185 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7187 /* Since we cannot reliably merge this type, we need to compare it using
7188 structural equality checks. */
7189 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7190 return itype;
7193 hashval_t hash = type_hash_canon_hash (itype);
7194 itype = type_hash_canon (hash, itype);
7196 return itype;
7199 /* Wrapper around build_range_type_1 with SHARED set to true. */
7201 tree
7202 build_range_type (tree type, tree lowval, tree highval)
7204 return build_range_type_1 (type, lowval, highval, true);
7207 /* Wrapper around build_range_type_1 with SHARED set to false. */
7209 tree
7210 build_nonshared_range_type (tree type, tree lowval, tree highval)
7212 return build_range_type_1 (type, lowval, highval, false);
7215 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7216 MAXVAL should be the maximum value in the domain
7217 (one less than the length of the array).
7219 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7220 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7221 The limit exists because the result is a signed type and we don't handle
7222 sizes that use more than one HOST_WIDE_INT. */
7224 tree
7225 build_index_type (tree maxval)
7227 return build_range_type (sizetype, size_zero_node, maxval);
7230 /* Return true if the debug information for TYPE, a subtype, should be emitted
7231 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7232 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7233 debug info and doesn't reflect the source code. */
7235 bool
7236 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7238 tree base_type = TREE_TYPE (type), low, high;
7240 /* Subrange types have a base type which is an integral type. */
7241 if (!INTEGRAL_TYPE_P (base_type))
7242 return false;
7244 /* Get the real bounds of the subtype. */
7245 if (lang_hooks.types.get_subrange_bounds)
7246 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7247 else
7249 low = TYPE_MIN_VALUE (type);
7250 high = TYPE_MAX_VALUE (type);
7253 /* If the type and its base type have the same representation and the same
7254 name, then the type is not a subrange but a copy of the base type. */
7255 if ((TREE_CODE (base_type) == INTEGER_TYPE
7256 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7257 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7258 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7259 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7260 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7261 return false;
7263 if (lowval)
7264 *lowval = low;
7265 if (highval)
7266 *highval = high;
7267 return true;
7270 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7271 and number of elements specified by the range of values of INDEX_TYPE.
7272 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7273 If SHARED is true, reuse such a type that has already been constructed.
7274 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7276 tree
7277 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7278 bool shared, bool set_canonical)
7280 tree t;
7282 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7284 error ("arrays of functions are not meaningful");
7285 elt_type = integer_type_node;
7288 t = make_node (ARRAY_TYPE);
7289 TREE_TYPE (t) = elt_type;
7290 TYPE_DOMAIN (t) = index_type;
7291 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7292 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7293 layout_type (t);
7295 if (shared)
7297 hashval_t hash = type_hash_canon_hash (t);
7298 t = type_hash_canon (hash, t);
7301 if (TYPE_CANONICAL (t) == t && set_canonical)
7303 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7304 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7305 || in_lto_p)
7306 SET_TYPE_STRUCTURAL_EQUALITY (t);
7307 else if (TYPE_CANONICAL (elt_type) != elt_type
7308 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7309 TYPE_CANONICAL (t)
7310 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7311 index_type
7312 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7313 typeless_storage, shared, set_canonical);
7316 return t;
7319 /* Wrapper around build_array_type_1 with SHARED set to true. */
7321 tree
7322 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7324 return
7325 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7328 /* Wrapper around build_array_type_1 with SHARED set to false. */
7330 tree
7331 build_nonshared_array_type (tree elt_type, tree index_type)
7333 return build_array_type_1 (elt_type, index_type, false, false, true);
7336 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7337 sizetype. */
7339 tree
7340 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7342 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
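/* Usage sketch (illustrative): the equivalent of the C type "char[10]" can
   be built either as

       tree domain = build_index_type (size_int (9));
       tree a = build_array_type (char_type_node, domain);

   or directly as build_array_type_nelts (char_type_node, 10); both go
   through the same hashing and canonicalization above.  */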
7345 /* Computes the canonical argument types from the argument type list
7346 ARGTYPES.
7348 Upon return, *ANY_STRUCTURAL_P will be true iff it was true
7349 on entry to this function or any of the ARGTYPES are
7350 structural.
7352 Upon return, *ANY_NONCANONICAL_P will be true iff it was
7353 true on entry to this function or any of the ARGTYPES are
7354 non-canonical.
7356 Returns a canonical argument list, which may be ARGTYPES when the
7357 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7358 true) or would not differ from ARGTYPES. */
7360 static tree
7361 maybe_canonicalize_argtypes (tree argtypes,
7362 bool *any_structural_p,
7363 bool *any_noncanonical_p)
7365 tree arg;
7366 bool any_noncanonical_argtypes_p = false;
7368 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7370 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7371 /* Fail gracefully by stating that the type is structural. */
7372 *any_structural_p = true;
7373 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7374 *any_structural_p = true;
7375 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7376 || TREE_PURPOSE (arg))
7377 /* If the argument has a default argument, we consider it
7378 non-canonical even though the type itself is canonical.
7379 That way, different variants of function and method types
7380 with default arguments will all point to the variant with
7381 no defaults as their canonical type. */
7382 any_noncanonical_argtypes_p = true;
7385 if (*any_structural_p)
7386 return argtypes;
7388 if (any_noncanonical_argtypes_p)
7390 /* Build the canonical list of argument types. */
7391 tree canon_argtypes = NULL_TREE;
7392 bool is_void = false;
7394 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7396 if (arg == void_list_node)
7397 is_void = true;
7398 else
7399 canon_argtypes = tree_cons (NULL_TREE,
7400 TYPE_CANONICAL (TREE_VALUE (arg)),
7401 canon_argtypes);
7404 canon_argtypes = nreverse (canon_argtypes);
7405 if (is_void)
7406 canon_argtypes = chainon (canon_argtypes, void_list_node);
7408 /* There is a non-canonical type. */
7409 *any_noncanonical_p = true;
7410 return canon_argtypes;
7413 /* The canonical argument types are the same as ARGTYPES. */
7414 return argtypes;
7417 /* Construct, lay out and return
7418 the type of functions returning type VALUE_TYPE
7419 given arguments of types ARG_TYPES.
7420 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7421 are data type nodes for the arguments of the function.
7422 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7423 variable-arguments function with (...) prototype (no named arguments).
7424 If such a type has already been constructed, reuse it. */
7426 tree
7427 build_function_type (tree value_type, tree arg_types,
7428 bool no_named_args_stdarg_p)
7430 tree t;
7431 inchash::hash hstate;
7432 bool any_structural_p, any_noncanonical_p;
7433 tree canon_argtypes;
7435 gcc_assert (arg_types != error_mark_node);
7437 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7439 error ("function return type cannot be function");
7440 value_type = integer_type_node;
7443 /* Make a node of the sort we want. */
7444 t = make_node (FUNCTION_TYPE);
7445 TREE_TYPE (t) = value_type;
7446 TYPE_ARG_TYPES (t) = arg_types;
7447 if (no_named_args_stdarg_p)
7449 gcc_assert (arg_types == NULL_TREE);
7450 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7453 /* If we already have such a type, use the old one. */
7454 hashval_t hash = type_hash_canon_hash (t);
7455 t = type_hash_canon (hash, t);
7457 /* Set up the canonical type. */
7458 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7459 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7460 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7461 &any_structural_p,
7462 &any_noncanonical_p);
7463 if (any_structural_p)
7464 SET_TYPE_STRUCTURAL_EQUALITY (t);
7465 else if (any_noncanonical_p)
7466 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7467 canon_argtypes);
7469 if (!COMPLETE_TYPE_P (t))
7470 layout_type (t);
7471 return t;
7474 /* Build a function type. The RETURN_TYPE is the type returned by the
7475 function. If VAARGS is set, no void_type_node is appended to the
7476 list. ARGP must always be terminated by a NULL_TREE. */
7478 static tree
7479 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7481 tree t, args, last;
7483 t = va_arg (argp, tree);
7484 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7485 args = tree_cons (NULL_TREE, t, args);
7487 if (vaargs)
7489 last = args;
7490 if (args != NULL_TREE)
7491 args = nreverse (args);
7492 gcc_assert (last != void_list_node);
7494 else if (args == NULL_TREE)
7495 args = void_list_node;
7496 else
7498 last = args;
7499 args = nreverse (args);
7500 TREE_CHAIN (last) = void_list_node;
7502 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7504 return args;
7507 /* Build a function type. The RETURN_TYPE is the type returned by the
7508 function. If additional arguments are provided, they are
7509 additional argument types. The list of argument types must always
7510 be terminated by NULL_TREE. */
7512 tree
7513 build_function_type_list (tree return_type, ...)
7515 tree args;
7516 va_list p;
7518 va_start (p, return_type);
7519 args = build_function_type_list_1 (false, return_type, p);
7520 va_end (p);
7521 return args;
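/* Usage sketch (illustrative): the type of "int f (double, void *)" can be
   built with

       tree fntype = build_function_type_list (integer_type_node,
                                               double_type_node,
                                               ptr_type_node, NULL_TREE);

   whereas build_varargs_function_type_list below omits the terminating
   void_list_node, so the resulting type takes variable arguments.  */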
7524 /* Build a variable argument function type. The RETURN_TYPE is the
7525 type returned by the function. If additional arguments are provided,
7526 they are additional argument types. The list of argument types must
7527 always be terminated by NULL_TREE. */
7529 tree
7530 build_varargs_function_type_list (tree return_type, ...)
7532 tree args;
7533 va_list p;
7535 va_start (p, return_type);
7536 args = build_function_type_list_1 (true, return_type, p);
7537 va_end (p);
7539 return args;
7542 /* Build a function type. RETURN_TYPE is the type returned by the
7543 function; VAARGS indicates whether the function takes varargs. The
7544 function takes N named arguments, the types of which are provided in
7545 ARG_TYPES. */
7547 static tree
7548 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7549 tree *arg_types)
7551 int i;
7552 tree t = vaargs ? NULL_TREE : void_list_node;
7554 for (i = n - 1; i >= 0; i--)
7555 t = tree_cons (NULL_TREE, arg_types[i], t);
7557 return build_function_type (return_type, t, vaargs && n == 0);
7560 /* Build a function type. RETURN_TYPE is the type returned by the
7561 function. The function takes N named arguments, the types of which
7562 are provided in ARG_TYPES. */
7564 tree
7565 build_function_type_array (tree return_type, int n, tree *arg_types)
7567 return build_function_type_array_1 (false, return_type, n, arg_types);
7570 /* Build a variable argument function type. RETURN_TYPE is the type
7571 returned by the function. The function takes N named arguments, the
7572 types of which are provided in ARG_TYPES. */
7574 tree
7575 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7577 return build_function_type_array_1 (true, return_type, n, arg_types);
7580 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7581 and ARGTYPES (a TREE_LIST) are the return type and argument types
7582 for the method. An implicit additional parameter (of type
7583 pointer-to-BASETYPE) is added to the ARGTYPES. */
7585 tree
7586 build_method_type_directly (tree basetype,
7587 tree rettype,
7588 tree argtypes)
7590 tree t;
7591 tree ptype;
7592 bool any_structural_p, any_noncanonical_p;
7593 tree canon_argtypes;
7595 /* Make a node of the sort we want. */
7596 t = make_node (METHOD_TYPE);
7598 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7599 TREE_TYPE (t) = rettype;
7600 ptype = build_pointer_type (basetype);
7602 /* The actual arglist for this function includes a "hidden" argument
7603 which is "this". Put it into the list of argument types. */
7604 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7605 TYPE_ARG_TYPES (t) = argtypes;
7607 /* If we already have such a type, use the old one. */
7608 hashval_t hash = type_hash_canon_hash (t);
7609 t = type_hash_canon (hash, t);
7611 /* Set up the canonical type. */
7612 any_structural_p
7613 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7614 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7615 any_noncanonical_p
7616 = (TYPE_CANONICAL (basetype) != basetype
7617 || TYPE_CANONICAL (rettype) != rettype);
7618 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7619 &any_structural_p,
7620 &any_noncanonical_p);
7621 if (any_structural_p)
7622 SET_TYPE_STRUCTURAL_EQUALITY (t);
7623 else if (any_noncanonical_p)
7624 TYPE_CANONICAL (t)
7625 = build_method_type_directly (TYPE_CANONICAL (basetype),
7626 TYPE_CANONICAL (rettype),
7627 canon_argtypes);
7628 if (!COMPLETE_TYPE_P (t))
7629 layout_type (t);
7631 return t;
7634 /* Construct, lay out and return the type of methods belonging to class
7635 BASETYPE and whose arguments and values are described by TYPE.
7636 If that type exists already, reuse it.
7637 TYPE must be a FUNCTION_TYPE node. */
7639 tree
7640 build_method_type (tree basetype, tree type)
7642 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7644 return build_method_type_directly (basetype,
7645 TREE_TYPE (type),
7646 TYPE_ARG_TYPES (type));
7649 /* Construct, lay out and return the type of offsets to a value
7650 of type TYPE, within an object of type BASETYPE.
7651 If a suitable offset type exists already, reuse it. */
7653 tree
7654 build_offset_type (tree basetype, tree type)
7656 tree t;
7658 /* Make a node of the sort we want. */
7659 t = make_node (OFFSET_TYPE);
7661 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7662 TREE_TYPE (t) = type;
7664 /* If we already have such a type, use the old one. */
7665 hashval_t hash = type_hash_canon_hash (t);
7666 t = type_hash_canon (hash, t);
7668 if (!COMPLETE_TYPE_P (t))
7669 layout_type (t);
7671 if (TYPE_CANONICAL (t) == t)
7673 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7674 || TYPE_STRUCTURAL_EQUALITY_P (type))
7675 SET_TYPE_STRUCTURAL_EQUALITY (t);
7676 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7677 || TYPE_CANONICAL (type) != type)
7678 TYPE_CANONICAL (t)
7679 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7680 TYPE_CANONICAL (type));
7683 return t;
7686 /* Create a complex type whose components are COMPONENT_TYPE.
7688 If NAMED is true, the type is given a TYPE_NAME. We do not always
7689 do so because this creates a DECL node and thus makes the DECL_UIDs
7690 dependent on the type canonicalization hashtable, which is GC-ed,
7691 so the DECL_UIDs would not be stable wrt garbage collection. */
7693 tree
7694 build_complex_type (tree component_type, bool named)
7696 gcc_assert (INTEGRAL_TYPE_P (component_type)
7697 || SCALAR_FLOAT_TYPE_P (component_type)
7698 || FIXED_POINT_TYPE_P (component_type));
7700 /* Make a node of the sort we want. */
7701 tree probe = make_node (COMPLEX_TYPE);
7703 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7705 /* If we already have such a type, use the old one. */
7706 hashval_t hash = type_hash_canon_hash (probe);
7707 tree t = type_hash_canon (hash, probe);
7709 if (t == probe)
7711 /* We created a new type. The hash insertion will have laid
7712 out the type. We need to check the canonicalization and
7713 maybe set the name. */
7714 gcc_checking_assert (COMPLETE_TYPE_P (t)
7715 && !TYPE_NAME (t)
7716 && TYPE_CANONICAL (t) == t);
7718 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7719 SET_TYPE_STRUCTURAL_EQUALITY (t);
7720 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7721 TYPE_CANONICAL (t)
7722 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7724 /* We need to create a name, since complex is a fundamental type. */
7725 if (named)
7727 const char *name = NULL;
7729 if (TREE_TYPE (t) == char_type_node)
7730 name = "complex char";
7731 else if (TREE_TYPE (t) == signed_char_type_node)
7732 name = "complex signed char";
7733 else if (TREE_TYPE (t) == unsigned_char_type_node)
7734 name = "complex unsigned char";
7735 else if (TREE_TYPE (t) == short_integer_type_node)
7736 name = "complex short int";
7737 else if (TREE_TYPE (t) == short_unsigned_type_node)
7738 name = "complex short unsigned int";
7739 else if (TREE_TYPE (t) == integer_type_node)
7740 name = "complex int";
7741 else if (TREE_TYPE (t) == unsigned_type_node)
7742 name = "complex unsigned int";
7743 else if (TREE_TYPE (t) == long_integer_type_node)
7744 name = "complex long int";
7745 else if (TREE_TYPE (t) == long_unsigned_type_node)
7746 name = "complex long unsigned int";
7747 else if (TREE_TYPE (t) == long_long_integer_type_node)
7748 name = "complex long long int";
7749 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7750 name = "complex long long unsigned int";
7752 if (name != NULL)
7753 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7754 get_identifier (name), t);
7758 return build_qualified_type (t, TYPE_QUALS (component_type));
7761 /* If TYPE is a real or complex floating-point type and the target
7762 does not directly support arithmetic on TYPE then return the wider
7763 type to be used for arithmetic on TYPE. Otherwise, return
7764 NULL_TREE. */
7766 tree
7767 excess_precision_type (tree type)
7769 /* The target can give two different responses to the question of
7770 which excess precision mode it would like depending on whether we
7771 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7773 enum excess_precision_type requested_type
7774 = (flag_excess_precision == EXCESS_PRECISION_FAST
7775 ? EXCESS_PRECISION_TYPE_FAST
7776 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7777 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7779 enum flt_eval_method target_flt_eval_method
7780 = targetm.c.excess_precision (requested_type);
7782 /* The target should not ask for unpredictable float evaluation (though
7783 it might advertise that implicitly the evaluation is unpredictable,
7784 but we don't care about that here, it will have been reported
7785 elsewhere). If it does ask for unpredictable evaluation, we have
7786 nothing to do here. */
7787 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7789 /* Nothing to do. The target has asked for all types we know about
7790 to be computed with their native precision and range. */
7791 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7792 return NULL_TREE;
7794 /* The target will promote this type in a target-dependent way, so excess
7795 precision ought to leave it alone. */
7796 if (targetm.promoted_type (type) != NULL_TREE)
7797 return NULL_TREE;
7799 machine_mode float16_type_mode = (float16_type_node
7800 ? TYPE_MODE (float16_type_node)
7801 : VOIDmode);
7802 machine_mode bfloat16_type_mode = (bfloat16_type_node
7803 ? TYPE_MODE (bfloat16_type_node)
7804 : VOIDmode);
7805 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7806 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7808 switch (TREE_CODE (type))
7810 case REAL_TYPE:
7812 machine_mode type_mode = TYPE_MODE (type);
7813 switch (target_flt_eval_method)
7815 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7816 if (type_mode == float16_type_mode
7817 || type_mode == bfloat16_type_mode)
7818 return float_type_node;
7819 break;
7820 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7821 if (type_mode == float16_type_mode
7822 || type_mode == bfloat16_type_mode
7823 || type_mode == float_type_mode)
7824 return double_type_node;
7825 break;
7826 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7827 if (type_mode == float16_type_mode
7828 || type_mode == bfloat16_type_mode
7829 || type_mode == float_type_mode
7830 || type_mode == double_type_mode)
7831 return long_double_type_node;
7832 break;
7833 default:
7834 gcc_unreachable ();
7836 break;
7838 case COMPLEX_TYPE:
7840 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7841 return NULL_TREE;
7842 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7843 switch (target_flt_eval_method)
7845 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7846 if (type_mode == float16_type_mode
7847 || type_mode == bfloat16_type_mode)
7848 return complex_float_type_node;
7849 break;
7850 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7851 if (type_mode == float16_type_mode
7852 || type_mode == bfloat16_type_mode
7853 || type_mode == float_type_mode)
7854 return complex_double_type_node;
7855 break;
7856 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7857 if (type_mode == float16_type_mode
7858 || type_mode == bfloat16_type_mode
7859 || type_mode == float_type_mode
7860 || type_mode == double_type_mode)
7861 return complex_long_double_type_node;
7862 break;
7863 default:
7864 gcc_unreachable ();
7866 break;
7868 default:
7869 break;
7872 return NULL_TREE;
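/* For example (illustrative): on a target whose excess_precision hook
   answers FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE (classic x87 arithmetic
   with -fexcess-precision=standard), excess_precision_type (float_type_node)
   returns long_double_type_node, and the COMPLEX_TYPE case is handled
   analogously.  */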
7875 /* Return OP, stripped of any conversions to wider types as much as is safe.
7876 Converting the value back to OP's type makes a value equivalent to OP.
7878 If FOR_TYPE is nonzero, we return a value which, if converted to
7879 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7881 OP must have integer, real or enumeral type. Pointers are not allowed!
7883 There are some cases where the obvious value we could return
7884 would regenerate to OP if converted to OP's type,
7885 but would not extend like OP to wider types.
7886 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7887 For example, if OP is (unsigned short)(signed char)-1,
7888 we avoid returning (signed char)-1 if FOR_TYPE is int,
7889 even though extending that to an unsigned short would regenerate OP,
7890 since the result of extending (signed char)-1 to (int)
7891 is different from (int) OP. */
7893 tree
7894 get_unwidened (tree op, tree for_type)
7896 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7897 tree type = TREE_TYPE (op);
7898 unsigned final_prec
7899 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7900 int uns
7901 = (for_type != 0 && for_type != type
7902 && final_prec > TYPE_PRECISION (type)
7903 && TYPE_UNSIGNED (type));
7904 tree win = op;
7906 while (CONVERT_EXPR_P (op))
7908 int bitschange;
7910 /* TYPE_PRECISION on vector types has different meaning
7911 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7912 so avoid them here. */
7913 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7914 break;
7916 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7917 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7919 /* Truncations are many-one so cannot be removed.
7920 Unless we are later going to truncate down even farther. */
7921 if (bitschange < 0
7922 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7923 break;
7925 /* See what's inside this conversion. If we decide to strip it,
7926 we will set WIN. */
7927 op = TREE_OPERAND (op, 0);
7929 /* If we have not stripped any zero-extensions (uns is 0),
7930 we can strip any kind of extension.
7931 If we have previously stripped a zero-extension,
7932 only zero-extensions can safely be stripped.
7933 Any extension can be stripped if the bits it would produce
7934 are all going to be discarded later by truncating to FOR_TYPE. */
7936 if (bitschange > 0)
7938 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7939 win = op;
7940 /* TYPE_UNSIGNED says whether this is a zero-extension.
7941 Let's avoid computing it if it does not affect WIN
7942 and if UNS will not be needed again. */
7943 if ((uns
7944 || CONVERT_EXPR_P (op))
7945 && TYPE_UNSIGNED (TREE_TYPE (op)))
7947 uns = 1;
7948 win = op;
7953 /* If we finally reach a constant, see if it fits in something smaller and
7954 in that case convert it. */
7955 if (TREE_CODE (win) == INTEGER_CST)
7957 tree wtype = TREE_TYPE (win);
7958 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7959 if (for_type)
7960 prec = MAX (prec, final_prec);
7961 if (prec < TYPE_PRECISION (wtype))
7963 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7964 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7965 win = fold_convert (t, win);
7969 return win;
7972 /* Return OP or a simpler expression for a narrower value
7973 which can be sign-extended or zero-extended to give back OP.
7974 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7975 or 0 if the value should be sign-extended. */
7977 tree
7978 get_narrower (tree op, int *unsignedp_ptr)
7980 int uns = 0;
7981 int first = 1;
7982 tree win = op;
7983 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7985 if (TREE_CODE (op) == COMPOUND_EXPR)
7987 do
7988 op = TREE_OPERAND (op, 1);
7989 while (TREE_CODE (op) == COMPOUND_EXPR);
7990 tree ret = get_narrower (op, unsignedp_ptr);
7991 if (ret == op)
7992 return win;
7993 auto_vec <tree, 16> v;
7994 unsigned int i;
7995 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7996 op = TREE_OPERAND (op, 1))
7997 v.safe_push (op);
7998 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7999 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8000 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8001 ret);
8002 return ret;
8004 while (TREE_CODE (op) == NOP_EXPR)
8006 int bitschange
8007 = (TYPE_PRECISION (TREE_TYPE (op))
8008 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8010 /* Truncations are many-one so cannot be removed. */
8011 if (bitschange < 0)
8012 break;
8014 /* See what's inside this conversion. If we decide to strip it,
8015 we will set WIN. */
8017 if (bitschange > 0)
8019 op = TREE_OPERAND (op, 0);
8020 /* An extension: the outermost one can be stripped,
8021 but remember whether it is zero or sign extension. */
8022 if (first)
8023 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8024 /* Otherwise, if a sign extension has been stripped,
8025 only sign extensions can now be stripped;
8026 if a zero extension has been stripped, only zero-extensions. */
8027 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8028 break;
8029 first = 0;
8031 else /* bitschange == 0 */
8033 /* A change in nominal type can always be stripped, but we must
8034 preserve the unsignedness. */
8035 if (first)
8036 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8037 first = 0;
8038 op = TREE_OPERAND (op, 0);
8039 /* Keep trying to narrow, but don't assign op to win if it
8040 would turn an integral type into something else. */
8041 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8042 continue;
8045 win = op;
8048 if (TREE_CODE (op) == COMPONENT_REF
8049 /* Since type_for_size always gives an integer type. */
8050 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8051 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8052 /* Ensure field is laid out already. */
8053 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8054 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8056 unsigned HOST_WIDE_INT innerprec
8057 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8058 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8059 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8060 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8062 /* We can get this structure field in a narrower type that fits it,
8063 but the resulting extension to its nominal type (a fullword type)
8064 must satisfy the same conditions as for other extensions.
8066 Do this only for fields that are aligned (not bit-fields),
8067 because when bit-field insns will be used there is no
8068 advantage in doing this. */
8070 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8071 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8072 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8073 && type != 0)
8075 if (first)
8076 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8077 win = fold_convert (type, op);
8081 *unsignedp_ptr = uns;
8082 return win;
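/* Editor's note: the following is an illustrative sketch added for
   exposition; it is not part of the original source, and the helper name
   suffixed "_example" is hypothetical.  It shows a typical call pattern
   for get_narrower: strip redundant widening conversions and record
   whether the stripped extension was a zero-extension.  */

static tree
narrowed_operand_example (tree op)
{
  int unsignedp;
  tree inner = get_narrower (op, &unsignedp);

  /* INNER extends (zero-extends if UNSIGNEDP, sign-extends otherwise)
     back to the value of OP, so the value can be reasoned about in the
     narrower type.  */
  if (inner != op && unsignedp)
    return inner;
  return op;
}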
8085 /* Return true if integer constant C has a value that is permissible
8086 for TYPE, an integral type. */
8088 bool
8089 int_fits_type_p (const_tree c, const_tree type)
8091 tree type_low_bound, type_high_bound;
8092 bool ok_for_low_bound, ok_for_high_bound;
8093 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8095 /* Non-standard boolean types can have arbitrary precision but various
8096 transformations assume that they can only take values 0 and +/-1. */
8097 if (TREE_CODE (type) == BOOLEAN_TYPE)
8098 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8100 retry:
8101 type_low_bound = TYPE_MIN_VALUE (type);
8102 type_high_bound = TYPE_MAX_VALUE (type);
8104 /* If at least one bound of the type is a constant integer, we can check
8105 ourselves and maybe make a decision. If no such decision is possible, but
8106 this type is a subtype, try checking against that. Otherwise, use
8107 fits_to_tree_p, which checks against the precision.
8109 Compute the status for each possibly constant bound, returning false as
8110 soon as C is known not to fit one of them. Use ok_for_xxx_bound to
8111 record whether C is known to fit the corresponding bound; false means
8112 the bound is not a constant, so nothing is known from it. */
8114 /* Check if c >= type_low_bound. */
8115 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8117 if (tree_int_cst_lt (c, type_low_bound))
8118 return false;
8119 ok_for_low_bound = true;
8121 else
8122 ok_for_low_bound = false;
8124 /* Check if c <= type_high_bound. */
8125 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8127 if (tree_int_cst_lt (type_high_bound, c))
8128 return false;
8129 ok_for_high_bound = true;
8131 else
8132 ok_for_high_bound = false;
8134 /* If the constant fits both bounds, the result is known. */
8135 if (ok_for_low_bound && ok_for_high_bound)
8136 return true;
8138 /* Perform some generic filtering which may allow making a decision
8139 even if the bounds are not constant. First, negative integers
8140 never fit in unsigned types. */
8141 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8142 return false;
8144 /* Second, narrower types always fit in wider ones. */
8145 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8146 return true;
8148 /* Third, unsigned integers with top bit set never fit signed types. */
8149 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8151 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8152 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8154 /* When a tree_cst is converted to a wide-int, the precision
8155 is taken from the type. However, if the precision of the
8156 mode underneath the type is smaller than that, it is
8157 possible that the value will not fit. The test below
8158 fails if any bit is set between the sign bit of the
8159 underlying mode and the top bit of the type. */
8160 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8161 return false;
8163 else if (wi::neg_p (wi::to_wide (c)))
8164 return false;
8167 /* If we haven't been able to decide at this point, there is nothing more we
8168 can check ourselves here. Look at the base type if we have one and it
8169 has the same precision. */
8170 if (TREE_CODE (type) == INTEGER_TYPE
8171 && TREE_TYPE (type) != 0
8172 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8174 type = TREE_TYPE (type);
8175 goto retry;
8178 /* Or to fits_to_tree_p, if nothing else. */
8179 return wi::fits_to_tree_p (wi::to_wide (c), type);
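/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  It shows the
   usual pairing of int_fits_type_p with fold_convert when trying to
   demote an INTEGER_CST to a narrower integral type without changing its
   value.  */

static tree
demote_constant_example (tree cst, tree narrow_type)
{
  if (TREE_CODE (cst) == INTEGER_CST
      && INTEGRAL_TYPE_P (narrow_type)
      && int_fits_type_p (cst, narrow_type))
    return fold_convert (narrow_type, cst);
  return cst;
}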
8182 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8183 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8184 represented (assuming two's-complement arithmetic) within the bit
8185 precision of the type are returned instead. */
8187 void
8188 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8190 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8191 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8192 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8193 else
8195 if (TYPE_UNSIGNED (type))
8196 mpz_set_ui (min, 0);
8197 else
8199 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8200 wi::to_mpz (mn, min, SIGNED);
8204 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8205 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8206 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8207 else
8209 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8210 wi::to_mpz (mn, max, TYPE_SIGN (type));
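/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  The caller owns
   the two GMP values and must initialize and clear them around the
   query.  */

static bool
value_within_static_bounds_example (const_tree type, const mpz_t val)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  bool ok = mpz_cmp (lo, val) <= 0 && mpz_cmp (val, hi) <= 0;
  mpz_clear (lo);
  mpz_clear (hi);
  return ok;
}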
8214 /* Return true if VAR is an automatic variable. */
8216 bool
8217 auto_var_p (const_tree var)
8219 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8220 || TREE_CODE (var) == PARM_DECL)
8221 && ! TREE_STATIC (var))
8222 || TREE_CODE (var) == RESULT_DECL);
8225 /* Return true if VAR is an automatic variable defined in function FN. */
8227 bool
8228 auto_var_in_fn_p (const_tree var, const_tree fn)
8230 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8231 && (auto_var_p (var)
8232 || TREE_CODE (var) == LABEL_DECL));
8235 /* Subprogram of following function. Called by walk_tree.
8237 Return *TP if it is an automatic variable or parameter of the
8238 function passed in as DATA. */
8240 static tree
8241 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8243 tree fn = (tree) data;
8245 if (TYPE_P (*tp))
8246 *walk_subtrees = 0;
8248 else if (DECL_P (*tp)
8249 && auto_var_in_fn_p (*tp, fn))
8250 return *tp;
8252 return NULL_TREE;
8255 /* Returns true if T is, contains, or refers to a type with variable
8256 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8257 arguments, but not the return type. If FN is nonzero, only return
8258 true if a modifier of the type or position of FN is a variable or
8259 parameter inside FN.
8261 This concept is more general than that of C99 'variably modified types':
8262 in C99, a struct type is never variably modified because a VLA may not
8263 appear as a structure member. However, in GNU C, code like:
8265 struct S { int i[f()]; };
8267 is valid, and other languages may define similar constructs. */
8269 bool
8270 variably_modified_type_p (tree type, tree fn)
8272 tree t;
8274 /* Test if T is either variable (if FN is zero) or an expression containing
8275 a variable in FN. If TYPE isn't gimplified, return true also if
8276 gimplify_one_sizepos would gimplify the expression into a local
8277 variable. */
8278 #define RETURN_TRUE_IF_VAR(T) \
8279 do { tree _t = (T); \
8280 if (_t != NULL_TREE \
8281 && _t != error_mark_node \
8282 && !CONSTANT_CLASS_P (_t) \
8283 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8284 && (!fn \
8285 || (!TYPE_SIZES_GIMPLIFIED (type) \
8286 && (TREE_CODE (_t) != VAR_DECL \
8287 && !CONTAINS_PLACEHOLDER_P (_t))) \
8288 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8289 return true; } while (0)
8291 if (type == error_mark_node)
8292 return false;
8294 /* If TYPE itself has variable size, it is variably modified. */
8295 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8296 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8298 switch (TREE_CODE (type))
8300 case POINTER_TYPE:
8301 case REFERENCE_TYPE:
8302 case VECTOR_TYPE:
8303 /* Ada can have pointer types referring to themselves indirectly. */
8304 if (TREE_VISITED (type))
8305 return false;
8306 TREE_VISITED (type) = true;
8307 if (variably_modified_type_p (TREE_TYPE (type), fn))
8309 TREE_VISITED (type) = false;
8310 return true;
8312 TREE_VISITED (type) = false;
8313 break;
8315 case FUNCTION_TYPE:
8316 case METHOD_TYPE:
8317 /* If TYPE is a function type, it is variably modified if the
8318 return type is variably modified. */
8319 if (variably_modified_type_p (TREE_TYPE (type), fn))
8320 return true;
8321 break;
8323 case INTEGER_TYPE:
8324 case REAL_TYPE:
8325 case FIXED_POINT_TYPE:
8326 case ENUMERAL_TYPE:
8327 case BOOLEAN_TYPE:
8328 /* Scalar types are variably modified if their end points
8329 aren't constant. */
8330 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8331 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8332 break;
8334 case RECORD_TYPE:
8335 case UNION_TYPE:
8336 case QUAL_UNION_TYPE:
8337 /* We can't see if any of the fields are variably-modified by the
8338 definition we normally use, since that would produce infinite
8339 recursion via pointers. */
8340 /* This is variably modified if some field's type is. */
8341 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8342 if (TREE_CODE (t) == FIELD_DECL)
8344 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8345 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8346 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8348 /* If the type is a qualified union, then the DECL_QUALIFIER
8349 of fields can also be an expression containing a variable. */
8350 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8351 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8353 /* If the field is a qualified union, then it's only a container
8354 for what's inside so we look into it. That's necessary in LTO
8355 mode because the sizes of the field tested above have been set
8356 to PLACEHOLDER_EXPRs by free_lang_data. */
8357 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8358 && variably_modified_type_p (TREE_TYPE (t), fn))
8359 return true;
8361 break;
8363 case ARRAY_TYPE:
8364 /* Do not call ourselves to avoid infinite recursion. This is
8365 variably modified if the element type is. */
8366 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8367 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8368 break;
8370 default:
8371 break;
8374 /* The current language may have other cases to check, but in general,
8375 all other types are not variably modified. */
8376 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8378 #undef RETURN_TRUE_IF_VAR
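/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  A typical query
   asks whether a declaration's type depends on values computed inside the
   function containing it (a VLA-like situation), in which case its size
   cannot be laid out statically.  */

static bool
decl_needs_runtime_size_example (tree decl, tree fndecl)
{
  return variably_modified_type_p (TREE_TYPE (decl), fndecl);
}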
8381 /* Given a DECL or TYPE, return the scope in which it was declared, or
8382 NULL_TREE if there is no containing scope. */
8384 tree
8385 get_containing_scope (const_tree t)
8387 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8390 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8392 const_tree
8393 get_ultimate_context (const_tree decl)
8395 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8397 if (TREE_CODE (decl) == BLOCK)
8398 decl = BLOCK_SUPERCONTEXT (decl);
8399 else
8400 decl = get_containing_scope (decl);
8402 return decl;
8405 /* Return the innermost context enclosing DECL that is
8406 a FUNCTION_DECL, or zero if none. */
8408 tree
8409 decl_function_context (const_tree decl)
8411 tree context;
8413 if (TREE_CODE (decl) == ERROR_MARK)
8414 return 0;
8416 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8417 where we look up the function at runtime. Such functions always take
8418 a first argument of type 'pointer to real context'.
8420 C++ should really be fixed to use DECL_CONTEXT for the real context,
8421 and use something else for the "virtual context". */
8422 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8423 context
8424 = TYPE_MAIN_VARIANT
8425 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8426 else
8427 context = DECL_CONTEXT (decl);
8429 while (context && TREE_CODE (context) != FUNCTION_DECL)
8431 if (TREE_CODE (context) == BLOCK)
8432 context = BLOCK_SUPERCONTEXT (context);
8433 else
8434 context = get_containing_scope (context);
8437 return context;
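/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  A nested
   function is one whose innermost enclosing FUNCTION_DECL context is
   non-null.  */

static bool
is_nested_function_example (tree fndecl)
{
  return TREE_CODE (fndecl) == FUNCTION_DECL
	 && decl_function_context (fndecl) != NULL_TREE;
}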
8440 /* Return the innermost context enclosing DECL that is
8441 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8442 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8444 tree
8445 decl_type_context (const_tree decl)
8447 tree context = DECL_CONTEXT (decl);
8449 while (context)
8450 switch (TREE_CODE (context))
8452 case NAMESPACE_DECL:
8453 case TRANSLATION_UNIT_DECL:
8454 return NULL_TREE;
8456 case RECORD_TYPE:
8457 case UNION_TYPE:
8458 case QUAL_UNION_TYPE:
8459 return context;
8461 case TYPE_DECL:
8462 case FUNCTION_DECL:
8463 context = DECL_CONTEXT (context);
8464 break;
8466 case BLOCK:
8467 context = BLOCK_SUPERCONTEXT (context);
8468 break;
8470 default:
8471 gcc_unreachable ();
8474 return NULL_TREE;
8477 /* CALL is a CALL_EXPR. Return the declaration for the function
8478 called, or NULL_TREE if the called function cannot be
8479 determined. */
8481 tree
8482 get_callee_fndecl (const_tree call)
8484 tree addr;
8486 if (call == error_mark_node)
8487 return error_mark_node;
8489 /* It's invalid to call this function with anything but a
8490 CALL_EXPR. */
8491 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8493 /* The first operand to the CALL is the address of the function
8494 called. */
8495 addr = CALL_EXPR_FN (call);
8497 /* If there is no function, return early. */
8498 if (addr == NULL_TREE)
8499 return NULL_TREE;
8501 STRIP_NOPS (addr);
8503 /* If this is a readonly function pointer, extract its initial value. */
8504 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8505 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8506 && DECL_INITIAL (addr))
8507 addr = DECL_INITIAL (addr);
8509 /* If the address is just `&f' for some function `f', then we know
8510 that `f' is being called. */
8511 if (TREE_CODE (addr) == ADDR_EXPR
8512 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8513 return TREE_OPERAND (addr, 0);
8515 /* We couldn't figure out what was being called. */
8516 return NULL_TREE;
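/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  Consumers
   typically look through the CALL_EXPR for a known declaration before
   special-casing the call.  */

static bool
calls_normal_builtin_example (tree call)
{
  if (call == error_mark_node || TREE_CODE (call) != CALL_EXPR)
    return false;
  tree fndecl = get_callee_fndecl (call);
  return fndecl != NULL_TREE && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL);
}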
8519 /* Return true when CALL's arguments and return value match those of FNDECL,
8520 a decl of a builtin function. */
8522 static bool
8523 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8525 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8527 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8528 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8529 fndecl = decl;
8531 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8532 if (gimple_form
8533 ? !useless_type_conversion_p (TREE_TYPE (call),
8534 TREE_TYPE (TREE_TYPE (fndecl)))
8535 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8536 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8537 return false;
8539 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8540 unsigned nargs = call_expr_nargs (call);
8541 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8543 /* Variadic args follow. */
8544 if (!targs)
8545 return true;
8546 tree arg = CALL_EXPR_ARG (call, i);
8547 tree type = TREE_VALUE (targs);
8548 if (gimple_form
8549 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8550 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8552 /* For pointer arguments be more forgiving, e.g. due to
8553 FILE * vs. fileptr_type_node, or char * vs. const char *
8554 differences, etc. */
8555 if (!gimple_form
8556 && POINTER_TYPE_P (type)
8557 && POINTER_TYPE_P (TREE_TYPE (arg))
8558 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8559 continue;
8560 /* char/short integral arguments are promoted to int
8561 by several frontends if targetm.calls.promote_prototypes
8562 is true. Allow such promotion too. */
8563 if (INTEGRAL_TYPE_P (type)
8564 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8565 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8566 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8567 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8568 && (gimple_form
8569 ? useless_type_conversion_p (integer_type_node,
8570 TREE_TYPE (arg))
8571 : tree_nop_conversion_p (integer_type_node,
8572 TREE_TYPE (arg))))
8573 continue;
8574 return false;
8577 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8578 return false;
8579 return true;
8582 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8583 return the associated function code, otherwise return CFN_LAST. */
8585 combined_fn
8586 get_call_combined_fn (const_tree call)
8588 /* It's invalid to call this function with anything but a CALL_EXPR. */
8589 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8591 if (!CALL_EXPR_FN (call))
8592 return as_combined_fn (CALL_EXPR_IFN (call));
8594 tree fndecl = get_callee_fndecl (call);
8595 if (fndecl
8596 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8597 && tree_builtin_call_types_compatible_p (call, fndecl))
8598 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8600 return CFN_LAST;
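/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  The combined_fn
   code lets callers treat built-in functions and internal functions
   uniformly, e.g.:  */

static bool
is_memory_copy_call_example (const_tree call)
{
  combined_fn fn = get_call_combined_fn (call);
  return fn == CFN_BUILT_IN_MEMCPY || fn == CFN_BUILT_IN_MEMMOVE;
}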
8603 /* Comparator of indices based on tree_node_counts. */
8605 static int
8606 tree_nodes_cmp (const void *p1, const void *p2)
8608 const unsigned *n1 = (const unsigned *)p1;
8609 const unsigned *n2 = (const unsigned *)p2;
8611 return tree_node_counts[*n1] - tree_node_counts[*n2];
8614 /* Comparator of indices based on tree_code_counts. */
8616 static int
8617 tree_codes_cmp (const void *p1, const void *p2)
8619 const unsigned *n1 = (const unsigned *)p1;
8620 const unsigned *n2 = (const unsigned *)p2;
8622 return tree_code_counts[*n1] - tree_code_counts[*n2];
8625 #define TREE_MEM_USAGE_SPACES 40
8627 /* Print debugging information about tree nodes generated during the compile,
8628 and any language-specific information. */
8630 void
8631 dump_tree_statistics (void)
8633 if (GATHER_STATISTICS)
8635 uint64_t total_nodes, total_bytes;
8636 fprintf (stderr, "\nKind Nodes Bytes\n");
8637 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8638 total_nodes = total_bytes = 0;
8641 auto_vec<unsigned> indices (all_kinds);
8642 for (unsigned i = 0; i < all_kinds; i++)
8643 indices.quick_push (i);
8644 indices.qsort (tree_nodes_cmp);
8646 for (unsigned i = 0; i < (int) all_kinds; i++)
8648 unsigned j = indices[i];
8649 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8650 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8651 SIZE_AMOUNT (tree_node_sizes[j]));
8652 total_nodes += tree_node_counts[j];
8653 total_bytes += tree_node_sizes[j];
8655 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8656 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8657 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8658 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8662 fprintf (stderr, "Code Nodes\n");
8663 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8665 auto_vec<unsigned> indices (MAX_TREE_CODES);
8666 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8667 indices.quick_push (i);
8668 indices.qsort (tree_codes_cmp);
8670 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8672 unsigned j = indices[i];
8673 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8674 get_tree_code_name ((enum tree_code) j),
8675 SIZE_AMOUNT (tree_code_counts[j]));
8677 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8678 fprintf (stderr, "\n");
8679 ssanames_print_statistics ();
8680 fprintf (stderr, "\n");
8681 phinodes_print_statistics ();
8682 fprintf (stderr, "\n");
8685 else
8686 fprintf (stderr, "(No per-node statistics)\n");
8688 print_type_hash_statistics ();
8689 print_debug_expr_statistics ();
8690 print_value_expr_statistics ();
8691 lang_hooks.print_statistics ();
8694 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8696 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8698 unsigned
8699 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8701 /* This relies on the raw feedback's top 4 bits being zero. */
8702 #define FEEDBACK(X) ((X) * 0x04c11db7)
8703 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8704 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8705 static const unsigned syndromes[16] =
8707 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8708 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8709 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8710 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8712 #undef FEEDBACK
8713 #undef SYNDROME
8715 value <<= (32 - bytes * 8);
8716 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8718 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8720 chksum = (chksum << 4) ^ feedback;
8723 return chksum;
8726 /* Generate a crc32 of a string. */
8728 unsigned
8729 crc32_string (unsigned chksum, const char *string)
8731 do
8732 chksum = crc32_byte (chksum, *string);
8733 while (*string++);
8734 return chksum;
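/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  The checksum can
   be threaded through successive calls to mix several inputs into one
   value.  */

static unsigned
crc32_of_two_strings_example (const char *a, const char *b)
{
  unsigned chksum = crc32_string (0, a);
  chksum = crc32_string (chksum, b);
  return chksum;
}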
8737 /* P is a string that will be used in a symbol. Mask out any characters
8738 that are not valid in that context. */
8740 void
8741 clean_symbol_name (char *p)
8743 for (; *p; p++)
8744 if (! (ISALNUM (*p)
8745 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8746 || *p == '$'
8747 #endif
8748 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8749 || *p == '.'
8750 #endif
8752 *p = '_';
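/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  Since
   clean_symbol_name rewrites its argument in place, callers usually hand
   it a scratch copy of the string.  */

static char *
cleaned_symbol_copy_example (const char *name)
{
  char *copy = xstrdup (name);
  clean_symbol_name (copy);
  return copy;   /* Caller frees.  */
}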
8755 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8757 /* Create a unique anonymous identifier. The identifier is still a
8758 valid assembly label. */
8760 tree
8761 make_anon_name ()
8763 const char *fmt =
8764 #if !defined (NO_DOT_IN_LABEL)
8765 "."
8766 #elif !defined (NO_DOLLAR_IN_LABEL)
8767 "$"
8768 #else
8769 "_"
8770 #endif
8771 "_anon_%d";
8773 char buf[24];
8774 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8775 gcc_checking_assert (len < int (sizeof (buf)));
8777 tree id = get_identifier_with_length (buf, len);
8778 IDENTIFIER_ANON_P (id) = true;
8780 return id;
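/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  Identifiers
   produced by make_anon_name can later be recognized through
   IDENTIFIER_ANON_P.  */

static bool
is_anonymous_identifier_example (tree id)
{
  return id != NULL_TREE
	 && TREE_CODE (id) == IDENTIFIER_NODE
	 && IDENTIFIER_ANON_P (id);
}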
8783 /* Generate a name for a special-purpose function.
8784 The generated name may need to be unique across the whole link.
8785 Changes to this function may also require corresponding changes to
8786 xstrdup_mask_random.
8787 TYPE is some string to identify the purpose of this function to the
8788 linker or collect2; it must start with an uppercase letter,
8789 one of:
8790 I - for constructors
8791 D - for destructors
8792 N - for C++ anonymous namespaces
8793 F - for DWARF unwind frame information. */
8795 tree
8796 get_file_function_name (const char *type)
8798 char *buf;
8799 const char *p;
8800 char *q;
8802 /* If we already have a name we know to be unique, just use that. */
8803 if (first_global_object_name)
8804 p = q = ASTRDUP (first_global_object_name);
8805 /* If the target is handling the constructors/destructors, they
8806 will be local to this file and the name is only necessary for
8807 debugging purposes.
8808 We also assign sub_I and sub_D suffixes to constructors called from
8809 the global static constructors. These are always local. */
8810 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8811 || (startswith (type, "sub_")
8812 && (type[4] == 'I' || type[4] == 'D')))
8814 const char *file = main_input_filename;
8815 if (! file)
8816 file = LOCATION_FILE (input_location);
8817 /* Just use the file's basename, because the full pathname
8818 might be quite long. */
8819 p = q = ASTRDUP (lbasename (file));
8821 else
8823 /* Otherwise, the name must be unique across the entire link.
8824 We don't have anything that we know to be unique to this translation
8825 unit, so use what we do have and throw in some randomness. */
8826 unsigned len;
8827 const char *name = weak_global_object_name;
8828 const char *file = main_input_filename;
8830 if (! name)
8831 name = "";
8832 if (! file)
8833 file = LOCATION_FILE (input_location);
8835 len = strlen (file);
8836 q = (char *) alloca (9 + 19 + len + 1);
8837 memcpy (q, file, len + 1);
8839 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8840 crc32_string (0, name), get_random_seed (false));
8842 p = q;
8845 clean_symbol_name (q);
8846 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8847 + strlen (type));
8849 /* Set up the name of the file-level functions we may need.
8850 Use a global object (which is already required to be unique over
8851 the program) rather than the file name (which imposes extra
8852 constraints). */
8853 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8855 return get_identifier (buf);
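/* Editor's note: an illustrative sketch added for exposition, not part of
   the original source; the helper name is hypothetical.  Requesting a
   constructor name with "I" yields an identifier of the form
   _GLOBAL__I_<name>, per FILE_FUNCTION_FORMAT above.  */

static tree
global_ctor_identifier_example (void)
{
  return get_file_function_name ("I");
}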
8858 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8860 /* Complain that the tree code of NODE does not match the expected 0
8861 terminated list of trailing codes. The trailing code list can be
8862 empty, for a more vague error message. FILE, LINE, and FUNCTION
8863 are of the caller. */
8865 void
8866 tree_check_failed (const_tree node, const char *file,
8867 int line, const char *function, ...)
8869 va_list args;
8870 const char *buffer;
8871 unsigned length = 0;
8872 enum tree_code code;
8874 va_start (args, function);
8875 while ((code = (enum tree_code) va_arg (args, int)))
8876 length += 4 + strlen (get_tree_code_name (code));
8877 va_end (args);
8878 if (length)
8880 char *tmp;
8881 va_start (args, function);
8882 length += strlen ("expected ");
8883 buffer = tmp = (char *) alloca (length);
8884 length = 0;
8885 while ((code = (enum tree_code) va_arg (args, int)))
8887 const char *prefix = length ? " or " : "expected ";
8889 strcpy (tmp + length, prefix);
8890 length += strlen (prefix);
8891 strcpy (tmp + length, get_tree_code_name (code));
8892 length += strlen (get_tree_code_name (code));
8894 va_end (args);
8896 else
8897 buffer = "unexpected node";
8899 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8900 buffer, get_tree_code_name (TREE_CODE (node)),
8901 function, trim_filename (file), line);
8904 /* Complain that the tree code of NODE does match the expected 0
8905 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8906 the caller. */
8908 void
8909 tree_not_check_failed (const_tree node, const char *file,
8910 int line, const char *function, ...)
8912 va_list args;
8913 char *buffer;
8914 unsigned length = 0;
8915 enum tree_code code;
8917 va_start (args, function);
8918 while ((code = (enum tree_code) va_arg (args, int)))
8919 length += 4 + strlen (get_tree_code_name (code));
8920 va_end (args);
8921 va_start (args, function);
8922 buffer = (char *) alloca (length);
8923 length = 0;
8924 while ((code = (enum tree_code) va_arg (args, int)))
8926 if (length)
8928 strcpy (buffer + length, " or ");
8929 length += 4;
8931 strcpy (buffer + length, get_tree_code_name (code));
8932 length += strlen (get_tree_code_name (code));
8934 va_end (args);
8936 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8937 buffer, get_tree_code_name (TREE_CODE (node)),
8938 function, trim_filename (file), line);
8941 /* Similar to tree_check_failed, except that we check for a class of tree
8942 code, given in CL. */
8944 void
8945 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8946 const char *file, int line, const char *function)
8948 internal_error
8949 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8950 TREE_CODE_CLASS_STRING (cl),
8951 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8952 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8955 /* Similar to tree_check_failed, except that instead of specifying a
8956 dozen codes, use the knowledge that they're all sequential. */
8958 void
8959 tree_range_check_failed (const_tree node, const char *file, int line,
8960 const char *function, enum tree_code c1,
8961 enum tree_code c2)
8963 char *buffer;
8964 unsigned length = 0;
8965 unsigned int c;
8967 for (c = c1; c <= c2; ++c)
8968 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8970 length += strlen ("expected ");
8971 buffer = (char *) alloca (length);
8972 length = 0;
8974 for (c = c1; c <= c2; ++c)
8976 const char *prefix = length ? " or " : "expected ";
8978 strcpy (buffer + length, prefix);
8979 length += strlen (prefix);
8980 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8981 length += strlen (get_tree_code_name ((enum tree_code) c));
8984 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8985 buffer, get_tree_code_name (TREE_CODE (node)),
8986 function, trim_filename (file), line);
8990 /* Similar to tree_check_failed, except that we check that a tree does
8991 not belong to the specified class, given in CL. */
8993 void
8994 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8995 const char *file, int line, const char *function)
8997 internal_error
8998 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8999 TREE_CODE_CLASS_STRING (cl),
9000 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9001 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9005 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9007 void
9008 omp_clause_check_failed (const_tree node, const char *file, int line,
9009 const char *function, enum omp_clause_code code)
9011 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9012 "in %s, at %s:%d",
9013 omp_clause_code_name[code],
9014 get_tree_code_name (TREE_CODE (node)),
9015 function, trim_filename (file), line);
9019 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9021 void
9022 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9023 const char *function, enum omp_clause_code c1,
9024 enum omp_clause_code c2)
9026 char *buffer;
9027 unsigned length = 0;
9028 unsigned int c;
9030 for (c = c1; c <= c2; ++c)
9031 length += 4 + strlen (omp_clause_code_name[c]);
9033 length += strlen ("expected ");
9034 buffer = (char *) alloca (length);
9035 length = 0;
9037 for (c = c1; c <= c2; ++c)
9039 const char *prefix = length ? " or " : "expected ";
9041 strcpy (buffer + length, prefix);
9042 length += strlen (prefix);
9043 strcpy (buffer + length, omp_clause_code_name[c]);
9044 length += strlen (omp_clause_code_name[c]);
9047 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9048 buffer, omp_clause_code_name[TREE_CODE (node)],
9049 function, trim_filename (file), line);
9053 #undef DEFTREESTRUCT
9054 #define DEFTREESTRUCT(VAL, NAME) NAME,
9056 static const char *ts_enum_names[] = {
9057 #include "treestruct.def"
9059 #undef DEFTREESTRUCT
9061 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9063 /* Similar to tree_class_check_failed, except that we check for
9064 whether CODE contains the tree structure identified by EN. */
9066 void
9067 tree_contains_struct_check_failed (const_tree node,
9068 const enum tree_node_structure_enum en,
9069 const char *file, int line,
9070 const char *function)
9072 internal_error
9073 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9074 TS_ENUM_NAME (en),
9075 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9079 /* Similar to above, except that the check is for the bounds of a
9080 TREE_INT_CST's (dynamically sized) vector of elements. */
9082 void
9083 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9084 const char *function)
9086 internal_error
9087 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9088 "at %s:%d",
9089 idx + 1, len, function, trim_filename (file), line);
9092 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9093 (dynamically sized) vector. */
9095 void
9096 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9097 const char *function)
9099 internal_error
9100 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9101 idx + 1, len, function, trim_filename (file), line);
9104 /* Similar to above, except that the check is for the bounds of the operand
9105 vector of an expression node EXP. */
9107 void
9108 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9109 int line, const char *function)
9111 enum tree_code code = TREE_CODE (exp);
9112 internal_error
9113 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9114 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9115 function, trim_filename (file), line);
9118 /* Similar to above, except that the check is for the number of
9119 operands of an OMP_CLAUSE node. */
9121 void
9122 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9123 int line, const char *function)
9125 internal_error
9126 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9127 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9128 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9129 trim_filename (file), line);
9131 #endif /* ENABLE_TREE_CHECKING */
9133 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9134 and mapped to the machine mode MODE. Initialize its fields and build
9135 the information necessary for debugging output. */
9137 static tree
9138 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9140 tree t;
9141 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9143 t = make_node (VECTOR_TYPE);
9144 TREE_TYPE (t) = mv_innertype;
9145 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9146 SET_TYPE_MODE (t, mode);
9148 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9149 SET_TYPE_STRUCTURAL_EQUALITY (t);
9150 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9151 || mode != VOIDmode)
9152 && !VECTOR_BOOLEAN_TYPE_P (t))
9153 TYPE_CANONICAL (t)
9154 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9156 layout_type (t);
9158 hashval_t hash = type_hash_canon_hash (t);
9159 t = type_hash_canon (hash, t);
9161 /* We have built a main variant, based on the main variant of the
9162 inner type. Use it to build the variant we return. */
9163 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9164 && TREE_TYPE (t) != innertype)
9165 return build_type_attribute_qual_variant (t,
9166 TYPE_ATTRIBUTES (innertype),
9167 TYPE_QUALS (innertype));
9169 return t;
9172 static tree
9173 make_or_reuse_type (unsigned size, int unsignedp)
9175 int i;
9177 if (size == INT_TYPE_SIZE)
9178 return unsignedp ? unsigned_type_node : integer_type_node;
9179 if (size == CHAR_TYPE_SIZE)
9180 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9181 if (size == SHORT_TYPE_SIZE)
9182 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9183 if (size == LONG_TYPE_SIZE)
9184 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9185 if (size == LONG_LONG_TYPE_SIZE)
9186 return (unsignedp ? long_long_unsigned_type_node
9187 : long_long_integer_type_node);
9189 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9190 if (size == int_n_data[i].bitsize
9191 && int_n_enabled_p[i])
9192 return (unsignedp ? int_n_trees[i].unsigned_type
9193 : int_n_trees[i].signed_type);
9195 if (unsignedp)
9196 return make_unsigned_type (size);
9197 else
9198 return make_signed_type (size);
9201 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9203 static tree
9204 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9206 if (satp)
9208 if (size == SHORT_FRACT_TYPE_SIZE)
9209 return unsignedp ? sat_unsigned_short_fract_type_node
9210 : sat_short_fract_type_node;
9211 if (size == FRACT_TYPE_SIZE)
9212 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9213 if (size == LONG_FRACT_TYPE_SIZE)
9214 return unsignedp ? sat_unsigned_long_fract_type_node
9215 : sat_long_fract_type_node;
9216 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9217 return unsignedp ? sat_unsigned_long_long_fract_type_node
9218 : sat_long_long_fract_type_node;
9220 else
9222 if (size == SHORT_FRACT_TYPE_SIZE)
9223 return unsignedp ? unsigned_short_fract_type_node
9224 : short_fract_type_node;
9225 if (size == FRACT_TYPE_SIZE)
9226 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9227 if (size == LONG_FRACT_TYPE_SIZE)
9228 return unsignedp ? unsigned_long_fract_type_node
9229 : long_fract_type_node;
9230 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9231 return unsignedp ? unsigned_long_long_fract_type_node
9232 : long_long_fract_type_node;
9235 return make_fract_type (size, unsignedp, satp);
9238 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9240 static tree
9241 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9243 if (satp)
9245 if (size == SHORT_ACCUM_TYPE_SIZE)
9246 return unsignedp ? sat_unsigned_short_accum_type_node
9247 : sat_short_accum_type_node;
9248 if (size == ACCUM_TYPE_SIZE)
9249 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9250 if (size == LONG_ACCUM_TYPE_SIZE)
9251 return unsignedp ? sat_unsigned_long_accum_type_node
9252 : sat_long_accum_type_node;
9253 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9254 return unsignedp ? sat_unsigned_long_long_accum_type_node
9255 : sat_long_long_accum_type_node;
9257 else
9259 if (size == SHORT_ACCUM_TYPE_SIZE)
9260 return unsignedp ? unsigned_short_accum_type_node
9261 : short_accum_type_node;
9262 if (size == ACCUM_TYPE_SIZE)
9263 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9264 if (size == LONG_ACCUM_TYPE_SIZE)
9265 return unsignedp ? unsigned_long_accum_type_node
9266 : long_accum_type_node;
9267 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9268 return unsignedp ? unsigned_long_long_accum_type_node
9269 : long_long_accum_type_node;
9272 return make_accum_type (size, unsignedp, satp);
9276 /* Create an atomic variant node for TYPE. This routine is called
9277 during initialization of data types to create the 5 basic atomic
9278 types. The generic build_variant_type function requires these to
9279 already be set up in order to function properly, so cannot be
9280 called from there. If ALIGN is non-zero, then ensure alignment is
9281 overridden to this value. */
9283 static tree
9284 build_atomic_base (tree type, unsigned int align)
9286 tree t;
9288 /* Make sure it's not already registered. */
9289 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9290 return t;
9292 t = build_variant_type_copy (type);
9293 set_type_quals (t, TYPE_QUAL_ATOMIC);
9295 if (align)
9296 SET_TYPE_ALIGN (t, align);
9298 return t;
9301 /* Information about the _FloatN and _FloatNx types. This must be in
9302 the same order as the corresponding TI_* enum values. */
9303 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9305 { 16, false },
9306 { 32, false },
9307 { 64, false },
9308 { 128, false },
9309 { 32, true },
9310 { 64, true },
9311 { 128, true },
9315 /* Create nodes for all integer types (and error_mark_node) using the sizes
9316 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9318 void
9319 build_common_tree_nodes (bool signed_char)
9321 int i;
9323 error_mark_node = make_node (ERROR_MARK);
9324 TREE_TYPE (error_mark_node) = error_mark_node;
9326 initialize_sizetypes ();
9328 /* Define both `signed char' and `unsigned char'. */
9329 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9330 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9331 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9332 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9334 /* Define `char', which is like either `signed char' or `unsigned char'
9335 but not the same as either. */
9336 char_type_node
9337 = (signed_char
9338 ? make_signed_type (CHAR_TYPE_SIZE)
9339 : make_unsigned_type (CHAR_TYPE_SIZE));
9340 TYPE_STRING_FLAG (char_type_node) = 1;
9342 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9343 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9344 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9345 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9346 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9347 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9348 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9349 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9351 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9353 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9354 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9356 if (int_n_enabled_p[i])
9358 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9359 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9363 /* Define a boolean type. This type only represents boolean values but
9364 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9365 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9366 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9367 TYPE_PRECISION (boolean_type_node) = 1;
9368 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9370 /* Define what type to use for size_t. */
9371 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9372 size_type_node = unsigned_type_node;
9373 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9374 size_type_node = long_unsigned_type_node;
9375 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9376 size_type_node = long_long_unsigned_type_node;
9377 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9378 size_type_node = short_unsigned_type_node;
9379 else
9381 int i;
9383 size_type_node = NULL_TREE;
9384 for (i = 0; i < NUM_INT_N_ENTS; i++)
9385 if (int_n_enabled_p[i])
9387 char name[50], altname[50];
9388 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9389 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9391 if (strcmp (name, SIZE_TYPE) == 0
9392 || strcmp (altname, SIZE_TYPE) == 0)
9394 size_type_node = int_n_trees[i].unsigned_type;
9397 if (size_type_node == NULL_TREE)
9398 gcc_unreachable ();
9401 /* Define what type to use for ptrdiff_t. */
9402 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9403 ptrdiff_type_node = integer_type_node;
9404 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9405 ptrdiff_type_node = long_integer_type_node;
9406 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9407 ptrdiff_type_node = long_long_integer_type_node;
9408 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9409 ptrdiff_type_node = short_integer_type_node;
9410 else
9412 ptrdiff_type_node = NULL_TREE;
9413 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9414 if (int_n_enabled_p[i])
9416 char name[50], altname[50];
9417 sprintf (name, "__int%d", int_n_data[i].bitsize);
9418 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9420 if (strcmp (name, PTRDIFF_TYPE) == 0
9421 || strcmp (altname, PTRDIFF_TYPE) == 0)
9422 ptrdiff_type_node = int_n_trees[i].signed_type;
9424 if (ptrdiff_type_node == NULL_TREE)
9425 gcc_unreachable ();
9428 /* Fill in the rest of the sized types. Reuse existing type nodes
9429 when possible. */
9430 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9431 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9432 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9433 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9434 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9436 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9437 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9438 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9439 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9440 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9442 /* Don't call build_qualified type for atomics. That routine does
9443 special processing for atomics, and until they are initialized
9444 it's better not to make that call.
9446 Check to see if there is a target override for atomic types. */
9448 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9449 targetm.atomic_align_for_mode (QImode));
9450 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9451 targetm.atomic_align_for_mode (HImode));
9452 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9453 targetm.atomic_align_for_mode (SImode));
9454 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9455 targetm.atomic_align_for_mode (DImode));
9456 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9457 targetm.atomic_align_for_mode (TImode));
9459 access_public_node = get_identifier ("public");
9460 access_protected_node = get_identifier ("protected");
9461 access_private_node = get_identifier ("private");
9463 /* Define these next since types below may use them. */
9464 integer_zero_node = build_int_cst (integer_type_node, 0);
9465 integer_one_node = build_int_cst (integer_type_node, 1);
9466 integer_three_node = build_int_cst (integer_type_node, 3);
9467 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9469 size_zero_node = size_int (0);
9470 size_one_node = size_int (1);
9471 bitsize_zero_node = bitsize_int (0);
9472 bitsize_one_node = bitsize_int (1);
9473 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9475 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9476 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9478 void_type_node = make_node (VOID_TYPE);
9479 layout_type (void_type_node);
9481 /* We are not going to have real types in C with less than byte alignment,
9482 so we might as well not have any types that claim to have it. */
9483 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9484 TYPE_USER_ALIGN (void_type_node) = 0;
9486 void_node = make_node (VOID_CST);
9487 TREE_TYPE (void_node) = void_type_node;
9489 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9491 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9492 layout_type (TREE_TYPE (null_pointer_node));
9494 ptr_type_node = build_pointer_type (void_type_node);
9495 const_ptr_type_node
9496 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9497 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9498 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9500 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9502 float_type_node = make_node (REAL_TYPE);
9503 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9504 layout_type (float_type_node);
9506 double_type_node = make_node (REAL_TYPE);
9507 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9508 layout_type (double_type_node);
9510 long_double_type_node = make_node (REAL_TYPE);
9511 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9512 layout_type (long_double_type_node);
9514 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9516 int n = floatn_nx_types[i].n;
9517 bool extended = floatn_nx_types[i].extended;
9518 scalar_float_mode mode;
9519 if (!targetm.floatn_mode (n, extended).exists (&mode))
9520 continue;
9521 int precision = GET_MODE_PRECISION (mode);
9522 /* Work around the rs6000 KFmode having precision 113 not
9523 128. */
9524 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9525 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9526 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9527 if (!extended)
9528 gcc_assert (min_precision == n);
9529 if (precision < min_precision)
9530 precision = min_precision;
9531 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9532 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9533 layout_type (FLOATN_NX_TYPE_NODE (i));
9534 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9536 float128t_type_node = float128_type_node;
9537 #ifdef HAVE_BFmode
9538 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9539 && targetm.scalar_mode_supported_p (BFmode)
9540 && targetm.libgcc_floating_mode_supported_p (BFmode))
9542 bfloat16_type_node = make_node (REAL_TYPE);
9543 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9544 layout_type (bfloat16_type_node);
9545 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9547 #endif
9549 float_ptr_type_node = build_pointer_type (float_type_node);
9550 double_ptr_type_node = build_pointer_type (double_type_node);
9551 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9552 integer_ptr_type_node = build_pointer_type (integer_type_node);
9554 /* Fixed size integer types. */
9555 uint16_type_node = make_or_reuse_type (16, 1);
9556 uint32_type_node = make_or_reuse_type (32, 1);
9557 uint64_type_node = make_or_reuse_type (64, 1);
9558 if (targetm.scalar_mode_supported_p (TImode))
9559 uint128_type_node = make_or_reuse_type (128, 1);
9561 /* Decimal float types. */
9562 if (targetm.decimal_float_supported_p ())
9564 dfloat32_type_node = make_node (REAL_TYPE);
9565 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9566 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9567 layout_type (dfloat32_type_node);
9569 dfloat64_type_node = make_node (REAL_TYPE);
9570 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9571 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9572 layout_type (dfloat64_type_node);
9574 dfloat128_type_node = make_node (REAL_TYPE);
9575 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9576 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9577 layout_type (dfloat128_type_node);
9580 complex_integer_type_node = build_complex_type (integer_type_node, true);
9581 complex_float_type_node = build_complex_type (float_type_node, true);
9582 complex_double_type_node = build_complex_type (double_type_node, true);
9583 complex_long_double_type_node = build_complex_type (long_double_type_node,
9584 true);
9586 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9588 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9589 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9590 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9593 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9594 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9595 sat_ ## KIND ## _type_node = \
9596 make_sat_signed_ ## KIND ## _type (SIZE); \
9597 sat_unsigned_ ## KIND ## _type_node = \
9598 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9599 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9600 unsigned_ ## KIND ## _type_node = \
9601 make_unsigned_ ## KIND ## _type (SIZE);
9603 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9604 sat_ ## WIDTH ## KIND ## _type_node = \
9605 make_sat_signed_ ## KIND ## _type (SIZE); \
9606 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9607 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9608 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9609 unsigned_ ## WIDTH ## KIND ## _type_node = \
9610 make_unsigned_ ## KIND ## _type (SIZE);
9612 /* Make fixed-point type nodes based on four different widths. */
9613 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9614 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9615 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9616 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9617 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9619 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9620 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9621 NAME ## _type_node = \
9622 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9623 u ## NAME ## _type_node = \
9624 make_or_reuse_unsigned_ ## KIND ## _type \
9625 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9626 sat_ ## NAME ## _type_node = \
9627 make_or_reuse_sat_signed_ ## KIND ## _type \
9628 (GET_MODE_BITSIZE (MODE ## mode)); \
9629 sat_u ## NAME ## _type_node = \
9630 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9631 (GET_MODE_BITSIZE (U ## MODE ## mode));
9633 /* Fixed-point type and mode nodes. */
9634 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9635 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9636 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9637 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9638 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9639 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9640 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9641 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9642 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9643 MAKE_FIXED_MODE_NODE (accum, da, DA)
9644 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9647 tree t = targetm.build_builtin_va_list ();
9649 /* Many back-ends define record types without setting TYPE_NAME.
9650 If we copied the record type here, we'd keep the original
9651 record type without a name. This breaks name mangling. So,
9652 don't copy record types and let c_common_nodes_and_builtins()
9653 declare the type to be __builtin_va_list. */
9654 if (TREE_CODE (t) != RECORD_TYPE)
9655 t = build_variant_type_copy (t);
9657 va_list_type_node = t;
9660 /* SCEV analyzer global shared trees. */
9661 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9662 TREE_TYPE (chrec_dont_know) = void_type_node;
9663 chrec_known = make_node (SCEV_KNOWN);
9664 TREE_TYPE (chrec_known) = void_type_node;
9667 /* Modify DECL for given flags.
9668 TM_PURE attribute is set only on types, so the function will modify
9669 DECL's type when ECF_TM_PURE is used. */
9671 void
9672 set_call_expr_flags (tree decl, int flags)
9674 if (flags & ECF_NOTHROW)
9675 TREE_NOTHROW (decl) = 1;
9676 if (flags & ECF_CONST)
9677 TREE_READONLY (decl) = 1;
9678 if (flags & ECF_PURE)
9679 DECL_PURE_P (decl) = 1;
9680 if (flags & ECF_LOOPING_CONST_OR_PURE)
9681 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9682 if (flags & ECF_NOVOPS)
9683 DECL_IS_NOVOPS (decl) = 1;
9684 if (flags & ECF_NORETURN)
9685 TREE_THIS_VOLATILE (decl) = 1;
9686 if (flags & ECF_MALLOC)
9687 DECL_IS_MALLOC (decl) = 1;
9688 if (flags & ECF_RETURNS_TWICE)
9689 DECL_IS_RETURNS_TWICE (decl) = 1;
9690 if (flags & ECF_LEAF)
9691 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9692 NULL, DECL_ATTRIBUTES (decl));
9693 if (flags & ECF_COLD)
9694 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9695 NULL, DECL_ATTRIBUTES (decl));
9696 if (flags & ECF_RET1)
9697 DECL_ATTRIBUTES (decl)
9698 = tree_cons (get_identifier ("fn spec"),
9699 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9700 DECL_ATTRIBUTES (decl));
9701 if ((flags & ECF_TM_PURE) && flag_tm)
9702 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9703 /* Looping const or pure is implied by noreturn.
9704 There is currently no way to declare looping const or looping pure alone. */
9705 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9706 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9710 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9712 static void
9713 local_define_builtin (const char *name, tree type, enum built_in_function code,
9714 const char *library_name, int ecf_flags)
9716 tree decl;
9718 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9719 library_name, NULL_TREE);
9720 set_call_expr_flags (decl, ecf_flags);
9722 set_builtin_decl (code, decl, true);
9725 /* Call this function after instantiating all builtins that the language
9726 front end cares about. This will build the rest of the builtins
9727 and internal functions that are relied upon by the tree optimizers and
9728 the middle-end. */
9730 void
9731 build_common_builtin_nodes (void)
9733 tree tmp, ftype;
9734 int ecf_flags;
9736 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9738 ftype = build_function_type_list (void_type_node,
9739 ptr_type_node,
9740 ptr_type_node,
9741 integer_type_node,
9742 NULL_TREE);
9743 local_define_builtin ("__builtin_clear_padding", ftype,
9744 BUILT_IN_CLEAR_PADDING,
9745 "__builtin_clear_padding",
9746 ECF_LEAF | ECF_NOTHROW);
9749 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9750 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9751 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9752 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9754 ftype = build_function_type (void_type_node, void_list_node);
9755 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9756 local_define_builtin ("__builtin_unreachable", ftype,
9757 BUILT_IN_UNREACHABLE,
9758 "__builtin_unreachable",
9759 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9760 | ECF_CONST | ECF_COLD);
9761 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9762 local_define_builtin ("__builtin_unreachable trap", ftype,
9763 BUILT_IN_UNREACHABLE_TRAP,
9764 "__builtin_unreachable trap",
9765 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9766 | ECF_CONST | ECF_COLD);
9767 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9768 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9769 "abort",
9770 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9771 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9772 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9773 "__builtin_trap",
9774 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9777 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9778 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9780 ftype = build_function_type_list (ptr_type_node,
9781 ptr_type_node, const_ptr_type_node,
9782 size_type_node, NULL_TREE);
9784 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9785 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9786 "memcpy", ECF_NOTHROW | ECF_LEAF);
9787 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9788 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9789 "memmove", ECF_NOTHROW | ECF_LEAF);
9792 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9794 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9795 const_ptr_type_node, size_type_node,
9796 NULL_TREE);
9797 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9798 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9801 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9803 ftype = build_function_type_list (ptr_type_node,
9804 ptr_type_node, integer_type_node,
9805 size_type_node, NULL_TREE);
9806 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9807 "memset", ECF_NOTHROW | ECF_LEAF);
9810 /* If we're checking the stack, `alloca' can throw. */
9811 const int alloca_flags
9812 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9814 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9816 ftype = build_function_type_list (ptr_type_node,
9817 size_type_node, NULL_TREE);
9818 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9819 "alloca", alloca_flags);
9822 ftype = build_function_type_list (ptr_type_node, size_type_node,
9823 size_type_node, NULL_TREE);
9824 local_define_builtin ("__builtin_alloca_with_align", ftype,
9825 BUILT_IN_ALLOCA_WITH_ALIGN,
9826 "__builtin_alloca_with_align",
9827 alloca_flags);
9829 ftype = build_function_type_list (ptr_type_node, size_type_node,
9830 size_type_node, size_type_node, NULL_TREE);
9831 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9832 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9833 "__builtin_alloca_with_align_and_max",
9834 alloca_flags);
9836 ftype = build_function_type_list (void_type_node,
9837 ptr_type_node, ptr_type_node,
9838 ptr_type_node, NULL_TREE);
9839 local_define_builtin ("__builtin_init_trampoline", ftype,
9840 BUILT_IN_INIT_TRAMPOLINE,
9841 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9842 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9843 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9844 "__builtin_init_heap_trampoline",
9845 ECF_NOTHROW | ECF_LEAF);
9846 local_define_builtin ("__builtin_init_descriptor", ftype,
9847 BUILT_IN_INIT_DESCRIPTOR,
9848 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9850 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9851 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9852 BUILT_IN_ADJUST_TRAMPOLINE,
9853 "__builtin_adjust_trampoline",
9854 ECF_CONST | ECF_NOTHROW);
9855 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9856 BUILT_IN_ADJUST_DESCRIPTOR,
9857 "__builtin_adjust_descriptor",
9858 ECF_CONST | ECF_NOTHROW);
9860 ftype = build_function_type_list (void_type_node,
9861 ptr_type_node, ptr_type_node, NULL_TREE);
9862 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9863 local_define_builtin ("__builtin___clear_cache", ftype,
9864 BUILT_IN_CLEAR_CACHE,
9865 "__clear_cache",
9866 ECF_NOTHROW);
9868 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9869 BUILT_IN_NONLOCAL_GOTO,
9870 "__builtin_nonlocal_goto",
9871 ECF_NORETURN | ECF_NOTHROW);
9873 ftype = build_function_type_list (void_type_node,
9874 ptr_type_node, ptr_type_node, NULL_TREE);
9875 local_define_builtin ("__builtin_setjmp_setup", ftype,
9876 BUILT_IN_SETJMP_SETUP,
9877 "__builtin_setjmp_setup", ECF_NOTHROW);
9879 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9880 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9881 BUILT_IN_SETJMP_RECEIVER,
9882 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9884 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9885 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9886 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9888 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9889 local_define_builtin ("__builtin_stack_restore", ftype,
9890 BUILT_IN_STACK_RESTORE,
9891 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9893 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9894 const_ptr_type_node, size_type_node,
9895 NULL_TREE);
9896 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9897 "__builtin_memcmp_eq",
9898 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9900 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9901 "__builtin_strncmp_eq",
9902 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9904 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9905 "__builtin_strcmp_eq",
9906 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9908 /* If there's a possibility that we might use the ARM EABI, build the
9909 alternate __cxa_end_cleanup node used to resume from C++. */
9910 if (targetm.arm_eabi_unwinder)
9912 ftype = build_function_type_list (void_type_node, NULL_TREE);
9913 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9914 BUILT_IN_CXA_END_CLEANUP,
9915 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9918 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9919 local_define_builtin ("__builtin_unwind_resume", ftype,
9920 BUILT_IN_UNWIND_RESUME,
9921 ((targetm_common.except_unwind_info (&global_options)
9922 == UI_SJLJ)
9923 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9924 ECF_NORETURN);
9926 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9928 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9929 NULL_TREE);
9930 local_define_builtin ("__builtin_return_address", ftype,
9931 BUILT_IN_RETURN_ADDRESS,
9932 "__builtin_return_address",
9933 ECF_NOTHROW);
9936 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9937 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9939 ftype = build_function_type_list (void_type_node, ptr_type_node,
9940 ptr_type_node, NULL_TREE);
9941 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9942 local_define_builtin ("__cyg_profile_func_enter", ftype,
9943 BUILT_IN_PROFILE_FUNC_ENTER,
9944 "__cyg_profile_func_enter", 0);
9945 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9946 local_define_builtin ("__cyg_profile_func_exit", ftype,
9947 BUILT_IN_PROFILE_FUNC_EXIT,
9948 "__cyg_profile_func_exit", 0);
9951 /* The exception object and filter values from the runtime. The argument
9952 must be zero before exception lowering, i.e. from the front end. After
9953 exception lowering, it will be the region number for the exception
9954 landing pad. These functions are PURE instead of CONST to prevent
9955 them from being hoisted past the exception edge that will initialize
9956 their values in the landing pad. */
9957 ftype = build_function_type_list (ptr_type_node,
9958 integer_type_node, NULL_TREE);
9959 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9960 /* Only use TM_PURE if we have TM language support. */
9961 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9962 ecf_flags |= ECF_TM_PURE;
9963 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9964 "__builtin_eh_pointer", ecf_flags);
9966 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9967 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9968 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9969 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9971 ftype = build_function_type_list (void_type_node,
9972 integer_type_node, integer_type_node,
9973 NULL_TREE);
9974 local_define_builtin ("__builtin_eh_copy_values", ftype,
9975 BUILT_IN_EH_COPY_VALUES,
9976 "__builtin_eh_copy_values", ECF_NOTHROW);
9978 /* Complex multiplication and division. These are handled as builtins
9979 rather than optabs because emit_library_call_value doesn't support
9980 complex. Further, we can do slightly better when folding these
9981 beasties if the real and imaginary parts of the arguments are separate. */
9983 int mode;
9985 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9987 char mode_name_buf[4], *q;
9988 const char *p;
9989 enum built_in_function mcode, dcode;
9990 tree type, inner_type;
9991 const char *prefix = "__";
9993 if (targetm.libfunc_gnu_prefix)
9994 prefix = "__gnu_";
9996 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9997 if (type == NULL)
9998 continue;
9999 inner_type = TREE_TYPE (type);
10001 ftype = build_function_type_list (type, inner_type, inner_type,
10002 inner_type, inner_type, NULL_TREE);
10004 mcode = ((enum built_in_function)
10005 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10006 dcode = ((enum built_in_function)
10007 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10009 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10010 *q = TOLOWER (*p);
10011 *q = '\0';
10013 /* For -ftrapping-math these should throw from a former
10014 -fnon-call-exceptions stmt. */
10015 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10016 NULL);
10017 local_define_builtin (built_in_names[mcode], ftype, mcode,
10018 built_in_names[mcode],
10019 ECF_CONST | ECF_LEAF);
10021 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10022 NULL);
10023 local_define_builtin (built_in_names[dcode], ftype, dcode,
10024 built_in_names[dcode],
10025 ECF_CONST | ECF_LEAF);
10029 init_internal_fns ();
10032 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10033 better way.
10035 If we requested a pointer to a vector, build up the pointers that
10036 we stripped off while looking for the inner type. Similarly for
10037 return values from functions.
10039 The argument TYPE is the top of the chain, and BOTTOM is the
10040 new type which we will point to. */
10042 tree
10043 reconstruct_complex_type (tree type, tree bottom)
10045 tree inner, outer;
10047 if (TREE_CODE (type) == POINTER_TYPE)
10049 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10050 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10051 TYPE_REF_CAN_ALIAS_ALL (type));
10053 else if (TREE_CODE (type) == REFERENCE_TYPE)
10055 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10056 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10057 TYPE_REF_CAN_ALIAS_ALL (type));
10059 else if (TREE_CODE (type) == ARRAY_TYPE)
10061 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10062 outer = build_array_type (inner, TYPE_DOMAIN (type));
10064 else if (TREE_CODE (type) == FUNCTION_TYPE)
10066 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10067 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10068 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10070 else if (TREE_CODE (type) == METHOD_TYPE)
10072 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10073 /* The build_method_type_directly() routine prepends 'this' to the argument
10074 list, so we must compensate by stripping it off again. */
10075 outer
10076 = build_method_type_directly
10077 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10078 inner,
10079 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10081 else if (TREE_CODE (type) == OFFSET_TYPE)
10083 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10084 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10086 else
10087 return bottom;
10089 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10090 TYPE_QUALS (type));
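/* Editor's note (illustrative sketch, not part of the original source): given
   TYPE == float ** and BOTTOM == a four-element vector of floats,
   reconstruct_complex_type rebuilds the stripped pointer layers around
   BOTTOM, yielding a pointer to a pointer to the vector type:

     tree v4sf = build_vector_type (float_type_node, 4);
     tree pp_float
       = build_pointer_type (build_pointer_type (float_type_node));
     tree pp_v4sf = reconstruct_complex_type (pp_float, v4sf);  */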
10093 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10094 the inner type. */
10095 tree
10096 build_vector_type_for_mode (tree innertype, machine_mode mode)
10098 poly_int64 nunits;
10099 unsigned int bitsize;
10101 switch (GET_MODE_CLASS (mode))
10103 case MODE_VECTOR_BOOL:
10104 case MODE_VECTOR_INT:
10105 case MODE_VECTOR_FLOAT:
10106 case MODE_VECTOR_FRACT:
10107 case MODE_VECTOR_UFRACT:
10108 case MODE_VECTOR_ACCUM:
10109 case MODE_VECTOR_UACCUM:
10110 nunits = GET_MODE_NUNITS (mode);
10111 break;
10113 case MODE_INT:
10114 /* Check that there are no leftover bits. */
10115 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10116 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10117 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10118 break;
10120 default:
10121 gcc_unreachable ();
10124 return make_vector_type (innertype, nunits, mode);
10127 /* Similarly, but takes the inner type and number of units, which must be
10128 a power of two. */
10130 tree
10131 build_vector_type (tree innertype, poly_int64 nunits)
10133 return make_vector_type (innertype, nunits, VOIDmode);
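/* Editor's note (illustrative sketch, not part of the original source): a
   four-element vector of SImode integers can be built either from an element
   count or from an explicit vector mode, assuming the target provides
   V4SImode:

     tree v4si = build_vector_type (intSI_type_node, 4);
     tree v4si_alt = build_vector_type_for_mode (intSI_type_node, V4SImode);  */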
10136 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10138 tree
10139 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10141 gcc_assert (mask_mode != BLKmode);
10143 unsigned HOST_WIDE_INT esize;
10144 if (VECTOR_MODE_P (mask_mode))
10146 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10147 esize = vector_element_size (vsize, nunits);
10149 else
10150 esize = 1;
10152 tree bool_type = build_nonstandard_boolean_type (esize);
10154 return make_vector_type (bool_type, nunits, mask_mode);
10157 /* Build a vector type that holds one boolean result for each element of
10158 vector type VECTYPE. The public interface for this operation is
10159 truth_type_for. */
10161 static tree
10162 build_truth_vector_type_for (tree vectype)
10164 machine_mode vector_mode = TYPE_MODE (vectype);
10165 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10167 machine_mode mask_mode;
10168 if (VECTOR_MODE_P (vector_mode)
10169 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10170 return build_truth_vector_type_for_mode (nunits, mask_mode);
10172 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10173 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10174 tree bool_type = build_nonstandard_boolean_type (esize);
10176 return make_vector_type (bool_type, nunits, VOIDmode);
10179 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10180 set. */
10182 tree
10183 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10185 tree t = make_vector_type (innertype, nunits, VOIDmode);
10186 tree cand;
10187 /* We always build the non-opaque variant before the opaque one,
10188 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10189 cand = TYPE_NEXT_VARIANT (t);
10190 if (cand
10191 && TYPE_VECTOR_OPAQUE (cand)
10192 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10193 return cand;
10194 /* Otherwise build a variant type and make sure to queue it after
10195 the non-opaque type. */
10196 cand = build_distinct_type_copy (t);
10197 TYPE_VECTOR_OPAQUE (cand) = true;
10198 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10199 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10200 TYPE_NEXT_VARIANT (t) = cand;
10201 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10202 return cand;
10205 /* Return the value of element I of VECTOR_CST T as a poly_wide_int. */
10207 static poly_wide_int
10208 vector_cst_int_elt (const_tree t, unsigned int i)
10210 /* First handle elements that are directly encoded. */
10211 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10212 if (i < encoded_nelts)
10213 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10215 /* Identify the pattern that contains element I and work out the index of
10216 the last encoded element for that pattern. */
10217 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10218 unsigned int pattern = i % npatterns;
10219 unsigned int count = i / npatterns;
10220 unsigned int final_i = encoded_nelts - npatterns + pattern;
10222 /* If there are no steps, the final encoded value is the right one. */
10223 if (!VECTOR_CST_STEPPED_P (t))
10224 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10226 /* Otherwise work out the value from the last two encoded elements. */
10227 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10228 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10229 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10230 return wi::to_poly_wide (v2) + (count - 2) * diff;
10233 /* Return the value of element I of VECTOR_CST T. */
10235 tree
10236 vector_cst_elt (const_tree t, unsigned int i)
10238 /* First handle elements that are directly encoded. */
10239 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10240 if (i < encoded_nelts)
10241 return VECTOR_CST_ENCODED_ELT (t, i);
10243 /* If there are no steps, the final encoded value is the right one. */
10244 if (!VECTOR_CST_STEPPED_P (t))
10246 /* Identify the pattern that contains element I and work out the index of
10247 the last encoded element for that pattern. */
10248 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10249 unsigned int pattern = i % npatterns;
10250 unsigned int final_i = encoded_nelts - npatterns + pattern;
10251 return VECTOR_CST_ENCODED_ELT (t, final_i);
10254 /* Otherwise work out the value from the last two encoded elements. */
10255 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10256 vector_cst_int_elt (t, i));
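/* Editor's note (worked example, not part of the original source): a stepped
   VECTOR_CST such as { 0, 1, 2, 3, ... } is encoded with NPATTERNS == 1 and
   three encoded elements { 0, 1, 2 }.  For I >= 3, vector_cst_int_elt uses
   the last two encoded elements: v1 == 1, v2 == 2, diff == 1 and count == I,
   so the returned value is v2 + (count - 2) * diff == I, as expected.  */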
10259 /* Given an initializer INIT, return TRUE if INIT is zero or some
10260 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10261 null, set *NONZERO if and only if INIT is known not to be all
10262 zeros. A return value of false combined with *NONZERO being false
10263 means that INIT may, but need not, be all zeros. All other
10264 combinations indicate definitive answers. */
10266 bool
10267 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10269 bool dummy;
10270 if (!nonzero)
10271 nonzero = &dummy;
10273 /* Conservatively clear NONZERO and set it only if INIT is definitely
10274 not all zero. */
10275 *nonzero = false;
10277 STRIP_NOPS (init);
10279 unsigned HOST_WIDE_INT off = 0;
10281 switch (TREE_CODE (init))
10283 case INTEGER_CST:
10284 if (integer_zerop (init))
10285 return true;
10287 *nonzero = true;
10288 return false;
10290 case REAL_CST:
10291 /* ??? Note that this is not correct for C4X float formats. There,
10292 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10293 negative exponent. */
10294 if (real_zerop (init)
10295 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10296 return true;
10298 *nonzero = true;
10299 return false;
10301 case FIXED_CST:
10302 if (fixed_zerop (init))
10303 return true;
10305 *nonzero = true;
10306 return false;
10308 case COMPLEX_CST:
10309 if (integer_zerop (init)
10310 || (real_zerop (init)
10311 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10312 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10313 return true;
10315 *nonzero = true;
10316 return false;
10318 case VECTOR_CST:
10319 if (VECTOR_CST_NPATTERNS (init) == 1
10320 && VECTOR_CST_DUPLICATE_P (init)
10321 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10322 return true;
10324 *nonzero = true;
10325 return false;
10327 case CONSTRUCTOR:
10329 if (TREE_CLOBBER_P (init))
10330 return false;
10332 unsigned HOST_WIDE_INT idx;
10333 tree elt;
10335 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10336 if (!initializer_zerop (elt, nonzero))
10337 return false;
10339 return true;
10342 case MEM_REF:
10344 tree arg = TREE_OPERAND (init, 0);
10345 if (TREE_CODE (arg) != ADDR_EXPR)
10346 return false;
10347 tree offset = TREE_OPERAND (init, 1);
10348 if (TREE_CODE (offset) != INTEGER_CST
10349 || !tree_fits_uhwi_p (offset))
10350 return false;
10351 off = tree_to_uhwi (offset);
10352 if (INT_MAX < off)
10353 return false;
10354 arg = TREE_OPERAND (arg, 0);
10355 if (TREE_CODE (arg) != STRING_CST)
10356 return false;
10357 init = arg;
10359 /* Fall through. */
10361 case STRING_CST:
10363 gcc_assert (off <= INT_MAX);
10365 int i = off;
10366 int n = TREE_STRING_LENGTH (init);
10367 if (n <= i)
10368 return false;
10370 /* We need to loop through all elements to handle cases like
10371 "\0" and "\0foobar". */
10372 for (i = 0; i < n; ++i)
10373 if (TREE_STRING_POINTER (init)[i] != '\0')
10375 *nonzero = true;
10376 return false;
10379 return true;
10382 default:
10383 return false;
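/* Editor's note (worked example, not part of the original source):
   integer_zero_node and a CONSTRUCTOR whose elements are all zero both make
   initializer_zerop return true; build_int_cst (integer_type_node, 5) makes
   it return false and set *NONZERO; a clobber makes it return false while
   leaving *NONZERO false, i.e. the contents are simply unknown.  */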
10387 /* Return true if EXPR is an initializer expression in which every element
10388 is a constant that is numerically equal to 0 or 1. The elements do not
10389 need to be equal to each other. */
10391 bool
10392 initializer_each_zero_or_onep (const_tree expr)
10394 STRIP_ANY_LOCATION_WRAPPER (expr);
10396 switch (TREE_CODE (expr))
10398 case INTEGER_CST:
10399 return integer_zerop (expr) || integer_onep (expr);
10401 case REAL_CST:
10402 return real_zerop (expr) || real_onep (expr);
10404 case VECTOR_CST:
10406 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10407 if (VECTOR_CST_STEPPED_P (expr)
10408 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10409 return false;
10411 for (unsigned int i = 0; i < nelts; ++i)
10413 tree elt = vector_cst_elt (expr, i);
10414 if (!initializer_each_zero_or_onep (elt))
10415 return false;
10418 return true;
10421 default:
10422 return false;
10426 /* Check whether vector VEC consists entirely of equal elements and
10427 whether the number of elements corresponds to the type of VEC.
10428 Return the first element of the vector,
10429 or NULL_TREE if the vector is not uniform. */
10430 tree
10431 uniform_vector_p (const_tree vec)
10433 tree first, t;
10434 unsigned HOST_WIDE_INT i, nelts;
10436 if (vec == NULL_TREE)
10437 return NULL_TREE;
10439 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10441 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10442 return TREE_OPERAND (vec, 0);
10444 else if (TREE_CODE (vec) == VECTOR_CST)
10446 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10447 return VECTOR_CST_ENCODED_ELT (vec, 0);
10448 return NULL_TREE;
10451 else if (TREE_CODE (vec) == CONSTRUCTOR
10452 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10454 first = error_mark_node;
10456 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10458 if (i == 0)
10460 first = t;
10461 continue;
10463 if (!operand_equal_p (first, t, 0))
10464 return NULL_TREE;
10466 if (i != nelts)
10467 return NULL_TREE;
10469 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10470 return uniform_vector_p (first);
10471 return first;
10474 return NULL_TREE;
10477 /* If the argument is INTEGER_CST, return it. If the argument is vector
10478 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10479 return NULL_TREE.
10480 Look through location wrappers. */
10482 tree
10483 uniform_integer_cst_p (tree t)
10485 STRIP_ANY_LOCATION_WRAPPER (t);
10487 if (TREE_CODE (t) == INTEGER_CST)
10488 return t;
10490 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10492 t = uniform_vector_p (t);
10493 if (t && TREE_CODE (t) == INTEGER_CST)
10494 return t;
10497 return NULL_TREE;
10500 /* Check whether T is a constant, or a constant vector in which every element
10501 E satisfies ~E + 1 == pow2; if so, return ~E, otherwise return NULL_TREE. */
10503 tree
10504 bitmask_inv_cst_vector_p (tree t)
10507 tree_code code = TREE_CODE (t);
10508 tree type = TREE_TYPE (t);
10510 if (!INTEGRAL_TYPE_P (type)
10511 && !VECTOR_INTEGER_TYPE_P (type))
10512 return NULL_TREE;
10514 unsigned HOST_WIDE_INT nelts = 1;
10515 tree cst;
10516 unsigned int idx = 0;
10517 bool uniform = uniform_integer_cst_p (t);
10518 tree newtype = unsigned_type_for (type);
10519 tree_vector_builder builder;
10520 if (code == INTEGER_CST)
10521 cst = t;
10522 else
10524 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10525 return NULL_TREE;
10527 cst = vector_cst_elt (t, 0);
10528 builder.new_vector (newtype, nelts, 1);
10531 tree ty = unsigned_type_for (TREE_TYPE (cst));
10535 if (idx > 0)
10536 cst = vector_cst_elt (t, idx);
10537 wide_int icst = wi::to_wide (cst);
10538 wide_int inv = wi::bit_not (icst);
10539 icst = wi::add (1, inv);
10540 if (wi::popcount (icst) != 1)
10541 return NULL_TREE;
10543 tree newcst = wide_int_to_tree (ty, inv);
10545 if (uniform)
10546 return build_uniform_cst (newtype, newcst);
10548 builder.quick_push (newcst);
10550 while (++idx < nelts);
10552 return builder.build ();
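/* Editor's note (worked example, not part of the original source): for a
   32-bit element E == 0xffffff00, ~E == 0xff and ~E + 1 == 0x100 is a power
   of two, so the element is accepted and 0xff (in the unsigned variant of the
   element type) is produced.  E == 0xffffff01 is rejected, because
   ~E + 1 == 0xff is not a power of two.  */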
10555 /* If VECTOR_CST T has a single nonzero element, return the index of that
10556 element, otherwise return -1. */
10559 single_nonzero_element (const_tree t)
10561 unsigned HOST_WIDE_INT nelts;
10562 unsigned int repeat_nelts;
10563 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10564 repeat_nelts = nelts;
10565 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10567 nelts = vector_cst_encoded_nelts (t);
10568 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10570 else
10571 return -1;
10573 int res = -1;
10574 for (unsigned int i = 0; i < nelts; ++i)
10576 tree elt = vector_cst_elt (t, i);
10577 if (!integer_zerop (elt) && !real_zerop (elt))
10579 if (res >= 0 || i >= repeat_nelts)
10580 return -1;
10581 res = i;
10584 return res;
10587 /* Build an empty statement at location LOC. */
10589 tree
10590 build_empty_stmt (location_t loc)
10592 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10593 SET_EXPR_LOCATION (t, loc);
10594 return t;
10598 /* Build an OMP clause with code CODE. LOC is the location of the
10599 clause. */
10601 tree
10602 build_omp_clause (location_t loc, enum omp_clause_code code)
10604 tree t;
10605 int size, length;
10607 length = omp_clause_num_ops[code];
10608 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10610 record_node_allocation_statistics (OMP_CLAUSE, size);
10612 t = (tree) ggc_internal_alloc (size);
10613 memset (t, 0, size);
10614 TREE_SET_CODE (t, OMP_CLAUSE);
10615 OMP_CLAUSE_SET_CODE (t, code);
10616 OMP_CLAUSE_LOCATION (t) = loc;
10618 return t;
10621 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10622 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10623 Except for the CODE and operand count field, other storage for the
10624 object is initialized to zeros. */
10626 tree
10627 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10629 tree t;
10630 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10632 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10633 gcc_assert (len >= 1);
10635 record_node_allocation_statistics (code, length);
10637 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10639 TREE_SET_CODE (t, code);
10641 /* Can't use TREE_OPERAND to store the length because if checking is
10642 enabled, it will try to check the length before we store it. :-P */
10643 t->exp.operands[0] = build_int_cst (sizetype, len);
10645 return t;
10648 /* Helper function for build_call_* functions; build a CALL_EXPR with
10649 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10650 the argument slots. */
10652 static tree
10653 build_call_1 (tree return_type, tree fn, int nargs)
10655 tree t;
10657 t = build_vl_exp (CALL_EXPR, nargs + 3);
10658 TREE_TYPE (t) = return_type;
10659 CALL_EXPR_FN (t) = fn;
10660 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10662 return t;
10665 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10666 FN and a null static chain slot. NARGS is the number of call arguments
10667 which are specified as "..." arguments. */
10669 tree
10670 build_call_nary (tree return_type, tree fn, int nargs, ...)
10672 tree ret;
10673 va_list args;
10674 va_start (args, nargs);
10675 ret = build_call_valist (return_type, fn, nargs, args);
10676 va_end (args);
10677 return ret;
10680 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10681 FN and a null static chain slot. NARGS is the number of call arguments
10682 which are specified as a va_list ARGS. */
10684 tree
10685 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10687 tree t;
10688 int i;
10690 t = build_call_1 (return_type, fn, nargs);
10691 for (i = 0; i < nargs; i++)
10692 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10693 process_call_operands (t);
10694 return t;
10697 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10698 FN and a null static chain slot. NARGS is the number of call arguments
10699 which are specified as a tree array ARGS. */
10701 tree
10702 build_call_array_loc (location_t loc, tree return_type, tree fn,
10703 int nargs, const tree *args)
10705 tree t;
10706 int i;
10708 t = build_call_1 (return_type, fn, nargs);
10709 for (i = 0; i < nargs; i++)
10710 CALL_EXPR_ARG (t, i) = args[i];
10711 process_call_operands (t);
10712 SET_EXPR_LOCATION (t, loc);
10713 return t;
10716 /* Like build_call_array, but takes a vec. */
10718 tree
10719 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10721 tree ret, t;
10722 unsigned int ix;
10724 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10725 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10726 CALL_EXPR_ARG (ret, ix) = t;
10727 process_call_operands (ret);
10728 return ret;
10731 /* Conveniently construct a function call expression. FNDECL names the
10732 function to be called and N arguments are passed in the array
10733 ARGARRAY. */
10735 tree
10736 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10738 tree fntype = TREE_TYPE (fndecl);
10739 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10741 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10744 /* Conveniently construct a function call expression. FNDECL names the
10745 function to be called and the arguments are passed in the vector
10746 VEC. */
10748 tree
10749 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10751 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10752 vec_safe_address (vec));
10756 /* Conveniently construct a function call expression. FNDECL names the
10757 function to be called, N is the number of arguments, and the "..."
10758 parameters are the argument expressions. */
10760 tree
10761 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10763 va_list ap;
10764 tree *argarray = XALLOCAVEC (tree, n);
10765 int i;
10767 va_start (ap, n);
10768 for (i = 0; i < n; i++)
10769 argarray[i] = va_arg (ap, tree);
10770 va_end (ap);
10771 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10774 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10775 varargs macros aren't supported by all bootstrap compilers. */
10777 tree
10778 build_call_expr (tree fndecl, int n, ...)
10780 va_list ap;
10781 tree *argarray = XALLOCAVEC (tree, n);
10782 int i;
10784 va_start (ap, n);
10785 for (i = 0; i < n; i++)
10786 argarray[i] = va_arg (ap, tree);
10787 va_end (ap);
10788 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
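/* Editor's note (illustrative sketch, not part of the original source):
   building a CALL_EXPR to the memcpy builtin, assuming DEST, SRC and LEN are
   existing trees of suitable types:

     tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fndecl, 3, dest, src, len);  */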
10791 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10792 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10793 It will get gimplified later into an ordinary internal function. */
10795 tree
10796 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10797 tree type, int n, const tree *args)
10799 tree t = build_call_1 (type, NULL_TREE, n);
10800 for (int i = 0; i < n; ++i)
10801 CALL_EXPR_ARG (t, i) = args[i];
10802 SET_EXPR_LOCATION (t, loc);
10803 CALL_EXPR_IFN (t) = ifn;
10804 process_call_operands (t);
10805 return t;
10808 /* Build an internal call expression. This is just like CALL_EXPR, except
10809 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10810 internal function call. */
10812 tree
10813 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10814 tree type, int n, ...)
10816 va_list ap;
10817 tree *argarray = XALLOCAVEC (tree, n);
10818 int i;
10820 va_start (ap, n);
10821 for (i = 0; i < n; i++)
10822 argarray[i] = va_arg (ap, tree);
10823 va_end (ap);
10824 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10827 /* Return a function call to FN, if the target is guaranteed to support it,
10828 or null otherwise.
10830 N is the number of arguments, passed in the "...", and TYPE is the
10831 type of the return value. */
10833 tree
10834 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10835 int n, ...)
10837 va_list ap;
10838 tree *argarray = XALLOCAVEC (tree, n);
10839 int i;
10841 va_start (ap, n);
10842 for (i = 0; i < n; i++)
10843 argarray[i] = va_arg (ap, tree);
10844 va_end (ap);
10845 if (internal_fn_p (fn))
10847 internal_fn ifn = as_internal_fn (fn);
10848 if (direct_internal_fn_p (ifn))
10850 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10851 if (!direct_internal_fn_supported_p (ifn, types,
10852 OPTIMIZE_FOR_BOTH))
10853 return NULL_TREE;
10855 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10857 else
10859 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10860 if (!fndecl)
10861 return NULL_TREE;
10862 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10866 /* Return a function call to the appropriate builtin alloca variant.
10868 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10869 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10870 bound for SIZE in case it is not a fixed value. */
10872 tree
10873 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10875 if (max_size >= 0)
10877 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10878 return
10879 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10881 else if (align > 0)
10883 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10884 return build_call_expr (t, 2, size, size_int (align));
10886 else
10888 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10889 return build_call_expr (t, 1, size);
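/* Editor's note (illustrative sketch, not part of the original source):
   requesting a 64-byte allocation with a nonzero requested alignment ALIGN
   and no known upper bound picks the __builtin_alloca_with_align variant,
   since ALIGN is nonzero and MAX_SIZE is negative:

     tree call = build_alloca_call_expr (size_int (64), align, -1);  */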
10893 /* The built-in decl to use to mark code points believed to be unreachable.
10894 Typically __builtin_unreachable, but __builtin_trap if
10895 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10896 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10897 appropriate ubsan function. When building a call directly, use
10898 {gimple_,}build_builtin_unreachable instead. */
10900 tree
10901 builtin_decl_unreachable ()
10903 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10905 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10906 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10907 : flag_unreachable_traps)
10908 fncode = BUILT_IN_UNREACHABLE_TRAP;
10909 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10910 in the sanopt pass. */
10912 return builtin_decl_explicit (fncode);
10915 /* Build a call to __builtin_unreachable, possibly rewritten by
10916 -fsanitize=unreachable. Use this rather than the above when practical. */
10918 tree
10919 build_builtin_unreachable (location_t loc)
10921 tree data = NULL_TREE;
10922 tree fn = sanitize_unreachable_fn (&data, loc);
10923 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10926 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10927 if SIZE == -1) and return a tree node representing a char* pointer to
10928 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10929 the STRING_CST value is the LEN bytes at STR (the representation
10930 of the string, which may be wide). Otherwise it's all zeros. */
10932 tree
10933 build_string_literal (unsigned len, const char *str /* = NULL */,
10934 tree eltype /* = char_type_node */,
10935 unsigned HOST_WIDE_INT size /* = -1 */)
10937 tree t = build_string (len, str);
10938 /* Set the maximum valid index based on the string length or SIZE. */
10939 unsigned HOST_WIDE_INT maxidx
10940 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10942 tree index = build_index_type (size_int (maxidx));
10943 eltype = build_type_variant (eltype, 1, 0);
10944 tree type = build_array_type (eltype, index);
10945 TREE_TYPE (t) = type;
10946 TREE_CONSTANT (t) = 1;
10947 TREE_READONLY (t) = 1;
10948 TREE_STATIC (t) = 1;
10950 type = build_pointer_type (eltype);
10951 t = build1 (ADDR_EXPR, type,
10952 build4 (ARRAY_REF, eltype,
10953 t, integer_zero_node, NULL_TREE, NULL_TREE));
10954 return t;
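/* Editor's note (illustrative sketch, not part of the original source):
   building the literal "hello", passing strlen ("hello") + 1 == 6 so the
   terminating NUL is part of the array, and obtaining the char * ADDR_EXPR
   that points at it:

     tree str = build_string_literal (6, "hello");  */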
10959 /* Return true if T (assumed to be a DECL) must be assigned a memory
10960 location. */
10962 bool
10963 needs_to_live_in_memory (const_tree t)
10965 return (TREE_ADDRESSABLE (t)
10966 || is_global_var (t)
10967 || (TREE_CODE (t) == RESULT_DECL
10968 && !DECL_BY_REFERENCE (t)
10969 && aggregate_value_p (t, current_function_decl)));
10972 /* Return the value of the constant X, sign-extended. */
10974 HOST_WIDE_INT
10975 int_cst_value (const_tree x)
10977 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10978 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10980 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10981 gcc_assert (cst_and_fits_in_hwi (x));
10983 if (bits < HOST_BITS_PER_WIDE_INT)
10985 bool negative = ((val >> (bits - 1)) & 1) != 0;
10986 if (negative)
10987 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10988 else
10989 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10992 return val;
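/* Editor's note (worked example, not part of the original source): for a
   16-bit signed constant whose low bits are 0xffff, BITS == 16 is below
   HOST_BITS_PER_WIDE_INT and the sign bit is set, so VAL is widened with
   ones and the function returns -1; for 0x7fff it returns 32767.  */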
10995 /* If TYPE is an integral or pointer type, return an integer type with
10996 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10997 if TYPE is already an integer type of signedness UNSIGNEDP.
10998 If TYPE is a floating-point type, return an integer type with the same
10999 bitsize and with the signedness given by UNSIGNEDP; this is useful
11000 when doing bit-level operations on a floating-point value. */
11002 tree
11003 signed_or_unsigned_type_for (int unsignedp, tree type)
11005 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11006 return type;
11008 if (TREE_CODE (type) == VECTOR_TYPE)
11010 tree inner = TREE_TYPE (type);
11011 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11012 if (!inner2)
11013 return NULL_TREE;
11014 if (inner == inner2)
11015 return type;
11016 machine_mode new_mode;
11017 if (VECTOR_MODE_P (TYPE_MODE (type))
11018 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11019 return build_vector_type_for_mode (inner2, new_mode);
11020 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11023 if (TREE_CODE (type) == COMPLEX_TYPE)
11025 tree inner = TREE_TYPE (type);
11026 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11027 if (!inner2)
11028 return NULL_TREE;
11029 if (inner == inner2)
11030 return type;
11031 return build_complex_type (inner2);
11034 unsigned int bits;
11035 if (INTEGRAL_TYPE_P (type)
11036 || POINTER_TYPE_P (type)
11037 || TREE_CODE (type) == OFFSET_TYPE)
11038 bits = TYPE_PRECISION (type);
11039 else if (TREE_CODE (type) == REAL_TYPE)
11040 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11041 else
11042 return NULL_TREE;
11044 return build_nonstandard_integer_type (bits, unsignedp);
11047 /* If TYPE is an integral or pointer type, return an integer type with
11048 the same precision which is unsigned, or itself if TYPE is already an
11049 unsigned integer type. If TYPE is a floating-point type, return an
11050 unsigned integer type with the same bitsize as TYPE. */
11052 tree
11053 unsigned_type_for (tree type)
11055 return signed_or_unsigned_type_for (1, type);
11058 /* If TYPE is an integral or pointer type, return an integer type with
11059 the same precision which is signed, or itself if TYPE is already a
11060 signed integer type. If TYPE is a floating-point type, return a
11061 signed integer type with the same bitsize as TYPE. */
11063 tree
11064 signed_type_for (tree type)
11066 return signed_or_unsigned_type_for (0, type);
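/* Editor's note (illustrative sketch, not part of the original source):
   unsigned_type_for maps integer_type_node to an unsigned integer type of
   the same precision, and signed_type_for maps that back to a signed type of
   the same precision; for vector and complex types the conversion is applied
   to the element type:

     tree u = unsigned_type_for (integer_type_node);
     tree s = signed_type_for (u);  */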
11069 /* - For VECTOR_TYPEs:
11070 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11071 - The number of elements must match (known_eq).
11072 - targetm.vectorize.get_mask_mode must exist and return exactly
11073 the same mode as the truth type's mode.
11074 - Otherwise, the truth type must be a BOOLEAN_TYPE
11075 or useless_type_conversion_p to BOOLEAN_TYPE. */
11076 bool
11077 is_truth_type_for (tree type, tree truth_type)
11079 machine_mode mask_mode = TYPE_MODE (truth_type);
11080 machine_mode vmode = TYPE_MODE (type);
11081 machine_mode tmask_mode;
11083 if (TREE_CODE (type) == VECTOR_TYPE)
11085 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11086 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11087 TYPE_VECTOR_SUBPARTS (truth_type))
11088 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11089 && tmask_mode == mask_mode)
11090 return true;
11092 return false;
11095 return useless_type_conversion_p (boolean_type_node, truth_type);
11098 /* If TYPE is a vector type, return a boolean vector type with the same
11099 number of subparts. Otherwise return boolean_type_node. */
11101 tree
11102 truth_type_for (tree type)
11104 if (TREE_CODE (type) == VECTOR_TYPE)
11106 if (VECTOR_BOOLEAN_TYPE_P (type))
11107 return type;
11108 return build_truth_vector_type_for (type);
11110 else
11111 return boolean_type_node;
11114 /* Returns the largest value obtainable by casting something in INNER type to
11115 OUTER type. */
11117 tree
11118 upper_bound_in_type (tree outer, tree inner)
11120 unsigned int det = 0;
11121 unsigned oprec = TYPE_PRECISION (outer);
11122 unsigned iprec = TYPE_PRECISION (inner);
11123 unsigned prec;
11125 /* Compute a unique number for every combination. */
11126 det |= (oprec > iprec) ? 4 : 0;
11127 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11128 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11130 /* Determine the exponent to use. */
11131 switch (det)
11133 case 0:
11134 case 1:
11135 /* oprec <= iprec, outer: signed, inner: don't care. */
11136 prec = oprec - 1;
11137 break;
11138 case 2:
11139 case 3:
11140 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11141 prec = oprec;
11142 break;
11143 case 4:
11144 /* oprec > iprec, outer: signed, inner: signed. */
11145 prec = iprec - 1;
11146 break;
11147 case 5:
11148 /* oprec > iprec, outer: signed, inner: unsigned. */
11149 prec = iprec;
11150 break;
11151 case 6:
11152 /* oprec > iprec, outer: unsigned, inner: signed. */
11153 prec = oprec;
11154 break;
11155 case 7:
11156 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11157 prec = iprec;
11158 break;
11159 default:
11160 gcc_unreachable ();
11163 return wide_int_to_tree (outer,
11164 wi::mask (prec, false, TYPE_PRECISION (outer)));
11167 /* Returns the smallest value obtainable by casting something in INNER type to
11168 OUTER type. */
11170 tree
11171 lower_bound_in_type (tree outer, tree inner)
11173 unsigned oprec = TYPE_PRECISION (outer);
11174 unsigned iprec = TYPE_PRECISION (inner);
11176 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11177 and obtain 0. */
11178 if (TYPE_UNSIGNED (outer)
11179 /* If we are widening something of an unsigned type, OUTER type
11180 contains all values of INNER type. In particular, both INNER
11181 and OUTER types have zero in common. */
11182 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11183 return build_int_cst (outer, 0);
11184 else
11186 /* If we are widening a signed type to another signed type, we
11187 want to obtain -2^^(iprec-1). If we are keeping the
11188 precision or narrowing to a signed type, we want to obtain
11189 -2^(oprec-1). */
11190 unsigned prec = oprec > iprec ? iprec : oprec;
11191 return wide_int_to_tree (outer,
11192 wi::mask (prec - 1, true,
11193 TYPE_PRECISION (outer)));
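/* Editor's note (worked example, not part of the original source): with
   OUTER a signed 32-bit type and INNER an unsigned 16-bit type, oprec > iprec
   and only INNER is unsigned, so det == 5 and upper_bound_in_type returns
   2^16 - 1 == 65535; lower_bound_in_type returns 0, since widening an
   unsigned INNER can never produce a negative value.  */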
11197 /* Return nonzero if two operands that are suitable for PHI nodes are
11198 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11199 SSA_NAME or invariant. Note that this is strictly an optimization.
11200 That is, callers of this function can directly call operand_equal_p
11201 and get the same result, only slower. */
11204 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11206 if (arg0 == arg1)
11207 return 1;
11208 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11209 return 0;
11210 return operand_equal_p (arg0, arg1, 0);
11213 /* Return the number of trailing zeros in the binary representation of X. */
11215 tree
11216 num_ending_zeros (const_tree x)
11218 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11222 #define WALK_SUBTREE(NODE) \
11223 do \
11225 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11226 if (result) \
11227 return result; \
11229 while (0)
11231 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11232 be walked whenever a type is seen in the tree. The rest of the operands and
11233 the return value are as for walk_tree. */
11235 static tree
11236 walk_type_fields (tree type, walk_tree_fn func, void *data,
11237 hash_set<tree> *pset, walk_tree_lh lh)
11239 tree result = NULL_TREE;
11241 switch (TREE_CODE (type))
11243 case POINTER_TYPE:
11244 case REFERENCE_TYPE:
11245 case VECTOR_TYPE:
11246 /* We have to worry about mutually recursive pointers. These can't
11247 be written in C. They can in Ada. It's pathological, but
11248 there's an ACATS test (c38102a) that checks it. Deal with this
11249 by checking if we're pointing to another pointer, that one
11250 points to another pointer, that one does too, and we have no htab.
11251 If so, get a hash table. We check three levels deep to avoid
11252 the cost of the hash table if we don't need one. */
11253 if (POINTER_TYPE_P (TREE_TYPE (type))
11254 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11255 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11256 && !pset)
11258 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11259 func, data);
11260 if (result)
11261 return result;
11263 break;
11266 /* fall through */
11268 case COMPLEX_TYPE:
11269 WALK_SUBTREE (TREE_TYPE (type));
11270 break;
11272 case METHOD_TYPE:
11273 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11275 /* Fall through. */
11277 case FUNCTION_TYPE:
11278 WALK_SUBTREE (TREE_TYPE (type));
11280 tree arg;
11282 /* We never want to walk into default arguments. */
11283 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11284 WALK_SUBTREE (TREE_VALUE (arg));
11286 break;
11288 case ARRAY_TYPE:
11289 /* Don't follow this node's type if it is a pointer, for fear that
11290 we'll have infinite recursion. If we have a PSET, then we
11291 need not fear. */
11292 if (pset
11293 || (!POINTER_TYPE_P (TREE_TYPE (type))
11294 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11295 WALK_SUBTREE (TREE_TYPE (type));
11296 WALK_SUBTREE (TYPE_DOMAIN (type));
11297 break;
11299 case OFFSET_TYPE:
11300 WALK_SUBTREE (TREE_TYPE (type));
11301 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11302 break;
11304 default:
11305 break;
11308 return NULL_TREE;
11311 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11312 called with the DATA and the address of each sub-tree. If FUNC returns a
11313 non-NULL value, the traversal is stopped, and the value returned by FUNC
11314 is returned. If PSET is non-NULL it is used to record the nodes visited,
11315 and to avoid visiting a node more than once. */
11317 tree
11318 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11319 hash_set<tree> *pset, walk_tree_lh lh)
11321 #define WALK_SUBTREE_TAIL(NODE) \
11322 do \
11324 tp = & (NODE); \
11325 goto tail_recurse; \
11327 while (0)
11329 tail_recurse:
11330 /* Skip empty subtrees. */
11331 if (!*tp)
11332 return NULL_TREE;
11334 /* Don't walk the same tree twice, if the user has requested
11335 that we avoid doing so. */
11336 if (pset && pset->add (*tp))
11337 return NULL_TREE;
11339 /* Call the function. */
11340 int walk_subtrees = 1;
11341 tree result = (*func) (tp, &walk_subtrees, data);
11343 /* If we found something, return it. */
11344 if (result)
11345 return result;
11347 tree t = *tp;
11348 tree_code code = TREE_CODE (t);
11350 /* Even if we didn't, FUNC may have decided that there was nothing
11351 interesting below this point in the tree. */
11352 if (!walk_subtrees)
11354 /* But we still need to check our siblings. */
11355 if (code == TREE_LIST)
11356 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11357 else if (code == OMP_CLAUSE)
11358 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11359 else
11360 return NULL_TREE;
11363 if (lh)
11365 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11366 if (result || !walk_subtrees)
11367 return result;
11370 switch (code)
11372 case ERROR_MARK:
11373 case IDENTIFIER_NODE:
11374 case INTEGER_CST:
11375 case REAL_CST:
11376 case FIXED_CST:
11377 case STRING_CST:
11378 case BLOCK:
11379 case PLACEHOLDER_EXPR:
11380 case SSA_NAME:
11381 case FIELD_DECL:
11382 case RESULT_DECL:
11383 /* None of these have subtrees other than those already walked
11384 above. */
11385 break;
11387 case TREE_LIST:
11388 WALK_SUBTREE (TREE_VALUE (t));
11389 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11391 case TREE_VEC:
11393 int len = TREE_VEC_LENGTH (t);
11395 if (len == 0)
11396 break;
11398 /* Walk all elements but the last. */
11399 for (int i = 0; i < len - 1; ++i)
11400 WALK_SUBTREE (TREE_VEC_ELT (t, i));
11402 /* Now walk the last one as a tail call. */
11403 WALK_SUBTREE_TAIL (TREE_VEC_ELT (t, len - 1));
11406 case VECTOR_CST:
11408 unsigned len = vector_cst_encoded_nelts (t);
11409 if (len == 0)
11410 break;
11411 /* Walk all elements but the last. */
11412 for (unsigned i = 0; i < len - 1; ++i)
11413 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (t, i));
11414 /* Now walk the last one as a tail call. */
11415 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (t, len - 1));
11418 case COMPLEX_CST:
11419 WALK_SUBTREE (TREE_REALPART (t));
11420 WALK_SUBTREE_TAIL (TREE_IMAGPART (t));
11422 case CONSTRUCTOR:
11424 unsigned HOST_WIDE_INT idx;
11425 constructor_elt *ce;
11427 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce);
11428 idx++)
11429 WALK_SUBTREE (ce->value);
11431 break;
11433 case SAVE_EXPR:
11434 WALK_SUBTREE_TAIL (TREE_OPERAND (t, 0));
11436 case BIND_EXPR:
11438 tree decl;
11439 for (decl = BIND_EXPR_VARS (t); decl; decl = DECL_CHAIN (decl))
11441 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11442 into declarations that are just mentioned, rather than
11443 declared; they don't really belong to this part of the tree.
11444 And, we can see cycles: the initializer for a declaration
11445 can refer to the declaration itself. */
11446 WALK_SUBTREE (DECL_INITIAL (decl));
11447 WALK_SUBTREE (DECL_SIZE (decl));
11448 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11450 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (t));
11453 case STATEMENT_LIST:
11455 tree_stmt_iterator i;
11456 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
11457 WALK_SUBTREE (*tsi_stmt_ptr (i));
11459 break;
11461 case OMP_CLAUSE:
11463 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
11464 for (int i = 0; i < len; i++)
11465 WALK_SUBTREE (OMP_CLAUSE_OPERAND (t, i));
11466 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11469 case TARGET_EXPR:
11471 int i, len;
11473 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11474 But, we only want to walk once. */
11475 len = (TREE_OPERAND (t, 3) == TREE_OPERAND (t, 1)) ? 2 : 3;
11476 for (i = 0; i < len; ++i)
11477 WALK_SUBTREE (TREE_OPERAND (t, i));
11478 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len));
11481 case DECL_EXPR:
11482 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11483 defining. We only want to walk into these fields of a type in this
11484 case and not in the general case of a mere reference to the type.
11486 The criterion is as follows: if the field can be an expression, it
11487 must be walked only here. This should be in keeping with the fields
11488 that are directly gimplified in gimplify_type_sizes in order for the
11489 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11490 variable-sized types.
11492 Note that DECLs get walked as part of processing the BIND_EXPR. */
11493 if (TREE_CODE (DECL_EXPR_DECL (t)) == TYPE_DECL)
11495 /* Call the function for the decl so e.g. copy_tree_body_r can
11496 replace it with the remapped one. */
11497 result = (*func) (&DECL_EXPR_DECL (t), &walk_subtrees, data);
11498 if (result || !walk_subtrees)
11499 return result;
11501 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (t));
11502 if (TREE_CODE (*type_p) == ERROR_MARK)
11503 return NULL_TREE;
11505 /* Call the function for the type. See if it returns anything or
11506 doesn't want us to continue. If we are to continue, walk both
11507 the normal fields and those for the declaration case. */
11508 result = (*func) (type_p, &walk_subtrees, data);
11509 if (result || !walk_subtrees)
11510 return result;
11512 tree type = *type_p;
11514 /* But do not walk a pointed-to type since it may itself need to
11515 be walked in the declaration case if it isn't anonymous. */
11516 if (!POINTER_TYPE_P (type))
11518 result = walk_type_fields (type, func, data, pset, lh);
11519 if (result)
11520 return result;
11523 /* If this is a record type, also walk the fields. */
11524 if (RECORD_OR_UNION_TYPE_P (type))
11526 tree field;
11528 for (field = TYPE_FIELDS (type); field;
11529 field = DECL_CHAIN (field))
11531 /* We'd like to look at the type of the field, but we can
11532 easily get infinite recursion. So assume it's pointed
11533 to elsewhere in the tree. Also, ignore things that
11534 aren't fields. */
11535 if (TREE_CODE (field) != FIELD_DECL)
11536 continue;
11538 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11539 WALK_SUBTREE (DECL_SIZE (field));
11540 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11541 if (TREE_CODE (type) == QUAL_UNION_TYPE)
11542 WALK_SUBTREE (DECL_QUALIFIER (field));
11546 /* Same for scalar types. */
11547 else if (TREE_CODE (type) == BOOLEAN_TYPE
11548 || TREE_CODE (type) == ENUMERAL_TYPE
11549 || TREE_CODE (type) == INTEGER_TYPE
11550 || TREE_CODE (type) == FIXED_POINT_TYPE
11551 || TREE_CODE (type) == REAL_TYPE)
11553 WALK_SUBTREE (TYPE_MIN_VALUE (type));
11554 WALK_SUBTREE (TYPE_MAX_VALUE (type));
11557 WALK_SUBTREE (TYPE_SIZE (type));
11558 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (type));
11560 /* FALLTHRU */
11562 default:
11563 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11565 int i, len;
11567 /* Walk over all the sub-trees of this operand. */
11568 len = TREE_OPERAND_LENGTH (t);
11570 /* Go through the subtrees. We need to do this in forward order so
11571 that the scope of a FOR_EXPR is handled properly. */
11572 if (len)
11574 for (i = 0; i < len - 1; ++i)
11575 WALK_SUBTREE (TREE_OPERAND (t, i));
11576 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len - 1));
11579 /* If this is a type, walk the needed fields in the type. */
11580 else if (TYPE_P (t))
11581 return walk_type_fields (t, func, data, pset, lh);
11582 break;
11585 /* We didn't find what we were looking for. */
11586 return NULL_TREE;
11588 #undef WALK_SUBTREE_TAIL
11590 #undef WALK_SUBTREE
11592 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11594 tree
11595 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11596 walk_tree_lh lh)
11598 tree result;
11600 hash_set<tree> pset;
11601 result = walk_tree_1 (tp, func, data, &pset, lh);
11602 return result;
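/* Editor's note (illustrative sketch, not part of the original source): a
   minimal walk_tree_fn callback that counts the nodes visited; COUNT_NODES_R
   and EXPR are hypothetical names used only for illustration.  Returning a
   non-NULL tree from the callback stops the walk, and clearing
   *WALK_SUBTREES skips the children of the current node:

     static tree
     count_nodes_r (tree *, int *, void *data)
     {
       ++*(unsigned *) data;
       return NULL_TREE;
     }

     unsigned n = 0;
     walk_tree_without_duplicates_1 (&expr, count_nodes_r, &n, NULL);  */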
11606 tree
11607 tree_block (tree t)
11609 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11611 if (IS_EXPR_CODE_CLASS (c))
11612 return LOCATION_BLOCK (t->exp.locus);
11613 gcc_unreachable ();
11614 return NULL;
11617 void
11618 tree_set_block (tree t, tree b)
11620 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11622 if (IS_EXPR_CODE_CLASS (c))
11624 t->exp.locus = set_block (t->exp.locus, b);
11626 else
11627 gcc_unreachable ();
11630 /* Create a nameless artificial label and put it in the current
11631 function context. The label has a location of LOC. Returns the
11632 newly created label. */
11634 tree
11635 create_artificial_label (location_t loc)
11637 tree lab = build_decl (loc,
11638 LABEL_DECL, NULL_TREE, void_type_node);
11640 DECL_ARTIFICIAL (lab) = 1;
11641 DECL_IGNORED_P (lab) = 1;
11642 DECL_CONTEXT (lab) = current_function_decl;
11643 return lab;
11646 /* Given a tree, try to return a useful variable name that we can use
11647 to prefix a temporary that is being assigned the value of the tree.
11648 E.g. given <temp> = &A, return A. */
11650 const char *
11651 get_name (tree t)
11653 tree stripped_decl;
11655 stripped_decl = t;
11656 STRIP_NOPS (stripped_decl);
11657 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11658 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11659 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11661 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11662 if (!name)
11663 return NULL;
11664 return IDENTIFIER_POINTER (name);
11666 else
11668 switch (TREE_CODE (stripped_decl))
11670 case ADDR_EXPR:
11671 return get_name (TREE_OPERAND (stripped_decl, 0));
11672 default:
11673 return NULL;
11678 /* Return true if FNTYPE has a variable argument list. */
11680 bool
11681 stdarg_p (const_tree fntype)
11683 function_args_iterator args_iter;
11684 tree n = NULL_TREE, t;
11686 if (!fntype)
11687 return false;
11689 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11690 return true;
11692 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11694 n = t;
11697 return n != NULL_TREE && n != void_type_node;
11700 /* Return true if FNTYPE has a prototype. */
11702 bool
11703 prototype_p (const_tree fntype)
11705 tree t;
11707 gcc_assert (fntype != NULL_TREE);
11709 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11710 return true;
11712 t = TYPE_ARG_TYPES (fntype);
11713 return (t != NULL_TREE);
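/* For illustration (a sketch, not taken from the sources): given the
   FUNCTION_TYPE of "int printf (const char *, ...)", both stdarg_p and
   prototype_p return true; for "int f (void)" prototype_p is true but
   stdarg_p is false; and for an old-style unprototyped "int g ()" in C,
   TYPE_ARG_TYPES is NULL_TREE, so both predicates return false.  */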
11716 /* If BLOCK is inlined from an __attribute__((__artificial__))
11717 routine, return a pointer to the location from which it has been
11718 called. */
11719 location_t *
11720 block_nonartificial_location (tree block)
11722 location_t *ret = NULL;
11724 while (block && TREE_CODE (block) == BLOCK
11725 && BLOCK_ABSTRACT_ORIGIN (block))
11727 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11728 if (TREE_CODE (ao) == FUNCTION_DECL)
11730 /* If AO is an artificial inline, point RET to the
11731 call site locus at which it has been inlined and continue
11732 the loop, in case AO's caller is also an artificial
11733 inline. */
11734 if (DECL_DECLARED_INLINE_P (ao)
11735 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11736 ret = &BLOCK_SOURCE_LOCATION (block);
11737 else
11738 break;
11740 else if (TREE_CODE (ao) != BLOCK)
11741 break;
11743 block = BLOCK_SUPERCONTEXT (block);
11745 return ret;
11749 /* If EXP is inlined from an __attribute__((__artificial__))
11750 function, return the location of the original call expression. */
11752 location_t
11753 tree_nonartificial_location (tree exp)
11755 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11757 if (loc)
11758 return *loc;
11759 else
11760 return EXPR_LOCATION (exp);
11763 /* Return the location into which EXP has been inlined. Analogous
11764 to tree_nonartificial_location() above but not limited to artificial
11765 functions declared inline. If SYSTEM_HEADER is true, return
11766 the macro expansion point of the location if it's in a system header. */
11768 location_t
11769 tree_inlined_location (tree exp, bool system_header /* = true */)
11771 location_t loc = UNKNOWN_LOCATION;
11773 tree block = TREE_BLOCK (exp);
11775 while (block && TREE_CODE (block) == BLOCK
11776 && BLOCK_ABSTRACT_ORIGIN (block))
11778 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11779 if (TREE_CODE (ao) == FUNCTION_DECL)
11780 loc = BLOCK_SOURCE_LOCATION (block);
11781 else if (TREE_CODE (ao) != BLOCK)
11782 break;
11784 block = BLOCK_SUPERCONTEXT (block);
11787 if (loc == UNKNOWN_LOCATION)
11789 loc = EXPR_LOCATION (exp);
11790 if (system_header)
11791 /* Only consider macro expansion when the block traversal failed
11792 to find a location. Otherwise it's not relevant. */
11793 return expansion_point_location_if_in_system_header (loc);
11796 return loc;
11799 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11800 nodes. */
11802 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11804 hashval_t
11805 cl_option_hasher::hash (tree x)
11807 const_tree const t = x;
11809 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11810 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11811 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11812 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11813 else
11814 gcc_unreachable ();
11817 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11818 TARGET_OPTION_NODE) is the same as that given by *Y, a node of the
11819 same kind. */
11821 bool
11822 cl_option_hasher::equal (tree x, tree y)
11824 const_tree const xt = x;
11825 const_tree const yt = y;
11827 if (TREE_CODE (xt) != TREE_CODE (yt))
11828 return 0;
11830 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11831 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11832 TREE_OPTIMIZATION (yt));
11833 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11834 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11835 TREE_TARGET_OPTION (yt));
11836 else
11837 gcc_unreachable ();
11840 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11842 tree
11843 build_optimization_node (struct gcc_options *opts,
11844 struct gcc_options *opts_set)
11846 tree t;
11848 /* Use the cache of optimization nodes. */
11850 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11851 opts, opts_set);
11853 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11854 t = *slot;
11855 if (!t)
11857 /* Insert this one into the hash table. */
11858 t = cl_optimization_node;
11859 *slot = t;
11861 /* Make a new node for next time round. */
11862 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11865 return t;
11868 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11870 tree
11871 build_target_option_node (struct gcc_options *opts,
11872 struct gcc_options *opts_set)
11874 tree t;
11876 /* Use the cache of target option nodes. */
11878 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11879 opts, opts_set);
11881 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11882 t = *slot;
11883 if (!t)
11885 /* Insert this one into the hash table. */
11886 t = cl_target_option_node;
11887 *slot = t;
11889 /* Make a new node for next time round. */
11890 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11893 return t;
11896 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11897 so that they aren't saved during PCH writing. */
11899 void
11900 prepare_target_option_nodes_for_pch (void)
11902 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11903 for (; iter != cl_option_hash_table->end (); ++iter)
11904 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11905 TREE_TARGET_GLOBALS (*iter) = NULL;
11908 /* Determine the "ultimate origin" of a block. */
11910 tree
11911 block_ultimate_origin (const_tree block)
11913 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11915 if (origin == NULL_TREE)
11916 return NULL_TREE;
11917 else
11919 gcc_checking_assert ((DECL_P (origin)
11920 && DECL_ORIGIN (origin) == origin)
11921 || BLOCK_ORIGIN (origin) == origin);
11922 return origin;
11926 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11927 no instruction. */
11929 bool
11930 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11932 /* Do not strip casts into or out of differing address spaces. */
11933 if (POINTER_TYPE_P (outer_type)
11934 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11936 if (!POINTER_TYPE_P (inner_type)
11937 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11938 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11939 return false;
11941 else if (POINTER_TYPE_P (inner_type)
11942 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11944 /* We already know that outer_type is not a pointer with
11945 a non-generic address space. */
11946 return false;
11949 /* Use precision rather than machine mode when we can, which gives
11950 the correct answer even for submode (bit-field) types. */
11951 if ((INTEGRAL_TYPE_P (outer_type)
11952 || POINTER_TYPE_P (outer_type)
11953 || TREE_CODE (outer_type) == OFFSET_TYPE)
11954 && (INTEGRAL_TYPE_P (inner_type)
11955 || POINTER_TYPE_P (inner_type)
11956 || TREE_CODE (inner_type) == OFFSET_TYPE))
11957 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11959 /* Otherwise fall back on comparing machine modes (e.g. for
11960 aggregate types, floats). */
11961 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
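/* For illustration (a sketch, not taken from the sources): on a target
   with 32-bit int,

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)

   is true because both types have the same precision, whereas a widening
   conversion such as int -> long long changes the precision and therefore
   requires an instruction.  */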
11964 /* Return true iff conversion in EXP generates no instruction. Mark
11965 it inline so that we fully inline into the stripping functions even
11966 though we have two uses of this function. */
11968 static inline bool
11969 tree_nop_conversion (const_tree exp)
11971 tree outer_type, inner_type;
11973 if (location_wrapper_p (exp))
11974 return true;
11975 if (!CONVERT_EXPR_P (exp)
11976 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11977 return false;
11979 outer_type = TREE_TYPE (exp);
11980 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11981 if (!inner_type || inner_type == error_mark_node)
11982 return false;
11984 return tree_nop_conversion_p (outer_type, inner_type);
11987 /* Return true iff conversion in EXP generates no instruction. Don't
11988 consider conversions changing the signedness. */
11990 static bool
11991 tree_sign_nop_conversion (const_tree exp)
11993 tree outer_type, inner_type;
11995 if (!tree_nop_conversion (exp))
11996 return false;
11998 outer_type = TREE_TYPE (exp);
11999 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12001 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12002 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12005 /* Strip conversions from EXP according to tree_nop_conversion and
12006 return the resulting expression. */
12008 tree
12009 tree_strip_nop_conversions (tree exp)
12011 while (tree_nop_conversion (exp))
12012 exp = TREE_OPERAND (exp, 0);
12013 return exp;
12016 /* Strip conversions from EXP according to tree_sign_nop_conversion
12017 and return the resulting expression. */
12019 tree
12020 tree_strip_sign_nop_conversions (tree exp)
12022 while (tree_sign_nop_conversion (exp))
12023 exp = TREE_OPERAND (exp, 0);
12024 return exp;
12027 /* Avoid any floating point extensions from EXP. */
12028 tree
12029 strip_float_extensions (tree exp)
12031 tree sub, expt, subt;
12033 /* For a floating point constant, look up the narrowest type that can hold
12034 it properly and handle it like (type)(narrowest_type)constant.
12035 This way we can optimize for instance a=a*2.0 where "a" is float
12036 but 2.0 is a double constant. */
12037 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12039 REAL_VALUE_TYPE orig;
12040 tree type = NULL;
12042 orig = TREE_REAL_CST (exp);
12043 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12044 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12045 type = float_type_node;
12046 else if (TYPE_PRECISION (TREE_TYPE (exp))
12047 > TYPE_PRECISION (double_type_node)
12048 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12049 type = double_type_node;
12050 if (type)
12051 return build_real_truncate (type, orig);
12054 if (!CONVERT_EXPR_P (exp))
12055 return exp;
12057 sub = TREE_OPERAND (exp, 0);
12058 subt = TREE_TYPE (sub);
12059 expt = TREE_TYPE (exp);
12061 if (!FLOAT_TYPE_P (subt))
12062 return exp;
12064 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12065 return exp;
12067 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12068 return exp;
12070 return strip_float_extensions (sub);
12073 /* Strip out all handled components that produce invariant
12074 offsets. */
12076 const_tree
12077 strip_invariant_refs (const_tree op)
12079 while (handled_component_p (op))
12081 switch (TREE_CODE (op))
12083 case ARRAY_REF:
12084 case ARRAY_RANGE_REF:
12085 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12086 || TREE_OPERAND (op, 2) != NULL_TREE
12087 || TREE_OPERAND (op, 3) != NULL_TREE)
12088 return NULL;
12089 break;
12091 case COMPONENT_REF:
12092 if (TREE_OPERAND (op, 2) != NULL_TREE)
12093 return NULL;
12094 break;
12096 default:;
12098 op = TREE_OPERAND (op, 0);
12101 return op;
12104 /* Strip handled components with zero offset from OP. */
12106 tree
12107 strip_zero_offset_components (tree op)
12109 while (TREE_CODE (op) == COMPONENT_REF
12110 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12111 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12112 op = TREE_OPERAND (op, 0);
12113 return op;
12116 static GTY(()) tree gcc_eh_personality_decl;
12118 /* Return the GCC personality function decl. */
12120 tree
12121 lhd_gcc_personality (void)
12123 if (!gcc_eh_personality_decl)
12124 gcc_eh_personality_decl = build_personality_function ("gcc");
12125 return gcc_eh_personality_decl;
12128 /* TARGET is a call target of GIMPLE call statement
12129 (obtained by gimple_call_fn). Return true if it is
12130 OBJ_TYPE_REF representing a virtual call of a C++ method.
12131 (As opposed to OBJ_TYPE_REF representing objc calls
12132 through a cast where middle-end devirtualization machinery
12133 can't apply.) FOR_DUMP_P is true when being called from
12134 the dump routines. */
12136 bool
12137 virtual_method_call_p (const_tree target, bool for_dump_p)
12139 if (TREE_CODE (target) != OBJ_TYPE_REF)
12140 return false;
12141 tree t = TREE_TYPE (target);
12142 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12143 t = TREE_TYPE (t);
12144 if (TREE_CODE (t) == FUNCTION_TYPE)
12145 return false;
12146 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12147 /* If we do not have BINFO associated, it means that type was built
12148 without devirtualization enabled. Do not consider this a virtual
12149 call. */
12150 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12151 return false;
12152 return true;
12155 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12157 static tree
12158 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12160 unsigned int i;
12161 tree base_binfo, b;
12163 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12164 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12165 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12166 return base_binfo;
12167 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12168 return b;
12169 return NULL;
12172 /* Try to find a base info of BINFO that would have its field decl at offset
12173 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12174 found, return it, otherwise return NULL_TREE. */
12176 tree
12177 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12179 tree type = BINFO_TYPE (binfo);
12181 while (true)
12183 HOST_WIDE_INT pos, size;
12184 tree fld;
12185 int i;
12187 if (types_same_for_odr (type, expected_type))
12188 return binfo;
12189 if (maybe_lt (offset, 0))
12190 return NULL_TREE;
12192 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12194 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12195 continue;
12197 pos = int_bit_position (fld);
12198 size = tree_to_uhwi (DECL_SIZE (fld));
12199 if (known_in_range_p (offset, pos, size))
12200 break;
12202 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12203 return NULL_TREE;
12205 /* Offset 0 indicates the primary base, whose vtable contents are
12206 represented in the binfo for the derived class. */
12207 else if (maybe_ne (offset, 0))
12209 tree found_binfo = NULL, base_binfo;
12210 /* Offsets in BINFO are in bytes relative to the whole structure
12211 while POS is in bits relative to the containing field. */
12212 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12213 / BITS_PER_UNIT);
12215 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12216 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12217 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12219 found_binfo = base_binfo;
12220 break;
12222 if (found_binfo)
12223 binfo = found_binfo;
12224 else
12225 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12226 binfo_offset);
12229 type = TREE_TYPE (fld);
12230 offset -= pos;
12234 /* PR 84195: Replace control characters in "unescaped" with their
12235 escaped equivalents. Allow newlines if -fmessage-length has
12236 been set to a non-zero value. This is done here, rather than
12237 where the attribute is recorded, as the message length can
12238 change between these two locations. */
12240 void
12241 escaped_string::escape (const char *unescaped)
12243 char *escaped;
12244 size_t i, new_i, len;
12246 if (m_owned)
12247 free (m_str);
12249 m_str = const_cast<char *> (unescaped);
12250 m_owned = false;
12252 if (unescaped == NULL || *unescaped == 0)
12253 return;
12255 len = strlen (unescaped);
12256 escaped = NULL;
12257 new_i = 0;
12259 for (i = 0; i < len; i++)
12261 char c = unescaped[i];
12263 if (!ISCNTRL (c))
12265 if (escaped)
12266 escaped[new_i++] = c;
12267 continue;
12270 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12272 if (escaped == NULL)
12274 /* We only allocate space for a new string if we
12275 actually encounter a control character that
12276 needs replacing. */
12277 escaped = (char *) xmalloc (len * 2 + 1);
12278 strncpy (escaped, unescaped, i);
12279 new_i = i;
12282 escaped[new_i++] = '\\';
12284 switch (c)
12286 case '\a': escaped[new_i++] = 'a'; break;
12287 case '\b': escaped[new_i++] = 'b'; break;
12288 case '\f': escaped[new_i++] = 'f'; break;
12289 case '\n': escaped[new_i++] = 'n'; break;
12290 case '\r': escaped[new_i++] = 'r'; break;
12291 case '\t': escaped[new_i++] = 't'; break;
12292 case '\v': escaped[new_i++] = 'v'; break;
12293 default: escaped[new_i++] = '?'; break;
12296 else if (escaped)
12297 escaped[new_i++] = c;
12300 if (escaped)
12302 escaped[new_i] = 0;
12303 m_str = escaped;
12304 m_owned = true;
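/* For illustration (a sketch, not taken from the sources): with line
   wrapping disabled, escaping the message "bad\tidea\n" produces
   "bad\\tidea\\n", i.e. each control character becomes a two-character
   escape; with -fmessage-length set to a non-zero value the newline is
   kept verbatim and only the tab is rewritten.  */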
12308 /* Warn about a use of an identifier which was marked deprecated. Returns
12309 whether a warning was given. */
12311 bool
12312 warn_deprecated_use (tree node, tree attr)
12314 escaped_string msg;
12316 if (node == 0 || !warn_deprecated_decl)
12317 return false;
12319 if (!attr)
12321 if (DECL_P (node))
12322 attr = DECL_ATTRIBUTES (node);
12323 else if (TYPE_P (node))
12325 tree decl = TYPE_STUB_DECL (node);
12326 if (decl)
12327 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12328 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12329 != NULL_TREE)
12331 node = TREE_TYPE (decl);
12332 attr = TYPE_ATTRIBUTES (node);
12337 if (attr)
12338 attr = lookup_attribute ("deprecated", attr);
12340 if (attr)
12341 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12343 bool w = false;
12344 if (DECL_P (node))
12346 auto_diagnostic_group d;
12347 if (msg)
12348 w = warning (OPT_Wdeprecated_declarations,
12349 "%qD is deprecated: %s", node, (const char *) msg);
12350 else
12351 w = warning (OPT_Wdeprecated_declarations,
12352 "%qD is deprecated", node);
12353 if (w)
12354 inform (DECL_SOURCE_LOCATION (node), "declared here");
12356 else if (TYPE_P (node))
12358 tree what = NULL_TREE;
12359 tree decl = TYPE_STUB_DECL (node);
12361 if (TYPE_NAME (node))
12363 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12364 what = TYPE_NAME (node);
12365 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12366 && DECL_NAME (TYPE_NAME (node)))
12367 what = DECL_NAME (TYPE_NAME (node));
12370 auto_diagnostic_group d;
12371 if (what)
12373 if (msg)
12374 w = warning (OPT_Wdeprecated_declarations,
12375 "%qE is deprecated: %s", what, (const char *) msg);
12376 else
12377 w = warning (OPT_Wdeprecated_declarations,
12378 "%qE is deprecated", what);
12380 else
12382 if (msg)
12383 w = warning (OPT_Wdeprecated_declarations,
12384 "type is deprecated: %s", (const char *) msg);
12385 else
12386 w = warning (OPT_Wdeprecated_declarations,
12387 "type is deprecated");
12390 if (w && decl)
12391 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12394 return w;
12397 /* Error out with an identifier which was marked 'unavailable'. */
12398 void
12399 error_unavailable_use (tree node, tree attr)
12401 escaped_string msg;
12403 if (node == 0)
12404 return;
12406 if (!attr)
12408 if (DECL_P (node))
12409 attr = DECL_ATTRIBUTES (node);
12410 else if (TYPE_P (node))
12412 tree decl = TYPE_STUB_DECL (node);
12413 if (decl)
12414 attr = lookup_attribute ("unavailable",
12415 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12419 if (attr)
12420 attr = lookup_attribute ("unavailable", attr);
12422 if (attr)
12423 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12425 if (DECL_P (node))
12427 auto_diagnostic_group d;
12428 if (msg)
12429 error ("%qD is unavailable: %s", node, (const char *) msg);
12430 else
12431 error ("%qD is unavailable", node);
12432 inform (DECL_SOURCE_LOCATION (node), "declared here");
12434 else if (TYPE_P (node))
12436 tree what = NULL_TREE;
12437 tree decl = TYPE_STUB_DECL (node);
12439 if (TYPE_NAME (node))
12441 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12442 what = TYPE_NAME (node);
12443 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12444 && DECL_NAME (TYPE_NAME (node)))
12445 what = DECL_NAME (TYPE_NAME (node));
12448 auto_diagnostic_group d;
12449 if (what)
12451 if (msg)
12452 error ("%qE is unavailable: %s", what, (const char *) msg);
12453 else
12454 error ("%qE is unavailable", what);
12456 else
12458 if (msg)
12459 error ("type is unavailable: %s", (const char *) msg);
12460 else
12461 error ("type is unavailable");
12464 if (decl)
12465 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12469 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12470 somewhere in it. */
12472 bool
12473 contains_bitfld_component_ref_p (const_tree ref)
12475 while (handled_component_p (ref))
12477 if (TREE_CODE (ref) == COMPONENT_REF
12478 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12479 return true;
12480 ref = TREE_OPERAND (ref, 0);
12483 return false;
12486 /* Try to determine whether a TRY_CATCH expression can fall through.
12487 This is a subroutine of block_may_fallthru. */
12489 static bool
12490 try_catch_may_fallthru (const_tree stmt)
12492 tree_stmt_iterator i;
12494 /* If the TRY block can fall through, the whole TRY_CATCH can
12495 fall through. */
12496 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12497 return true;
12499 i = tsi_start (TREE_OPERAND (stmt, 1));
12500 switch (TREE_CODE (tsi_stmt (i)))
12502 case CATCH_EXPR:
12503 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12504 catch expression and a body. The whole TRY_CATCH may fall
12505 through iff any of the catch bodies falls through. */
12506 for (; !tsi_end_p (i); tsi_next (&i))
12508 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12509 return true;
12511 return false;
12513 case EH_FILTER_EXPR:
12514 /* The exception filter expression only matters if there is an
12515 exception. If the exception does not match EH_FILTER_TYPES,
12516 we will execute EH_FILTER_FAILURE, and we will fall through
12517 if that falls through. If the exception does match
12518 EH_FILTER_TYPES, the stack unwinder will continue up the
12519 stack, so we will not fall through. We don't know whether we
12520 will throw an exception which matches EH_FILTER_TYPES or not,
12521 so we just ignore EH_FILTER_TYPES and assume that we might
12522 throw an exception which doesn't match. */
12523 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12525 default:
12526 /* This case represents statements to be executed when an
12527 exception occurs. Those statements are implicitly followed
12528 by a RESX statement to resume execution after the exception.
12529 So in this case the TRY_CATCH never falls through. */
12530 return false;
12534 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12535 need not be 100% accurate; simply be conservative and return true if we
12536 don't know. This is used only to avoid stupidly generating extra code.
12537 If we're wrong, we'll just delete the extra code later. */
12539 bool
12540 block_may_fallthru (const_tree block)
12542 /* This CONST_CAST is okay because expr_last returns its argument
12543 unmodified and we assign it to a const_tree. */
12544 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12546 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12548 case GOTO_EXPR:
12549 case RETURN_EXPR:
12550 /* Easy cases. If the last statement of the block implies
12551 control transfer, then we can't fall through. */
12552 return false;
12554 case SWITCH_EXPR:
12555 /* If there is a default: label or case labels cover all possible
12556 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12557 to some case label in all cases and all we care is whether the
12558 SWITCH_BODY falls through. */
12559 if (SWITCH_ALL_CASES_P (stmt))
12560 return block_may_fallthru (SWITCH_BODY (stmt));
12561 return true;
12563 case COND_EXPR:
12564 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12565 return true;
12566 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12568 case BIND_EXPR:
12569 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12571 case TRY_CATCH_EXPR:
12572 return try_catch_may_fallthru (stmt);
12574 case TRY_FINALLY_EXPR:
12575 /* The finally clause is always executed after the try clause,
12576 so if it does not fall through, then the try-finally will not
12577 fall through. Otherwise, if the try clause does not fall
12578 through, then when the finally clause falls through it will
12579 resume execution wherever the try clause was going. So the
12580 whole try-finally will only fall through if both the try
12581 clause and the finally clause fall through. */
12582 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12583 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12585 case EH_ELSE_EXPR:
12586 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12588 case MODIFY_EXPR:
12589 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12590 stmt = TREE_OPERAND (stmt, 1);
12591 else
12592 return true;
12593 /* FALLTHRU */
12595 case CALL_EXPR:
12596 /* Functions that do not return do not fall through. */
12597 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12599 case CLEANUP_POINT_EXPR:
12600 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12602 case TARGET_EXPR:
12603 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12605 case ERROR_MARK:
12606 return true;
12608 default:
12609 return lang_hooks.block_may_fallthru (stmt);
12613 /* True if we are using EH to handle cleanups. */
12614 static bool using_eh_for_cleanups_flag = false;
12616 /* This routine is called from front ends to indicate eh should be used for
12617 cleanups. */
12618 void
12619 using_eh_for_cleanups (void)
12621 using_eh_for_cleanups_flag = true;
12624 /* Query whether EH is used for cleanups. */
12625 bool
12626 using_eh_for_cleanups_p (void)
12628 return using_eh_for_cleanups_flag;
12631 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12632 const char *
12633 get_tree_code_name (enum tree_code code)
12635 const char *invalid = "<invalid tree code>";
12637 /* The tree_code enum promotes to signed, but we could be getting
12638 invalid values, so force an unsigned comparison. */
12639 if (unsigned (code) >= MAX_TREE_CODES)
12641 if ((unsigned)code == 0xa5a5)
12642 return "ggc_freed";
12643 return invalid;
12646 return tree_code_name[code];
12649 /* Drops the TREE_OVERFLOW flag from T. */
12651 tree
12652 drop_tree_overflow (tree t)
12654 gcc_checking_assert (TREE_OVERFLOW (t));
12656 /* For tree codes with a sharing machinery re-build the result. */
12657 if (poly_int_tree_p (t))
12658 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12660 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12661 and canonicalize the result. */
12662 if (TREE_CODE (t) == VECTOR_CST)
12664 tree_vector_builder builder;
12665 builder.new_unary_operation (TREE_TYPE (t), t, true);
12666 unsigned int count = builder.encoded_nelts ();
12667 for (unsigned int i = 0; i < count; ++i)
12669 tree elt = VECTOR_CST_ELT (t, i);
12670 if (TREE_OVERFLOW (elt))
12671 elt = drop_tree_overflow (elt);
12672 builder.quick_push (elt);
12674 return builder.build ();
12677 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12678 and drop the flag. */
12679 t = copy_node (t);
12680 TREE_OVERFLOW (t) = 0;
12682 /* For constants that contain nested constants, drop the flag
12683 from those as well. */
12684 if (TREE_CODE (t) == COMPLEX_CST)
12686 if (TREE_OVERFLOW (TREE_REALPART (t)))
12687 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12688 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12689 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12692 return t;
12695 /* Given a memory reference expression T, return its base address.
12696 The base address of a memory reference expression is the main
12697 object being referenced. For instance, the base address for
12698 'array[i].fld[j]' is 'array'. You can think of this as stripping
12699 away the offset part from a memory address.
12701 This function calls handled_component_p to strip away all the inner
12702 parts of the memory reference until it reaches the base object. */
12704 tree
12705 get_base_address (tree t)
12707 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12708 t = TREE_OPERAND (t, 0);
12709 while (handled_component_p (t))
12710 t = TREE_OPERAND (t, 0);
12712 if ((TREE_CODE (t) == MEM_REF
12713 || TREE_CODE (t) == TARGET_MEM_REF)
12714 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12715 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12717 return t;
12720 /* Return a tree of sizetype representing the size, in bytes, of the element
12721 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12723 tree
12724 array_ref_element_size (tree exp)
12726 tree aligned_size = TREE_OPERAND (exp, 3);
12727 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12728 location_t loc = EXPR_LOCATION (exp);
12730 /* If a size was specified in the ARRAY_REF, it's the size measured
12731 in alignment units of the element type. So multiply by that value. */
12732 if (aligned_size)
12734 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12735 sizetype from another type of the same width and signedness. */
12736 if (TREE_TYPE (aligned_size) != sizetype)
12737 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12738 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12739 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12742 /* Otherwise, take the size from that of the element type. Substitute
12743 any PLACEHOLDER_EXPR that we have. */
12744 else
12745 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12748 /* Return a tree representing the lower bound of the array mentioned in
12749 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12751 tree
12752 array_ref_low_bound (tree exp)
12754 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12756 /* If a lower bound is specified in EXP, use it. */
12757 if (TREE_OPERAND (exp, 2))
12758 return TREE_OPERAND (exp, 2);
12760 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12761 substituting for a PLACEHOLDER_EXPR as needed. */
12762 if (domain_type && TYPE_MIN_VALUE (domain_type))
12763 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12765 /* Otherwise, return a zero of the appropriate type. */
12766 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12767 return (idxtype == error_mark_node
12768 ? integer_zero_node : build_int_cst (idxtype, 0));
12771 /* Return a tree representing the upper bound of the array mentioned in
12772 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12774 tree
12775 array_ref_up_bound (tree exp)
12777 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12779 /* If there is a domain type and it has an upper bound, use it, substituting
12780 for a PLACEHOLDER_EXPR as needed. */
12781 if (domain_type && TYPE_MAX_VALUE (domain_type))
12782 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12784 /* Otherwise fail. */
12785 return NULL_TREE;
12788 /* Returns true if REF is an array reference, a component reference,
12789 or a memory reference to an array whose actual size might be larger
12790 than its upper bound implies; there are multiple cases:
12791 A. a ref to a flexible array member at the end of a structure;
12792 B. a ref to an array with a different type against the original decl;
12793 for example:
12795 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12796 (*((char(*)[16])&a[0]))[i+8]
12798 C. a ref to an array that was passed as a parameter;
12799 for example:
12801 int test (uint8_t *p, uint32_t t[1][1], int n) {
12802 for (int i = 0; i < 4; i++, p++)
12803 t[i][0] = ...;
12805 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12808 bool
12809 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12811 /* The TYPE for this array reference. */
12812 tree atype = NULL_TREE;
12813 /* The FIELD_DECL for the array field in the containing structure. */
12814 tree afield_decl = NULL_TREE;
12815 /* Whether this array is the trailing array of a structure. */
12816 bool is_trailing_array_tmp = false;
12817 if (!is_trailing_array)
12818 is_trailing_array = &is_trailing_array_tmp;
12820 if (TREE_CODE (ref) == ARRAY_REF
12821 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12823 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12824 ref = TREE_OPERAND (ref, 0);
12826 else if (TREE_CODE (ref) == COMPONENT_REF
12827 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12829 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12830 afield_decl = TREE_OPERAND (ref, 1);
12832 else if (TREE_CODE (ref) == MEM_REF)
12834 tree arg = TREE_OPERAND (ref, 0);
12835 if (TREE_CODE (arg) == ADDR_EXPR)
12836 arg = TREE_OPERAND (arg, 0);
12837 tree argtype = TREE_TYPE (arg);
12838 if (TREE_CODE (argtype) == RECORD_TYPE)
12840 if (tree fld = last_field (argtype))
12842 atype = TREE_TYPE (fld);
12843 afield_decl = fld;
12844 if (TREE_CODE (atype) != ARRAY_TYPE)
12845 return false;
12846 if (VAR_P (arg) && DECL_SIZE (fld))
12847 return false;
12849 else
12850 return false;
12852 else
12853 return false;
12855 else
12856 return false;
12858 if (TREE_CODE (ref) == STRING_CST)
12859 return false;
12861 tree ref_to_array = ref;
12862 while (handled_component_p (ref))
12864 /* If the reference chain contains a component reference to a
12865 non-union type and there follows another field, the reference
12866 is not at the end of a structure. */
12867 if (TREE_CODE (ref) == COMPONENT_REF)
12869 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12871 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12872 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12873 nextf = DECL_CHAIN (nextf);
12874 if (nextf)
12875 return false;
12878 /* If we have a multi-dimensional array we do not consider
12879 a non-innermost dimension as flex array if the whole
12880 multi-dimensional array is at struct end.
12881 Same for an array of aggregates with a trailing array
12882 member. */
12883 else if (TREE_CODE (ref) == ARRAY_REF)
12884 return false;
12885 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12887 /* If we view an underlying object as something else, then what we
12888 gathered up to now is what we have to rely on. */
12889 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12890 break;
12891 else
12892 gcc_unreachable ();
12894 ref = TREE_OPERAND (ref, 0);
12897 gcc_assert (!afield_decl
12898 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12900 /* The array is now at the end of the struct. Treat a flexible array
12901 member as always subject to extension, even into padding constrained
12902 by an underlying decl. */
12903 if (! TYPE_SIZE (atype)
12904 || ! TYPE_DOMAIN (atype)
12905 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12907 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12908 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12911 /* If the reference is based on a declared entity, the size of the array
12912 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12913 ref = get_base_address (ref);
12914 if (ref
12915 && DECL_P (ref)
12916 && !(flag_unconstrained_commons
12917 && VAR_P (ref) && DECL_COMMON (ref))
12918 && DECL_SIZE_UNIT (ref)
12919 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12921 /* If the object itself is the array it is not at struct end. */
12922 if (DECL_P (ref_to_array))
12923 return false;
12925 /* Check whether the array domain covers all of the available
12926 padding. */
12927 poly_int64 offset;
12928 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12929 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12930 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12932 *is_trailing_array
12933 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12934 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12936 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12938 *is_trailing_array
12939 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12940 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12943 /* If at least one extra element fits it is a flexarray. */
12944 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12945 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12946 + 2)
12947 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12948 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12950 *is_trailing_array
12951 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12952 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12955 return false;
12958 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12959 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12963 /* Return a tree representing the offset, in bytes, of the field referenced
12964 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12966 tree
12967 component_ref_field_offset (tree exp)
12969 tree aligned_offset = TREE_OPERAND (exp, 2);
12970 tree field = TREE_OPERAND (exp, 1);
12971 location_t loc = EXPR_LOCATION (exp);
12973 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12974 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12975 value. */
12976 if (aligned_offset)
12978 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12979 sizetype from another type of the same width and signedness. */
12980 if (TREE_TYPE (aligned_offset) != sizetype)
12981 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12982 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12983 size_int (DECL_OFFSET_ALIGN (field)
12984 / BITS_PER_UNIT));
12987 /* Otherwise, take the offset from that of the field. Substitute
12988 any PLACEHOLDER_EXPR that we have. */
12989 else
12990 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12993 /* Given the initializer INIT, return the initializer for the field
12994 DECL if it exists, otherwise null. Used to obtain the initializer
12995 for a flexible array member and determine its size. */
12997 static tree
12998 get_initializer_for (tree init, tree decl)
13000 STRIP_NOPS (init);
13002 tree fld, fld_init;
13003 unsigned HOST_WIDE_INT i;
13004 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13006 if (decl == fld)
13007 return fld_init;
13009 if (TREE_CODE (fld) == CONSTRUCTOR)
13011 fld_init = get_initializer_for (fld_init, decl);
13012 if (fld_init)
13013 return fld_init;
13017 return NULL_TREE;
13020 /* Determines the special array member type for the array reference REF. */
13021 special_array_member
13022 component_ref_sam_type (tree ref)
13024 special_array_member sam_type = special_array_member::none;
13026 tree member = TREE_OPERAND (ref, 1);
13027 tree memsize = DECL_SIZE_UNIT (member);
13028 if (memsize)
13030 tree memtype = TREE_TYPE (member);
13031 if (TREE_CODE (memtype) != ARRAY_TYPE)
13032 return sam_type;
13034 bool trailing = false;
13035 (void) array_ref_flexible_size_p (ref, &trailing);
13036 bool zero_elts = integer_zerop (memsize);
13037 if (zero_elts && integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype))))
13039 /* If array element has zero size, verify if it is a flexible
13040 array member or zero length array. Clear zero_elts if
13041 it has one or more members or is a VLA member. */
13042 if (tree dom = TYPE_DOMAIN (memtype))
13043 if (tree min = TYPE_MIN_VALUE (dom))
13044 if (tree max = TYPE_MAX_VALUE (dom))
13045 if (TREE_CODE (min) != INTEGER_CST
13046 || TREE_CODE (max) != INTEGER_CST
13047 || !((integer_zerop (min) && integer_all_onesp (max))
13048 || tree_int_cst_lt (max, min)))
13049 zero_elts = false;
13051 if (!trailing && !zero_elts)
13052 /* MEMBER is an interior array with more than one element. */
13053 return special_array_member::int_n;
13055 if (zero_elts)
13057 if (trailing)
13058 return special_array_member::trail_0;
13059 else
13060 return special_array_member::int_0;
13063 if (!zero_elts)
13064 if (tree dom = TYPE_DOMAIN (memtype))
13065 if (tree min = TYPE_MIN_VALUE (dom))
13066 if (tree max = TYPE_MAX_VALUE (dom))
13067 if (TREE_CODE (min) == INTEGER_CST
13068 && TREE_CODE (max) == INTEGER_CST)
13070 offset_int minidx = wi::to_offset (min);
13071 offset_int maxidx = wi::to_offset (max);
13072 offset_int neltsm1 = maxidx - minidx;
13073 if (neltsm1 > 0)
13074 /* MEMBER is a trailing array with more than
13075 one element. */
13076 return special_array_member::trail_n;
13078 if (neltsm1 == 0)
13079 return special_array_member::trail_1;
13083 return sam_type;
13086 /* Determines the size of the member referenced by the COMPONENT_REF
13087 REF, using its initializer expression if necessary in order to
13088 determine the size of an initialized flexible array member.
13089 If non-null, set *SAM to the type of special array member.
13090 Returns the size as sizetype (which might be zero for an object
13091 with an uninitialized flexible array member) or null if the size
13092 cannot be determined. */
13094 tree
13095 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13097 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13099 special_array_member sambuf;
13100 if (!sam)
13101 sam = &sambuf;
13102 *sam = component_ref_sam_type (ref);
13104 /* The object/argument referenced by the COMPONENT_REF and its type. */
13105 tree arg = TREE_OPERAND (ref, 0);
13106 tree argtype = TREE_TYPE (arg);
13107 /* The referenced member. */
13108 tree member = TREE_OPERAND (ref, 1);
13110 tree memsize = DECL_SIZE_UNIT (member);
13111 if (memsize)
13113 tree memtype = TREE_TYPE (member);
13114 if (TREE_CODE (memtype) != ARRAY_TYPE)
13115 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13116 to the type of a class with a virtual base which doesn't
13117 reflect the size of the virtual's members (see pr97595).
13118 If that's the case, fail for now and implement something
13119 more robust in the future. */
13120 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13121 ? memsize : NULL_TREE);
13123 /* Arrays of two or more elements are treated as normal arrays by default. */
13124 if (*sam == special_array_member::int_n
13125 || *sam == special_array_member::trail_n)
13126 return memsize;
13128 tree afield_decl = TREE_OPERAND (ref, 1);
13129 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13130 /* If the trailing array is not a flexible array member, treat it as
13131 a normal array. */
13132 if (DECL_NOT_FLEXARRAY (afield_decl)
13133 && *sam != special_array_member::int_0)
13134 return memsize;
13136 if (*sam == special_array_member::int_0)
13137 memsize = NULL_TREE;
13139 /* For a reference to a flexible array member of a union
13140 use the size of the union instead of the size of the member. */
13141 if (TREE_CODE (argtype) == UNION_TYPE)
13142 memsize = TYPE_SIZE_UNIT (argtype);
13145 /* MEMBER is either a bona fide flexible array member, or a zero-element
13146 array member, or an array of length one treated as such. */
13148 /* If the reference is to a declared object and the member a true
13149 flexible array, try to determine its size from its initializer. */
13150 poly_int64 baseoff = 0;
13151 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13152 if (!base || !VAR_P (base))
13154 if (*sam != special_array_member::int_0)
13155 return NULL_TREE;
13157 if (TREE_CODE (arg) != COMPONENT_REF)
13158 return NULL_TREE;
13160 base = arg;
13161 while (TREE_CODE (base) == COMPONENT_REF)
13162 base = TREE_OPERAND (base, 0);
13163 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13166 /* BASE is the declared object of which MEMBER is either a member
13167 or that is cast to ARGTYPE (e.g., a char buffer used to store
13168 an ARGTYPE object). */
13169 tree basetype = TREE_TYPE (base);
13171 /* Determine the base type of the referenced object. If it's
13172 the same as ARGTYPE and MEMBER has a known size, return it. */
13173 tree bt = basetype;
13174 if (*sam != special_array_member::int_0)
13175 while (TREE_CODE (bt) == ARRAY_TYPE)
13176 bt = TREE_TYPE (bt);
13177 bool typematch = useless_type_conversion_p (argtype, bt);
13178 if (memsize && typematch)
13179 return memsize;
13181 memsize = NULL_TREE;
13183 if (typematch)
13184 /* MEMBER is a true flexible array member. Compute its size from
13185 the initializer of the BASE object if it has one. */
13186 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13187 if (init != error_mark_node)
13189 init = get_initializer_for (init, member);
13190 if (init)
13192 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13193 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13195 /* Use the larger of the initializer size and the tail
13196 padding in the enclosing struct. */
13197 poly_int64 rsz = tree_to_poly_int64 (refsize);
13198 rsz -= baseoff;
13199 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13200 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13203 baseoff = 0;
13207 if (!memsize)
13209 if (typematch)
13211 if (DECL_P (base)
13212 && DECL_EXTERNAL (base)
13213 && bt == basetype
13214 && *sam != special_array_member::int_0)
13215 /* The size of a flexible array member of an extern struct
13216 with no initializer cannot be determined (it's defined
13217 in another translation unit and can have an initializer
13218 with an arbitrary number of elements). */
13219 return NULL_TREE;
13221 /* Use the size of the base struct or, for interior zero-length
13222 arrays, the size of the enclosing type. */
13223 memsize = TYPE_SIZE_UNIT (bt);
13225 else if (DECL_P (base))
13226 /* Use the size of the BASE object (possibly an array of some
13227 other type such as char used to store the struct). */
13228 memsize = DECL_SIZE_UNIT (base);
13229 else
13230 return NULL_TREE;
13233 /* If the flexible array member has a known size use the greater
13234 of it and the tail padding in the enclosing struct.
13235 Otherwise, when the size of the flexible array member is unknown
13236 and the referenced object is not a struct, use the size of its
13237 type when known. This detects sizes of array buffers when cast
13238 to struct types with flexible array members. */
13239 if (memsize)
13241 if (!tree_fits_poly_int64_p (memsize))
13242 return NULL_TREE;
13243 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13244 if (known_lt (baseoff, memsz64))
13246 memsz64 -= baseoff;
13247 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13249 return size_zero_node;
13252 /* Return "don't know" for an external non-array object since its
13253 flexible array member can be initialized to have any number of
13254 elements. Otherwise, return zero because the flexible array
13255 member has no elements. */
13256 return (DECL_P (base)
13257 && DECL_EXTERNAL (base)
13258 && (!typematch
13259 || TREE_CODE (basetype) != ARRAY_TYPE)
13260 ? NULL_TREE : size_zero_node);
13263 /* Return the machine mode of T. For vectors, returns the mode of the
13264 inner type. The main use case is to feed the result to HONOR_NANS,
13265 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13267 machine_mode
13268 element_mode (const_tree t)
13270 if (!TYPE_P (t))
13271 t = TREE_TYPE (t);
13272 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13273 t = TREE_TYPE (t);
13274 return TYPE_MODE (t);
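/* For illustration (a sketch, not taken from the sources): on typical
   targets element_mode (complex_double_type_node) is DFmode, and for a
   vector of floats it is SFmode rather than the vector's own mode, which
   is what predicates such as HONOR_NANS expect.  */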
13277 /* Vector types need to re-check the target flags each time we report
13278 the machine mode. We need to do this because attribute target can
13279 change the result of vector_mode_supported_p and have_regs_of_mode
13280 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13281 change on a per-function basis. */
13282 /* ??? Possibly a better solution is to run through all the types
13283 referenced by a function and re-compute the TYPE_MODE once, rather
13284 than make the TYPE_MODE macro call a function. */
13286 machine_mode
13287 vector_type_mode (const_tree t)
13289 machine_mode mode;
13291 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13293 mode = t->type_common.mode;
13294 if (VECTOR_MODE_P (mode)
13295 && (!targetm.vector_mode_supported_p (mode)
13296 || !have_regs_of_mode[mode]))
13298 scalar_int_mode innermode;
13300 /* For integers, try mapping it to a same-sized scalar mode. */
13301 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13303 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13304 * GET_MODE_BITSIZE (innermode));
13305 scalar_int_mode mode;
13306 if (int_mode_for_size (size, 0).exists (&mode)
13307 && have_regs_of_mode[mode])
13308 return mode;
13311 return BLKmode;
13314 return mode;
13317 /* Return the size in bits of each element of vector type TYPE. */
13319 unsigned int
13320 vector_element_bits (const_tree type)
13322 gcc_checking_assert (VECTOR_TYPE_P (type));
13323 if (VECTOR_BOOLEAN_TYPE_P (type))
13324 return TYPE_PRECISION (TREE_TYPE (type));
13325 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13328 /* Calculate the size in bits of each element of vector type TYPE
13329 and return the result as a tree of type bitsizetype. */
13331 tree
13332 vector_element_bits_tree (const_tree type)
13334 gcc_checking_assert (VECTOR_TYPE_P (type));
13335 if (VECTOR_BOOLEAN_TYPE_P (type))
13336 return bitsize_int (vector_element_bits (type));
13337 return TYPE_SIZE (TREE_TYPE (type));
13340 /* Verify that basic properties of T match TV and thus T can be a variant of
13341 TV. TV should be the more specified variant (i.e. the main variant). */
13343 static bool
13344 verify_type_variant (const_tree t, tree tv)
13346 /* Type variant can differ by:
13348 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13349 ENCODE_QUAL_ADDR_SPACE.
13350 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13351 in this case some values may not be set in the variant types
13352 (see TYPE_COMPLETE_P checks).
13353 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13354 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13355 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13356 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13357 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13358 this is necessary to make it possible to merge types from different TUs
13359 - arrays, pointers and references may have TREE_TYPE that is a variant
13360 of TREE_TYPE of their main variants.
13361 - aggregates may have a new TYPE_FIELDS list that lists variants of
13362 the main variant TYPE_FIELDS.
13363 - vector types may differ by TYPE_VECTOR_OPAQUE
13366 /* Convenience macro for matching individual fields. */
13367 #define verify_variant_match(flag) \
13368 do { \
13369 if (flag (tv) != flag (t)) \
13371 error ("type variant differs by %s", #flag); \
13372 debug_tree (tv); \
13373 return false; \
13375 } while (false)
13377 /* tree_base checks. */
13379 verify_variant_match (TREE_CODE);
13380 /* FIXME: Ada builds non-artificial variants of artificial types. */
13381 #if 0
13382 if (TYPE_ARTIFICIAL (tv))
13383 verify_variant_match (TYPE_ARTIFICIAL);
13384 #endif
13385 if (POINTER_TYPE_P (tv))
13386 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13387 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13388 verify_variant_match (TYPE_UNSIGNED);
13389 verify_variant_match (TYPE_PACKED);
13390 if (TREE_CODE (t) == REFERENCE_TYPE)
13391 verify_variant_match (TYPE_REF_IS_RVALUE);
13392 if (AGGREGATE_TYPE_P (t))
13393 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13394 else
13395 verify_variant_match (TYPE_SATURATING);
13396 /* FIXME: This check triggers during the libstdc++ build. */
13397 #if 0
13398 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13399 verify_variant_match (TYPE_FINAL_P);
13400 #endif
13402 /* tree_type_common checks. */
13404 if (COMPLETE_TYPE_P (t))
13406 verify_variant_match (TYPE_MODE);
13407 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13408 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13409 verify_variant_match (TYPE_SIZE);
13410 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13411 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13412 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13414 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13415 TYPE_SIZE_UNIT (tv), 0));
13416 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13417 debug_tree (tv);
13418 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13419 debug_tree (TYPE_SIZE_UNIT (tv));
13420 error ("type%'s %<TYPE_SIZE_UNIT%>");
13421 debug_tree (TYPE_SIZE_UNIT (t));
13422 return false;
13424 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13426 verify_variant_match (TYPE_PRECISION_RAW);
13427 if (RECORD_OR_UNION_TYPE_P (t))
13428 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13429 else if (TREE_CODE (t) == ARRAY_TYPE)
13430 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13431 /* During LTO we merge variant lists from different translation units
13432 that may differ by TYPE_CONTEXT, which in turn may point
13433 to TRANSLATION_UNIT_DECL.
13434 Ada also builds variants of types with different TYPE_CONTEXT. */
13435 #if 0
13436 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13437 verify_variant_match (TYPE_CONTEXT);
13438 #endif
13439 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13440 verify_variant_match (TYPE_STRING_FLAG);
13441 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13442 verify_variant_match (TYPE_CXX_ODR_P);
13443 if (TYPE_ALIAS_SET_KNOWN_P (t))
13445 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13446 debug_tree (tv);
13447 return false;
13450 /* tree_type_non_common checks. */
13452 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13453 and dangles the pointer from time to time. */
13454 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13455 && (in_lto_p || !TYPE_VFIELD (tv)
13456 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13458 error ("type variant has different %<TYPE_VFIELD%>");
13459 debug_tree (tv);
13460 return false;
13462 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13463 || TREE_CODE (t) == INTEGER_TYPE
13464 || TREE_CODE (t) == BOOLEAN_TYPE
13465 || SCALAR_FLOAT_TYPE_P (t)
13466 || FIXED_POINT_TYPE_P (t))
13468 verify_variant_match (TYPE_MAX_VALUE);
13469 verify_variant_match (TYPE_MIN_VALUE);
13471 if (TREE_CODE (t) == METHOD_TYPE)
13472 verify_variant_match (TYPE_METHOD_BASETYPE);
13473 if (TREE_CODE (t) == OFFSET_TYPE)
13474 verify_variant_match (TYPE_OFFSET_BASETYPE);
13475 if (TREE_CODE (t) == ARRAY_TYPE)
13476 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13477 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13478 or even the type's main variant. This is needed to make bootstrap pass,
13479 and the bug seems new in GCC 5.
13480 The C++ FE should be updated to make this consistent, and we should check
13481 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13482 is a match with the main variant.
13484 Also disable the check for Java for now because of a parser hack that
13485 first builds a dummy BINFO and then sometimes replaces it by the real
13486 BINFO in some of the copies. */
13487 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13488 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13489 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13490 Since there is no cheap way to tell a C++ from a Java type without LTO,
13491 do the checking at LTO time only. */
13492 && (in_lto_p && odr_type_p (t)))
13494 error ("type variant has different %<TYPE_BINFO%>");
13495 debug_tree (tv);
13496 error ("type variant%'s %<TYPE_BINFO%>");
13497 debug_tree (TYPE_BINFO (tv));
13498 error ("type%'s %<TYPE_BINFO%>");
13499 debug_tree (TYPE_BINFO (t));
13500 return false;
13503 /* Check various uses of TYPE_VALUES_RAW. */
13504 if (TREE_CODE (t) == ENUMERAL_TYPE
13505 && TYPE_VALUES (t))
13506 verify_variant_match (TYPE_VALUES);
13507 else if (TREE_CODE (t) == ARRAY_TYPE)
13508 verify_variant_match (TYPE_DOMAIN);
13509 /* Permit incomplete variants of complete type. While FEs may complete
13510 all variants, this does not happen for C++ templates in all cases. */
13511 else if (RECORD_OR_UNION_TYPE_P (t)
13512 && COMPLETE_TYPE_P (t)
13513 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13515 tree f1, f2;
13517 /* Fortran builds qualified variants as new records with items of
13518 qualified type. Verify that they look the same. */
13519 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13520 f1 && f2;
13521 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13522 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13523 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13524 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13525 /* FIXME: gfc_nonrestricted_type builds all types as variants
13526 with the exception of pointer types. It deeply copies the type,
13527 which means that we may end up with a variant type
13528 referring to a non-variant pointer. We may change it to
13529 produce types as variants, too, like
13530 objc_get_protocol_qualified_type does. */
13531 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13532 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13533 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13534 break;
13535 if (f1 || f2)
13537 error ("type variant has different %<TYPE_FIELDS%>");
13538 debug_tree (tv);
13539 error ("first mismatch is field");
13540 debug_tree (f1);
13541 error ("and field");
13542 debug_tree (f2);
13543 return false;
13546 else if (FUNC_OR_METHOD_TYPE_P (t))
13547 verify_variant_match (TYPE_ARG_TYPES);
13548 /* For C++ the qualified variant of an array type is really an array type
13549 of the qualified TREE_TYPE.
13550 ObjC builds variants of pointer types where the pointed-to type is a
13551 variant, too, in objc_get_protocol_qualified_type. */
13552 if (TREE_TYPE (t) != TREE_TYPE (tv)
13553 && ((TREE_CODE (t) != ARRAY_TYPE
13554 && !POINTER_TYPE_P (t))
13555 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13556 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13558 error ("type variant has different %<TREE_TYPE%>");
13559 debug_tree (tv);
13560 error ("type variant%'s %<TREE_TYPE%>");
13561 debug_tree (TREE_TYPE (tv));
13562 error ("type%'s %<TREE_TYPE%>");
13563 debug_tree (TREE_TYPE (t));
13564 return false;
13566 if (type_with_alias_set_p (t)
13567 && !gimple_canonical_types_compatible_p (t, tv, false))
13569 error ("type is not compatible with its variant");
13570 debug_tree (tv);
13571 error ("type variant%'s %<TREE_TYPE%>");
13572 debug_tree (TREE_TYPE (tv));
13573 error ("type%'s %<TREE_TYPE%>");
13574 debug_tree (TREE_TYPE (t));
13575 return false;
13577 return true;
13578 #undef verify_variant_match
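/* For illustration: the checks above keep a qualified variant structurally
   in sync with its main variant.  For a C declaration such as
   "const volatile int x;" the type of X is a variant of plain "int" and must
   agree with it on layout.  A minimal sketch of the invariant, using a
   hypothetical decl X_DECL:  */
#if 0
  tree variant = TREE_TYPE (x_decl);		/* hypothetical "const volatile int" */
  tree mainvar = TYPE_MAIN_VARIANT (variant);	/* plain "int" */
  gcc_checking_assert (TYPE_SIZE (variant) == TYPE_SIZE (mainvar)
		       && TYPE_MODE (variant) == TYPE_MODE (mainvar));
#endif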
13582 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13583 the middle-end types_compatible_p function. It needs to avoid
13584 claiming types are different for types that should be treated
13585 the same with respect to TBAA. Canonical types are also used
13586 for IL consistency checks via the useless_type_conversion_p
13587 predicate which does not handle all type kinds itself but falls
13588 back to pointer-comparison of TYPE_CANONICAL for aggregates
13589 for example. */
13591 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13592 type calculation because we need to allow inter-operability between signed
13593 and unsigned variants. */
13595 bool
13596 type_with_interoperable_signedness (const_tree type)
13598 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13599 signed char and unsigned char. Similarly the Fortran FE builds
13600 C_SIZE_T as a signed type, while C defines it as unsigned.
13602 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13603 == INTEGER_TYPE
13604 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13605 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
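/* For illustration: under this predicate a char-precision integer type may
   share a canonical type with its opposite-signedness counterpart, while a
   plain "int" normally may not.  A sketch assuming the usual global type
   nodes:  */
#if 0
  /* True: signed char precision, so signedness is ignored when merging.  */
  type_with_interoperable_signedness (signed_char_type_node);
  /* False for a 32-bit "int" on a target whose size_t is 64 bits wide.  */
  type_with_interoperable_signedness (integer_type_node);
#endif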
13608 /* Return true iff T1 and T2 are structurally identical for what
13609 TBAA is concerned.
13610 This function is used both by lto.cc canonical type merging and by the
13611 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13612 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13613 only for LTO because only in that case TYPE_CANONICAL equivalence
13614 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13616 bool
13617 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13618 bool trust_type_canonical)
13620 /* Type variants should be the same as the main variant. When not doing sanity
13621 checking to verify this fact, go to main variants and save some work. */
13622 if (trust_type_canonical)
13624 t1 = TYPE_MAIN_VARIANT (t1);
13625 t2 = TYPE_MAIN_VARIANT (t2);
13628 /* Check first for the obvious case of pointer identity. */
13629 if (t1 == t2)
13630 return true;
13632 /* Check that we have two types to compare. */
13633 if (t1 == NULL_TREE || t2 == NULL_TREE)
13634 return false;
13636 /* We consider complete types always compatible with incomplete types.
13637 This does not make sense for canonical type calculation and thus we
13638 need to ensure that we are never called on it.
13640 FIXME: For more correctness the function probably should have three modes
13641 1) mode assuming that types are complete and matching their structure
13642 2) mode allowing incomplete types but producing equivalence classes
13643 and thus ignoring all info from complete types
13644 3) mode allowing incomplete types to match complete but checking
13645 compatibility between complete types.
13647 1 and 2 can be used for canonical type calculation. 3 is the real
13648 definition of type compatibility that can be used e.g. for warnings during
13649 declaration merging. */
13651 gcc_assert (!trust_type_canonical
13652 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13654 /* If the types have been previously registered and found equal
13655 they still are. */
13657 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13658 && trust_type_canonical)
13660 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13661 they are always NULL, but they are set to non-NULL for types
13662 constructed by build_pointer_type and variants. In this case the
13663 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13664 all pointers are considered equal). Be sure to not return false
13665 negatives. */
13666 gcc_checking_assert (canonical_type_used_p (t1)
13667 && canonical_type_used_p (t2));
13668 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13671 /* For types where we do ODR based TBAA the canonical type is always
13672 set correctly, so we know that types are different if their
13673 canonical types do not match. */
13674 if (trust_type_canonical
13675 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13676 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13677 return false;
13679 /* Can't be the same type if the types don't have the same code. */
13680 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13681 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13682 return false;
13684 /* Qualifiers do not matter for canonical type comparison purposes. */
13686 /* Void types and nullptr types are always the same. */
13687 if (VOID_TYPE_P (t1)
13688 || TREE_CODE (t1) == NULLPTR_TYPE)
13689 return true;
13691 /* Can't be the same type if they have different mode. */
13692 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13693 return false;
13695 /* Non-aggregate types can be handled cheaply. */
13696 if (INTEGRAL_TYPE_P (t1)
13697 || SCALAR_FLOAT_TYPE_P (t1)
13698 || FIXED_POINT_TYPE_P (t1)
13699 || VECTOR_TYPE_P (t1)
13700 || TREE_CODE (t1) == COMPLEX_TYPE
13701 || TREE_CODE (t1) == OFFSET_TYPE
13702 || POINTER_TYPE_P (t1))
13704 /* Can't be the same type if they have different precision. */
13705 if (TYPE_PRECISION_RAW (t1) != TYPE_PRECISION_RAW (t2))
13706 return false;
13708 /* In some cases the signed and unsigned types are required to be
13709 inter-operable. */
13710 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13711 && !type_with_interoperable_signedness (t1))
13712 return false;
13714 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13715 interoperable with "signed char". Unless all frontends are revisited
13716 to agree on these types, we must ignore the flag completely. */
13718 /* The Fortran standard defines the C_PTR type, which is compatible with every
13719 C pointer. For this reason we need to glob all pointers into one.
13720 Still, pointers in different address spaces are not compatible. */
13721 if (POINTER_TYPE_P (t1))
13723 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13724 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13725 return false;
13728 /* Tail-recurse to components. */
13729 if (VECTOR_TYPE_P (t1)
13730 || TREE_CODE (t1) == COMPLEX_TYPE)
13731 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13732 TREE_TYPE (t2),
13733 trust_type_canonical);
13735 return true;
13738 /* Do type-specific comparisons. */
13739 switch (TREE_CODE (t1))
13741 case ARRAY_TYPE:
13742 /* Array types are the same if the element types are the same and
13743 the number of elements are the same. */
13744 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13745 trust_type_canonical)
13746 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13747 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13748 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13749 return false;
13750 else
13752 tree i1 = TYPE_DOMAIN (t1);
13753 tree i2 = TYPE_DOMAIN (t2);
13755 /* For an incomplete external array, the type domain can be
13756 NULL_TREE. Check this condition also. */
13757 if (i1 == NULL_TREE && i2 == NULL_TREE)
13758 return true;
13759 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13760 return false;
13761 else
13763 tree min1 = TYPE_MIN_VALUE (i1);
13764 tree min2 = TYPE_MIN_VALUE (i2);
13765 tree max1 = TYPE_MAX_VALUE (i1);
13766 tree max2 = TYPE_MAX_VALUE (i2);
13768 /* The minimum/maximum values have to be the same. */
13769 if ((min1 == min2
13770 || (min1 && min2
13771 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13772 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13773 || operand_equal_p (min1, min2, 0))))
13774 && (max1 == max2
13775 || (max1 && max2
13776 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13777 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13778 || operand_equal_p (max1, max2, 0)))))
13779 return true;
13780 else
13781 return false;
13785 case METHOD_TYPE:
13786 case FUNCTION_TYPE:
13787 /* Function types are the same if the return type and arguments types
13788 are the same. */
13789 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13790 trust_type_canonical))
13791 return false;
13793 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13794 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13795 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13796 return true;
13797 else
13799 tree parms1, parms2;
13801 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13802 parms1 && parms2;
13803 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13805 if (!gimple_canonical_types_compatible_p
13806 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13807 trust_type_canonical))
13808 return false;
13811 if (parms1 || parms2)
13812 return false;
13814 return true;
13817 case RECORD_TYPE:
13818 case UNION_TYPE:
13819 case QUAL_UNION_TYPE:
13821 tree f1, f2;
13823 /* Don't try to compare variants of an incomplete type, before
13824 TYPE_FIELDS has been copied around. */
13825 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13826 return true;
13829 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13830 return false;
13832 /* For aggregate types, all the fields must be the same. */
13833 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13834 f1 || f2;
13835 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13837 /* Skip non-fields and zero-sized fields. */
13838 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13839 || (DECL_SIZE (f1)
13840 && integer_zerop (DECL_SIZE (f1)))))
13841 f1 = TREE_CHAIN (f1);
13842 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13843 || (DECL_SIZE (f2)
13844 && integer_zerop (DECL_SIZE (f2)))))
13845 f2 = TREE_CHAIN (f2);
13846 if (!f1 || !f2)
13847 break;
13848 /* The fields must have the same name, offset and type. */
13849 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13850 || !gimple_compare_field_offset (f1, f2)
13851 || !gimple_canonical_types_compatible_p
13852 (TREE_TYPE (f1), TREE_TYPE (f2),
13853 trust_type_canonical))
13854 return false;
13857 /* If one aggregate has more fields than the other, they
13858 are not the same. */
13859 if (f1 || f2)
13860 return false;
13862 return true;
13865 default:
13866 /* Consider all types with language specific trees in them mutually
13867 compatible. This is executed only from verify_type and false
13868 positives can be tolerated. */
13869 gcc_assert (!in_lto_p);
13870 return true;
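/* For illustration: LTO canonical-type merging calls this predicate with
   TRUST_TYPE_CANONICAL set, so already-merged component types are compared
   by their TYPE_CANONICAL pointers, while the verifier below passes false to
   re-check the structure from scratch.  A sketch with hypothetical trees T1
   and T2:  */
#if 0
  /* T1 and T2 are hypothetical RECORD_TYPEs with matching fields.  */
  bool lto_view      = gimple_canonical_types_compatible_p (t1, t2, true);
  bool verifier_view = gimple_canonical_types_compatible_p (t1, t2, false);
#endif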
13874 /* For OPAQUE_TYPE T, it should have only size and alignment information
13875 and its mode should be of class MODE_OPAQUE. This function verifies that
13876 these properties of T match those of TV, which is the main variant of T,
13877 and of TC, which is the canonical type of T. */
13879 static void
13880 verify_opaque_type (const_tree t, tree tv, tree tc)
13882 gcc_assert (OPAQUE_TYPE_P (t));
13883 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13884 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13886 /* For an opaque type T1, check whether its properties match the
13887 corresponding ones of the other opaque type T2, and emit error
13888 messages for any that are inconsistent. */
13889 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13890 const char *kind_msg)
13892 if (!OPAQUE_TYPE_P (t2))
13894 error ("type %s is not an opaque type", kind_msg);
13895 debug_tree (t2);
13896 return;
13898 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13900 error ("type %s is not with opaque mode", kind_msg);
13901 debug_tree (t2);
13902 return;
13904 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13906 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13907 debug_tree (t2);
13908 return;
13910 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13911 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13912 if (maybe_ne (t1_size, t2_size))
13914 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13915 debug_tree (t2);
13916 return;
13918 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13920 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13921 debug_tree (t2);
13922 return;
13924 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13926 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13927 debug_tree (t2);
13928 return;
13932 if (t != tv)
13933 check_properties_for_opaque_type (t, tv, "variant");
13935 if (t != tc)
13936 check_properties_for_opaque_type (t, tc, "canonical");
13939 /* Verify type T. */
13941 void
13942 verify_type (const_tree t)
13944 bool error_found = false;
13945 tree mv = TYPE_MAIN_VARIANT (t);
13946 tree ct = TYPE_CANONICAL (t);
13948 if (OPAQUE_TYPE_P (t))
13950 verify_opaque_type (t, mv, ct);
13951 return;
13954 if (!mv)
13956 error ("main variant is not defined");
13957 error_found = true;
13959 else if (mv != TYPE_MAIN_VARIANT (mv))
13961 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13962 debug_tree (mv);
13963 error_found = true;
13965 else if (t != mv && !verify_type_variant (t, mv))
13966 error_found = true;
13968 if (!ct)
13970 else if (TYPE_CANONICAL (ct) != ct)
13972 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13973 debug_tree (ct);
13974 error_found = true;
13976 /* Method and function types cannot be used to address memory and thus
13977 TYPE_CANONICAL really matters only for determining useless conversions.
13979 FIXME: The C++ FE produces declarations of builtin functions that are not
13980 compatible with main variants. */
13981 else if (TREE_CODE (t) == FUNCTION_TYPE)
13983 else if (t != ct
13984 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13985 with variably sized arrays because their sizes were possibly
13986 gimplified to different variables. */
13987 && !variably_modified_type_p (ct, NULL)
13988 && !gimple_canonical_types_compatible_p (t, ct, false)
13989 && COMPLETE_TYPE_P (t))
13991 error ("%<TYPE_CANONICAL%> is not compatible");
13992 debug_tree (ct);
13993 error_found = true;
13996 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13997 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13999 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14000 debug_tree (ct);
14001 error_found = true;
14003 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14005 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14006 debug_tree (ct);
14007 debug_tree (TYPE_MAIN_VARIANT (ct));
14008 error_found = true;
14012 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14013 if (RECORD_OR_UNION_TYPE_P (t))
14015 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14016 and dangle the pointer from time to time. */
14017 if (TYPE_VFIELD (t)
14018 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14019 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14021 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14022 debug_tree (TYPE_VFIELD (t));
14023 error_found = true;
14026 else if (TREE_CODE (t) == POINTER_TYPE)
14028 if (TYPE_NEXT_PTR_TO (t)
14029 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14031 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14032 debug_tree (TYPE_NEXT_PTR_TO (t));
14033 error_found = true;
14036 else if (TREE_CODE (t) == REFERENCE_TYPE)
14038 if (TYPE_NEXT_REF_TO (t)
14039 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14041 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14042 debug_tree (TYPE_NEXT_REF_TO (t));
14043 error_found = true;
14046 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14047 || FIXED_POINT_TYPE_P (t))
14049 /* FIXME: The following check should pass:
14050 useless_type_conversion_p (const_cast <tree> (t),
14051 TREE_TYPE (TYPE_MIN_VALUE (t))
14052 but does not for C sizetypes in LTO. */
14055 /* Check various uses of TYPE_MAXVAL_RAW. */
14056 if (RECORD_OR_UNION_TYPE_P (t))
14058 if (!TYPE_BINFO (t))
14060 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14062 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14063 debug_tree (TYPE_BINFO (t));
14064 error_found = true;
14066 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14068 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14069 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14070 error_found = true;
14073 else if (FUNC_OR_METHOD_TYPE_P (t))
14075 if (TYPE_METHOD_BASETYPE (t)
14076 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14077 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14079 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14080 debug_tree (TYPE_METHOD_BASETYPE (t));
14081 error_found = true;
14084 else if (TREE_CODE (t) == OFFSET_TYPE)
14086 if (TYPE_OFFSET_BASETYPE (t)
14087 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14088 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14090 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14091 debug_tree (TYPE_OFFSET_BASETYPE (t));
14092 error_found = true;
14095 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14096 || FIXED_POINT_TYPE_P (t))
14098 /* FIXME: The following check should pass:
14099 useless_type_conversion_p (const_cast <tree> (t),
14100 TREE_TYPE (TYPE_MAX_VALUE (t))
14101 but does not for C sizetypes in LTO. */
14103 else if (TREE_CODE (t) == ARRAY_TYPE)
14105 if (TYPE_ARRAY_MAX_SIZE (t)
14106 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14108 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14109 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14110 error_found = true;
14113 else if (TYPE_MAX_VALUE_RAW (t))
14115 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14116 debug_tree (TYPE_MAX_VALUE_RAW (t));
14117 error_found = true;
14120 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14122 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14123 debug_tree (TYPE_LANG_SLOT_1 (t));
14124 error_found = true;
14127 /* Check various uses of TYPE_VALUES_RAW. */
14128 if (TREE_CODE (t) == ENUMERAL_TYPE)
14129 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14131 tree value = TREE_VALUE (l);
14132 tree name = TREE_PURPOSE (l);
14134 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
14135 CONST_DECLs of ENUMERAL_TYPE. */
14136 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14138 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14139 debug_tree (value);
14140 debug_tree (name);
14141 error_found = true;
14143 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14144 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14145 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14147 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14148 "to the enum");
14149 debug_tree (value);
14150 debug_tree (name);
14151 error_found = true;
14153 if (TREE_CODE (name) != IDENTIFIER_NODE)
14155 error ("enum value name is not %<IDENTIFIER_NODE%>");
14156 debug_tree (value);
14157 debug_tree (name);
14158 error_found = true;
14161 else if (TREE_CODE (t) == ARRAY_TYPE)
14163 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14165 error ("array %<TYPE_DOMAIN%> is not integer type");
14166 debug_tree (TYPE_DOMAIN (t));
14167 error_found = true;
14170 else if (RECORD_OR_UNION_TYPE_P (t))
14172 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14174 error ("%<TYPE_FIELDS%> defined in incomplete type");
14175 error_found = true;
14177 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14179 /* TODO: verify properties of decls. */
14180 if (TREE_CODE (fld) == FIELD_DECL)
14182 else if (TREE_CODE (fld) == TYPE_DECL)
14184 else if (TREE_CODE (fld) == CONST_DECL)
14186 else if (VAR_P (fld))
14188 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14190 else if (TREE_CODE (fld) == USING_DECL)
14192 else if (TREE_CODE (fld) == FUNCTION_DECL)
14194 else
14196 error ("wrong tree in %<TYPE_FIELDS%> list");
14197 debug_tree (fld);
14198 error_found = true;
14202 else if (TREE_CODE (t) == INTEGER_TYPE
14203 || TREE_CODE (t) == BOOLEAN_TYPE
14204 || TREE_CODE (t) == OFFSET_TYPE
14205 || TREE_CODE (t) == REFERENCE_TYPE
14206 || TREE_CODE (t) == NULLPTR_TYPE
14207 || TREE_CODE (t) == POINTER_TYPE)
14209 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14211 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14212 "is %p",
14213 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14214 error_found = true;
14216 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14218 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14219 debug_tree (TYPE_CACHED_VALUES (t));
14220 error_found = true;
14222 /* Verify just enough of the cache to ensure that no one copied it to a new
14223 type. All copying should go through copy_node, which should clear it. */
14224 else if (TYPE_CACHED_VALUES_P (t))
14226 int i;
14227 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14228 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14229 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14231 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14232 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14233 error_found = true;
14234 break;
14238 else if (FUNC_OR_METHOD_TYPE_P (t))
14239 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14241 /* C++ FE uses TREE_PURPOSE to store initial values. */
14242 if (TREE_PURPOSE (l) && in_lto_p)
14244 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14245 debug_tree (l);
14246 error_found = true;
14248 if (!TYPE_P (TREE_VALUE (l)))
14250 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14251 debug_tree (l);
14252 error_found = true;
14255 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14257 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14258 debug_tree (TYPE_VALUES_RAW (t));
14259 error_found = true;
14261 if (TREE_CODE (t) != INTEGER_TYPE
14262 && TREE_CODE (t) != BOOLEAN_TYPE
14263 && TREE_CODE (t) != OFFSET_TYPE
14264 && TREE_CODE (t) != REFERENCE_TYPE
14265 && TREE_CODE (t) != NULLPTR_TYPE
14266 && TREE_CODE (t) != POINTER_TYPE
14267 && TYPE_CACHED_VALUES_P (t))
14269 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14270 error_found = true;
14273 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14274 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14275 of a type. */
14276 if (TREE_CODE (t) == METHOD_TYPE
14277 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14279 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14280 error_found = true;
14283 if (error_found)
14285 debug_tree (const_cast <tree> (t));
14286 internal_error ("%qs failed", __func__);
14291 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
14292 always non-negative, 2 if ARG is known to be always negative, or 3 if
14293 ARG may be either. */
14296 get_range_pos_neg (tree arg)
14298 if (arg == error_mark_node)
14299 return 3;
14301 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14302 int cnt = 0;
14303 if (TREE_CODE (arg) == INTEGER_CST)
14305 wide_int w = wi::sext (wi::to_wide (arg), prec);
14306 if (wi::neg_p (w))
14307 return 2;
14308 else
14309 return 1;
14311 while (CONVERT_EXPR_P (arg)
14312 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14313 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14315 arg = TREE_OPERAND (arg, 0);
14316 /* A narrower value zero-extended into a wider type
14317 will always result in a non-negative value. */
14318 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14319 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14320 return 1;
14321 prec = TYPE_PRECISION (TREE_TYPE (arg));
14322 if (++cnt > 30)
14323 return 3;
14326 if (TREE_CODE (arg) != SSA_NAME)
14327 return 3;
14328 value_range r;
14329 while (!get_global_range_query ()->range_of_expr (r, arg)
14330 || r.undefined_p () || r.varying_p ())
14332 gimple *g = SSA_NAME_DEF_STMT (arg);
14333 if (is_gimple_assign (g)
14334 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14336 tree t = gimple_assign_rhs1 (g);
14337 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14338 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14340 if (TYPE_UNSIGNED (TREE_TYPE (t))
14341 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14342 return 1;
14343 prec = TYPE_PRECISION (TREE_TYPE (t));
14344 arg = t;
14345 if (++cnt > 30)
14346 return 3;
14347 continue;
14350 return 3;
14352 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14354 /* For unsigned values, the "positive" range comes
14355 below the "negative" range. */
14356 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14357 return 1;
14358 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14359 return 2;
14361 else
14363 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14364 return 1;
14365 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14366 return 2;
14368 return 3;
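/* For illustration: callers typically switch on the 1/2/3 encoding rather
   than redoing a range query of their own.  A sketch with a hypothetical
   tree ARG:  */
#if 0
  switch (get_range_pos_neg (arg))
    {
    case 1:	/* Known non-negative when read as signed.  */
      break;
    case 2:	/* Known negative.  */
      break;
    default:	/* 3: could be either sign; stay conservative.  */
      break;
    }
#endif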
14374 /* Return true if ARG is marked with the nonnull attribute in the
14375 current function signature. */
14377 bool
14378 nonnull_arg_p (const_tree arg)
14380 tree t, attrs, fntype;
14381 unsigned HOST_WIDE_INT arg_num;
14383 gcc_assert (TREE_CODE (arg) == PARM_DECL
14384 && (POINTER_TYPE_P (TREE_TYPE (arg))
14385 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14387 /* The static chain decl is always non null. */
14388 if (arg == cfun->static_chain_decl)
14389 return true;
14391 /* THIS argument of method is always non-NULL. */
14392 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14393 && arg == DECL_ARGUMENTS (cfun->decl)
14394 && flag_delete_null_pointer_checks)
14395 return true;
14397 /* Values passed by reference are always non-NULL. */
14398 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14399 && flag_delete_null_pointer_checks)
14400 return true;
14402 fntype = TREE_TYPE (cfun->decl);
14403 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14405 attrs = lookup_attribute ("nonnull", attrs);
14407 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14408 if (attrs == NULL_TREE)
14409 return false;
14411 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14412 if (TREE_VALUE (attrs) == NULL_TREE)
14413 return true;
14415 /* Get the position number for ARG in the function signature. */
14416 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14418 t = DECL_CHAIN (t), arg_num++)
14420 if (t == arg)
14421 break;
14424 gcc_assert (t == arg);
14426 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14427 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14429 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14430 return true;
14434 return false;
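/* For illustration: given a function compiled from C source such as

       void f (char *a, char *b) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns false for the PARM_DECL of A and true for the
   PARM_DECL of B.  It also returns true for the implicit "this" argument of
   a C++ member function and for reference parameters, provided
   -fdelete-null-pointer-checks is in effect.  */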
14437 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14438 information. */
14440 location_t
14441 set_block (location_t loc, tree block)
14443 location_t pure_loc = get_pure_location (loc);
14444 source_range src_range = get_range_from_loc (line_table, loc);
14445 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14446 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block, discriminator);
14449 location_t
14450 set_source_range (tree expr, location_t start, location_t finish)
14452 source_range src_range;
14453 src_range.m_start = start;
14454 src_range.m_finish = finish;
14455 return set_source_range (expr, src_range);
14458 location_t
14459 set_source_range (tree expr, source_range src_range)
14461 if (!EXPR_P (expr))
14462 return UNKNOWN_LOCATION;
14464 location_t expr_location = EXPR_LOCATION (expr);
14465 location_t pure_loc = get_pure_location (expr_location);
14466 unsigned discriminator = get_discriminator_from_loc (expr_location);
14467 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14468 pure_loc,
14469 src_range,
14470 NULL,
14471 discriminator);
14472 SET_EXPR_LOCATION (expr, adhoc);
14473 return adhoc;
14476 /* Return EXPR, potentially wrapped in a wrapper node at location LOC,
14477 if !CAN_HAVE_LOCATION_P (expr).
14479 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14480 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14482 Wrapper nodes can be identified using location_wrapper_p. */
14484 tree
14485 maybe_wrap_with_location (tree expr, location_t loc)
14487 if (expr == NULL)
14488 return NULL;
14489 if (loc == UNKNOWN_LOCATION)
14490 return expr;
14491 if (CAN_HAVE_LOCATION_P (expr))
14492 return expr;
14493 /* We should only be adding wrappers for constants and for decls,
14494 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14495 gcc_assert (CONSTANT_CLASS_P (expr)
14496 || DECL_P (expr)
14497 || EXCEPTIONAL_CLASS_P (expr));
14499 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14500 any impact of the wrapper nodes. */
14501 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14502 return expr;
14504 /* Compiler-generated temporary variables don't need a wrapper. */
14505 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14506 return expr;
14508 /* If any auto_suppress_location_wrappers are active, don't create
14509 wrappers. */
14510 if (suppress_location_wrappers > 0)
14511 return expr;
14513 tree_code code
14514 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14515 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14516 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14517 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14518 /* Mark this node as being a wrapper. */
14519 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14520 return wrapper;
14523 int suppress_location_wrappers;
14525 /* Return the name of combined function FN, for debugging purposes. */
14527 const char *
14528 combined_fn_name (combined_fn fn)
14530 if (builtin_fn_p (fn))
14532 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14533 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14535 else
14536 return internal_fn_name (as_internal_fn (fn));
14539 /* Return a bitmap with a bit set corresponding to each argument in
14540 a function call type FNTYPE declared with attribute nonnull,
14541 or null if none of the function's arguments are nonnull. The caller
14542 must free the bitmap. */
14544 bitmap
14545 get_nonnull_args (const_tree fntype)
14547 if (fntype == NULL_TREE)
14548 return NULL;
14550 bitmap argmap = NULL;
14551 if (TREE_CODE (fntype) == METHOD_TYPE)
14553 /* The this pointer in C++ non-static member functions is
14554 implicitly nonnull whether or not it's declared as such. */
14555 argmap = BITMAP_ALLOC (NULL);
14556 bitmap_set_bit (argmap, 0);
14559 tree attrs = TYPE_ATTRIBUTES (fntype);
14560 if (!attrs)
14561 return argmap;
14563 /* A function declaration can specify multiple attribute nonnull,
14564 each with zero or more arguments. The loop below creates a bitmap
14565 representing a union of all the arguments. An empty (but non-null)
14566 bitmap means that all arguments have been declared nonnull. */
14567 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14569 attrs = lookup_attribute ("nonnull", attrs);
14570 if (!attrs)
14571 break;
14573 if (!argmap)
14574 argmap = BITMAP_ALLOC (NULL);
14576 if (!TREE_VALUE (attrs))
14578 /* Clear the bitmap in case a previous attribute nonnull
14579 set it and this one overrides it for all arguments. */
14580 bitmap_clear (argmap);
14581 return argmap;
14584 /* Iterate over the indices of the format arguments declared nonnull
14585 and set a bit for each. */
14586 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14588 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14589 bitmap_set_bit (argmap, val);
14593 return argmap;
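/* For illustration: a caller asking whether the zero-based argument I of a
   call with type FNTYPE must be non-null could use the bitmap like this,
   remembering that an empty bitmap means all arguments are nonnull and that
   the bitmap must be freed.  FNTYPE and I are hypothetical here:  */
#if 0
  bitmap nonnull = get_nonnull_args (fntype);
  bool must_be_nonnull
    = nonnull && (bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, i));
  if (nonnull)
    BITMAP_FREE (nonnull);
#endif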
14596 /* Returns true if TYPE is a type where it and all of its subobjects
14597 (recursively) are of structure, union, or array type. */
14599 bool
14600 is_empty_type (const_tree type)
14602 if (RECORD_OR_UNION_TYPE_P (type))
14604 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14605 if (TREE_CODE (field) == FIELD_DECL
14606 && !DECL_PADDING_P (field)
14607 && !is_empty_type (TREE_TYPE (field)))
14608 return false;
14609 return true;
14611 else if (TREE_CODE (type) == ARRAY_TYPE)
14612 return (integer_minus_onep (array_type_nelts (type))
14613 || TYPE_DOMAIN (type) == NULL_TREE
14614 || is_empty_type (TREE_TYPE (type)));
14615 return false;
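/* For illustration, with C++ definitions such as

       struct E {};			// empty
       struct F { E e; E a[4]; };	// empty: only empty subobjects
       struct G { E e; int i; };	// not empty

   is_empty_type is true for E and F and false for G; this feeds the
   "empty record" argument-passing rule implemented just below.  */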
14618 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14619 that shouldn't be passed via stack. */
14621 bool
14622 default_is_empty_record (const_tree type)
14624 if (!abi_version_at_least (12))
14625 return false;
14627 if (type == error_mark_node)
14628 return false;
14630 if (TREE_ADDRESSABLE (type))
14631 return false;
14633 return is_empty_type (TYPE_MAIN_VARIANT (type));
14636 /* Determine whether TYPE is a structure with a flexible array member,
14637 or a union containing such a structure (possibly recursively). */
14639 bool
14640 flexible_array_type_p (const_tree type)
14642 tree x, last;
14643 switch (TREE_CODE (type))
14645 case RECORD_TYPE:
14646 last = NULL_TREE;
14647 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14648 if (TREE_CODE (x) == FIELD_DECL)
14649 last = x;
14650 if (last == NULL_TREE)
14651 return false;
14652 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14653 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14654 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14655 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14656 return true;
14657 return false;
14658 case UNION_TYPE:
14659 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14661 if (TREE_CODE (x) == FIELD_DECL
14662 && flexible_array_type_p (TREE_TYPE (x)))
14663 return true;
14665 return false;
14666 default:
14667 return false;
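/* For illustration: the predicate above matches C99 flexible array members,
   also when reached through a union, e.g.

       struct S { int n; char data[]; };	// true
       union  U { struct S s; int i; };		// true, via S
       struct T { int n; char data[1]; };	// false: a one-element array

   Only a trailing array field with no TYPE_SIZE and an unbounded domain
   qualifies.  */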
14671 /* Like int_size_in_bytes, but handle empty records specially. */
14673 HOST_WIDE_INT
14674 arg_int_size_in_bytes (const_tree type)
14676 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14679 /* Like size_in_bytes, but handle empty records specially. */
14681 tree
14682 arg_size_in_bytes (const_tree type)
14684 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14687 /* Return true if an expression with CODE has to have the same result type as
14688 its first operand. */
14690 bool
14691 expr_type_first_operand_type_p (tree_code code)
14693 switch (code)
14695 case NEGATE_EXPR:
14696 case ABS_EXPR:
14697 case BIT_NOT_EXPR:
14698 case PAREN_EXPR:
14699 case CONJ_EXPR:
14701 case PLUS_EXPR:
14702 case MINUS_EXPR:
14703 case MULT_EXPR:
14704 case TRUNC_DIV_EXPR:
14705 case CEIL_DIV_EXPR:
14706 case FLOOR_DIV_EXPR:
14707 case ROUND_DIV_EXPR:
14708 case TRUNC_MOD_EXPR:
14709 case CEIL_MOD_EXPR:
14710 case FLOOR_MOD_EXPR:
14711 case ROUND_MOD_EXPR:
14712 case RDIV_EXPR:
14713 case EXACT_DIV_EXPR:
14714 case MIN_EXPR:
14715 case MAX_EXPR:
14716 case BIT_IOR_EXPR:
14717 case BIT_XOR_EXPR:
14718 case BIT_AND_EXPR:
14720 case LSHIFT_EXPR:
14721 case RSHIFT_EXPR:
14722 case LROTATE_EXPR:
14723 case RROTATE_EXPR:
14724 return true;
14726 default:
14727 return false;
14731 /* Return a typenode for the "standard" C type with a given name. */
14732 tree
14733 get_typenode_from_name (const char *name)
14735 if (name == NULL || *name == '\0')
14736 return NULL_TREE;
14738 if (strcmp (name, "char") == 0)
14739 return char_type_node;
14740 if (strcmp (name, "unsigned char") == 0)
14741 return unsigned_char_type_node;
14742 if (strcmp (name, "signed char") == 0)
14743 return signed_char_type_node;
14745 if (strcmp (name, "short int") == 0)
14746 return short_integer_type_node;
14747 if (strcmp (name, "short unsigned int") == 0)
14748 return short_unsigned_type_node;
14750 if (strcmp (name, "int") == 0)
14751 return integer_type_node;
14752 if (strcmp (name, "unsigned int") == 0)
14753 return unsigned_type_node;
14755 if (strcmp (name, "long int") == 0)
14756 return long_integer_type_node;
14757 if (strcmp (name, "long unsigned int") == 0)
14758 return long_unsigned_type_node;
14760 if (strcmp (name, "long long int") == 0)
14761 return long_long_integer_type_node;
14762 if (strcmp (name, "long long unsigned int") == 0)
14763 return long_long_unsigned_type_node;
14765 gcc_unreachable ();
14768 /* List of pointer types used to declare builtins before we have seen their
14769 real declaration.
14771 Keep the size up to date in tree.h ! */
14772 const builtin_structptr_type builtin_structptr_types[6] =
14774 { fileptr_type_node, ptr_type_node, "FILE" },
14775 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14776 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14777 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14778 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14779 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14782 /* Return the maximum object size. */
14784 tree
14785 max_object_size (void)
14787 /* To do: Make this a configurable parameter. */
14788 return TYPE_MAX_VALUE (ptrdiff_type_node);
14791 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14792 parameter default to false and that weeds out error_mark_node. */
14794 bool
14795 verify_type_context (location_t loc, type_context_kind context,
14796 const_tree type, bool silent_p)
14798 if (type == error_mark_node)
14799 return true;
14801 gcc_assert (TYPE_P (type));
14802 return (!targetm.verify_type_context
14803 || targetm.verify_type_context (loc, context, type, silent_p));
14806 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14807 delete operators. Return false if they may or may not name such
14808 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14809 do not. */
14811 bool
14812 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14813 bool *pcertain /* = NULL */)
14815 bool certain;
14816 if (!pcertain)
14817 pcertain = &certain;
14819 const char *new_name = IDENTIFIER_POINTER (new_asm);
14820 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14821 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14822 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14824 /* The following failures are due to invalid names so they're not
14825 considered certain mismatches. */
14826 *pcertain = false;
14828 if (new_len < 5 || delete_len < 6)
14829 return false;
14830 if (new_name[0] == '_')
14831 ++new_name, --new_len;
14832 if (new_name[0] == '_')
14833 ++new_name, --new_len;
14834 if (delete_name[0] == '_')
14835 ++delete_name, --delete_len;
14836 if (delete_name[0] == '_')
14837 ++delete_name, --delete_len;
14838 if (new_len < 4 || delete_len < 5)
14839 return false;
14841 /* The following failures are due to names of user-defined operators
14842 so they're also not considered certain mismatches. */
14844 /* *_len is now just the length after initial underscores. */
14845 if (new_name[0] != 'Z' || new_name[1] != 'n')
14846 return false;
14847 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14848 return false;
14850 /* The following failures are certain mismatches. */
14851 *pcertain = true;
14853 /* _Znw must match _Zdl, _Zna must match _Zda. */
14854 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14855 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14856 return false;
14857 /* 'j', 'm' and 'y' correspond to size_t. */
14858 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14859 return false;
14860 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14861 return false;
14862 if (new_len == 4
14863 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14865 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14866 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14867 if (delete_len == 5)
14868 return true;
14869 if (delete_len == 6 && delete_name[5] == new_name[3])
14870 return true;
14871 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14872 return true;
14874 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14875 || (new_len == 33
14876 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14878 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14879 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14880 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14881 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14882 return true;
14883 if (delete_len == 21
14884 && delete_name[5] == new_name[3]
14885 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14886 return true;
14887 if (delete_len == 34
14888 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14889 return true;
14892 /* The negative result is conservative. */
14893 *pcertain = false;
14894 return false;
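/* Some illustrative pairs, using the Itanium C++ ABI manglings the code
   above parses:

       _Znwm / _ZdlPv	  operator new (size_t) with operator delete (void*)
       _Znam / _ZdaPv	  the array forms of the pair above
       _Znwm / _ZdlPvm	  also accepted: the sized form of operator delete
       _Znwm / _ZdaPv	  rejected: scalar new paired with array delete

   The letter after "_Zn[wa]" ('j', 'm' or 'y') is the mangling of size_t on
   the target.  */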
14897 /* Return the zero-based number corresponding to the argument being
14898 deallocated if FNDECL is a deallocation function or an out-of-bounds
14899 value if it isn't. */
14901 unsigned
14902 fndecl_dealloc_argno (tree fndecl)
14904 /* A call to operator delete isn't recognized as one to a built-in. */
14905 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14907 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14908 return 0;
14910 /* Avoid placement delete that's not been inlined. */
14911 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14912 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14913 || id_equal (fname, "_ZdaPvS_")) // array form
14914 return UINT_MAX;
14915 return 0;
14918 /* TODO: Handle user-defined functions with attribute malloc? Handle
14919 known non-built-ins like fopen? */
14920 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14922 switch (DECL_FUNCTION_CODE (fndecl))
14924 case BUILT_IN_FREE:
14925 case BUILT_IN_REALLOC:
14926 return 0;
14927 default:
14928 break;
14930 return UINT_MAX;
14933 tree attrs = DECL_ATTRIBUTES (fndecl);
14934 if (!attrs)
14935 return UINT_MAX;
14937 for (tree atfree = attrs;
14938 (atfree = lookup_attribute ("*dealloc", atfree));
14939 atfree = TREE_CHAIN (atfree))
14941 tree alloc = TREE_VALUE (atfree);
14942 if (!alloc)
14943 continue;
14945 tree pos = TREE_CHAIN (alloc);
14946 if (!pos)
14947 return 0;
14949 pos = TREE_VALUE (pos);
14950 return TREE_INT_CST_LOW (pos) - 1;
14953 return UINT_MAX;
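/* For illustration: for the built-ins the answer is position 0
   (free (PTR), realloc (PTR, ...)).  For user code, the internal "*dealloc"
   attribute consulted above is populated when an allocator is declared with
   attribute malloc naming its deallocator; a sketch using hypothetical
   functions:

       void  release (void *p, int flags);
       void *acquire (int n) __attribute__ ((malloc (release, 1)));

   after which this function returns 0 (the zero-based form of the recorded
   index 1) for the declaration of release.  */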
14956 /* If EXPR refers to a character array or pointer declared attribute
14957 nonstring, return a decl for that array or pointer and set *REF
14958 to the referenced enclosing object or pointer. Otherwise return
14959 null. */
14961 tree
14962 get_attr_nonstring_decl (tree expr, tree *ref)
14964 tree decl = expr;
14965 tree var = NULL_TREE;
14966 if (TREE_CODE (decl) == SSA_NAME)
14968 gimple *def = SSA_NAME_DEF_STMT (decl);
14970 if (is_gimple_assign (def))
14972 tree_code code = gimple_assign_rhs_code (def);
14973 if (code == ADDR_EXPR
14974 || code == COMPONENT_REF
14975 || code == VAR_DECL)
14976 decl = gimple_assign_rhs1 (def);
14978 else
14979 var = SSA_NAME_VAR (decl);
14982 if (TREE_CODE (decl) == ADDR_EXPR)
14983 decl = TREE_OPERAND (decl, 0);
14985 /* To simplify calling code, store the referenced DECL regardless of
14986 the attribute determined below, but avoid storing the SSA_NAME_VAR
14987 obtained above (it's not useful for dataflow purposes). */
14988 if (ref)
14989 *ref = decl;
14991 /* Use the SSA_NAME_VAR that was determined above to see if it's
14992 declared nonstring. Otherwise drill down into the referenced
14993 DECL. */
14994 if (var)
14995 decl = var;
14996 else if (TREE_CODE (decl) == ARRAY_REF)
14997 decl = TREE_OPERAND (decl, 0);
14998 else if (TREE_CODE (decl) == COMPONENT_REF)
14999 decl = TREE_OPERAND (decl, 1);
15000 else if (TREE_CODE (decl) == MEM_REF)
15001 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15003 if (DECL_P (decl)
15004 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15005 return decl;
15007 return NULL_TREE;
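/* For illustration: for C source such as

       __attribute__ ((nonstring)) char id[8];

   get_attr_nonstring_decl returns the declaration of ID when EXPR refers to
   it directly or through an ARRAY_REF, COMPONENT_REF, MEM_REF or SSA name,
   letting callers tune string diagnostics for arrays that need not be
   NUL-terminated.  */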
15010 /* Return the length of the attribute names string
15011 if the arglist chain has more than one entry, and -1 otherwise. */
15014 get_target_clone_attr_len (tree arglist)
15016 tree arg;
15017 int str_len_sum = 0;
15018 int argnum = 0;
15020 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15022 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15023 size_t len = strlen (str);
15024 str_len_sum += len + 1;
15025 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15026 argnum++;
15027 argnum++;
15029 if (argnum <= 1)
15030 return -1;
15031 return str_len_sum;
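/* For illustration: for a declaration such as

       __attribute__ ((target_clones ("avx2", "arch=atom", "default")))
       int foo (void);

   the argument list names three clones, so the function above returns the
   combined length of the names (including separators) rather than -1, which
   callers use to tell real multi-versioning from a single-entry list.  */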
15034 void
15035 tree_cc_finalize (void)
15037 clear_nonstandard_integer_type_cache ();
15040 #if CHECKING_P
15042 namespace selftest {
15044 /* Selftests for tree. */
15046 /* Verify that integer constants are sane. */
15048 static void
15049 test_integer_constants ()
15051 ASSERT_TRUE (integer_type_node != NULL);
15052 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15054 tree type = integer_type_node;
15056 tree zero = build_zero_cst (type);
15057 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15058 ASSERT_EQ (type, TREE_TYPE (zero));
15060 tree one = build_int_cst (type, 1);
15061 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15062 ASSERT_EQ (type, TREE_TYPE (one));
15065 /* Verify identifiers. */
15067 static void
15068 test_identifiers ()
15070 tree identifier = get_identifier ("foo");
15071 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15072 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15075 /* Verify LABEL_DECL. */
15077 static void
15078 test_labels ()
15080 tree identifier = get_identifier ("err");
15081 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15082 identifier, void_type_node);
15083 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15084 ASSERT_FALSE (FORCED_LABEL (label_decl));
15087 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15088 are given by VALS. */
15090 static tree
15091 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15093 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15094 tree_vector_builder builder (type, vals.length (), 1);
15095 builder.splice (vals);
15096 return builder.build ();
15099 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15101 static void
15102 check_vector_cst (const vec<tree> &expected, tree actual)
15104 ASSERT_KNOWN_EQ (expected.length (),
15105 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15106 for (unsigned int i = 0; i < expected.length (); ++i)
15107 ASSERT_EQ (wi::to_wide (expected[i]),
15108 wi::to_wide (vector_cst_elt (actual, i)));
15111 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15112 and that its elements match EXPECTED. */
15114 static void
15115 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15116 unsigned int npatterns)
15118 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15119 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15120 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15121 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15122 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15123 check_vector_cst (expected, actual);
15126 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15127 and NPATTERNS background elements, and that its elements match
15128 EXPECTED. */
15130 static void
15131 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15132 unsigned int npatterns)
15134 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15135 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15136 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15137 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15138 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15139 check_vector_cst (expected, actual);
15142 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15143 and that its elements match EXPECTED. */
15145 static void
15146 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15147 unsigned int npatterns)
15149 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15150 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15151 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15152 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15153 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15154 check_vector_cst (expected, actual);
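/* For illustration: in the VECTOR_CST encoding these helpers exercise, only
   NPATTERNS * NELTS_PER_PATTERN elements are stored.  A broadcast such as
   { 100, 100, ..., 100 } needs one pattern of one element; a linear series
   such as { 0, 1, 2, ..., 7 } needs one pattern of three elements, where the
   step is the difference between the last two encoded elements and the
   remaining elements are extrapolated from it.  */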
15157 /* Test the creation of VECTOR_CSTs. */
15159 static void
15160 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15162 auto_vec<tree, 8> elements (8);
15163 elements.quick_grow (8);
15164 tree element_type = build_nonstandard_integer_type (16, true);
15165 tree vector_type = build_vector_type (element_type, 8);
15167 /* Test a simple linear series with a base of 0 and a step of 1:
15168 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15169 for (unsigned int i = 0; i < 8; ++i)
15170 elements[i] = build_int_cst (element_type, i);
15171 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15172 check_vector_cst_stepped (elements, vector, 1);
15174 /* Try the same with the first element replaced by 100:
15175 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15176 elements[0] = build_int_cst (element_type, 100);
15177 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15178 check_vector_cst_stepped (elements, vector, 1);
15180 /* Try a series that wraps around.
15181 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15182 for (unsigned int i = 1; i < 8; ++i)
15183 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15184 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15185 check_vector_cst_stepped (elements, vector, 1);
15187 /* Try a downward series:
15188 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15189 for (unsigned int i = 1; i < 8; ++i)
15190 elements[i] = build_int_cst (element_type, 80 - i);
15191 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15192 check_vector_cst_stepped (elements, vector, 1);
15194 /* Try two interleaved series with different bases and steps:
15195 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15196 elements[1] = build_int_cst (element_type, 53);
15197 for (unsigned int i = 2; i < 8; i += 2)
15199 elements[i] = build_int_cst (element_type, 70 - i * 2);
15200 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15202 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15203 check_vector_cst_stepped (elements, vector, 2);
15205 /* Try a duplicated value:
15206 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15207 for (unsigned int i = 1; i < 8; ++i)
15208 elements[i] = elements[0];
15209 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15210 check_vector_cst_duplicate (elements, vector, 1);
15212 /* Try an interleaved duplicated value:
15213 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15214 elements[1] = build_int_cst (element_type, 55);
15215 for (unsigned int i = 2; i < 8; ++i)
15216 elements[i] = elements[i - 2];
15217 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15218 check_vector_cst_duplicate (elements, vector, 2);
15220 /* Try a duplicated value with 2 exceptions
15221 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15222 elements[0] = build_int_cst (element_type, 41);
15223 elements[1] = build_int_cst (element_type, 97);
15224 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15225 check_vector_cst_fill (elements, vector, 2);
15227 /* Try with and without a step
15228 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15229 for (unsigned int i = 3; i < 8; i += 2)
15230 elements[i] = build_int_cst (element_type, i * 7);
15231 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15232 check_vector_cst_stepped (elements, vector, 2);
15234 /* Try a fully-general constant:
15235 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15236 elements[5] = build_int_cst (element_type, 9990);
15237 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15238 check_vector_cst_fill (elements, vector, 4);
15241 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15242 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15243 modifying its argument in-place. */
15245 static void
15246 check_strip_nops (tree node, tree expected)
15248 STRIP_NOPS (node);
15249 ASSERT_EQ (expected, node);
15252 /* Verify location wrappers. */
15254 static void
15255 test_location_wrappers ()
15257 location_t loc = BUILTINS_LOCATION;
15259 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15261 /* Wrapping a constant. */
15262 tree int_cst = build_int_cst (integer_type_node, 42);
15263 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15264 ASSERT_FALSE (location_wrapper_p (int_cst));
15266 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15267 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15268 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15269 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15271 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15272 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15274 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15275 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15276 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15277 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15279 /* Wrapping a STRING_CST. */
15280 tree string_cst = build_string (4, "foo");
15281 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15282 ASSERT_FALSE (location_wrapper_p (string_cst));
15284 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15285 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15286 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15287 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15288 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15291 /* Wrapping a variable. */
15292 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15293 get_identifier ("some_int_var"),
15294 integer_type_node);
15295 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15296 ASSERT_FALSE (location_wrapper_p (int_var));
15298 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15299 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15300 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15301 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15303 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15304 wrapper. */
15305 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15306 ASSERT_FALSE (location_wrapper_p (r_cast));
15307 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15309 /* Verify that STRIP_NOPS removes wrappers. */
15310 check_strip_nops (wrapped_int_cst, int_cst);
15311 check_strip_nops (wrapped_string_cst, string_cst);
15312 check_strip_nops (wrapped_int_var, int_var);
15315 /* Test various tree predicates. Verify that location wrappers don't
15316 affect the results. */
15318 static void
15319 test_predicates ()
15320 {
15321 /* Build various constants and wrappers around them. */
15323 location_t loc = BUILTINS_LOCATION;
15325 tree i_0 = build_int_cst (integer_type_node, 0);
15326 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15328 tree i_1 = build_int_cst (integer_type_node, 1);
15329 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15331 tree i_m1 = build_int_cst (integer_type_node, -1);
15332 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15334 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15335 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15336 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15337 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15338 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15339 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15341 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15342 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15343 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15345 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15346 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15347 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
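  /* Naming convention for the locals above: i_* are INTEGER_CSTs, f_* are
     REAL_CSTs, c_* are COMPLEX_CSTs, and the wr_* variants are the same
     constants wrapped in location wrappers, so each predicate can be
     checked both with and without a wrapper.  */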
15349 /* TODO: vector constants. */
15351 /* Test integer_onep. */
15352 ASSERT_FALSE (integer_onep (i_0));
15353 ASSERT_FALSE (integer_onep (wr_i_0));
15354 ASSERT_TRUE (integer_onep (i_1));
15355 ASSERT_TRUE (integer_onep (wr_i_1));
15356 ASSERT_FALSE (integer_onep (i_m1));
15357 ASSERT_FALSE (integer_onep (wr_i_m1));
15358 ASSERT_FALSE (integer_onep (f_0));
15359 ASSERT_FALSE (integer_onep (wr_f_0));
15360 ASSERT_FALSE (integer_onep (f_1));
15361 ASSERT_FALSE (integer_onep (wr_f_1));
15362 ASSERT_FALSE (integer_onep (f_m1));
15363 ASSERT_FALSE (integer_onep (wr_f_m1));
15364 ASSERT_FALSE (integer_onep (c_i_0));
15365 ASSERT_TRUE (integer_onep (c_i_1));
15366 ASSERT_FALSE (integer_onep (c_i_m1));
15367 ASSERT_FALSE (integer_onep (c_f_0));
15368 ASSERT_FALSE (integer_onep (c_f_1));
15369 ASSERT_FALSE (integer_onep (c_f_m1));
15371 /* Test integer_zerop. */
15372 ASSERT_TRUE (integer_zerop (i_0));
15373 ASSERT_TRUE (integer_zerop (wr_i_0));
15374 ASSERT_FALSE (integer_zerop (i_1));
15375 ASSERT_FALSE (integer_zerop (wr_i_1));
15376 ASSERT_FALSE (integer_zerop (i_m1));
15377 ASSERT_FALSE (integer_zerop (wr_i_m1));
15378 ASSERT_FALSE (integer_zerop (f_0));
15379 ASSERT_FALSE (integer_zerop (wr_f_0));
15380 ASSERT_FALSE (integer_zerop (f_1));
15381 ASSERT_FALSE (integer_zerop (wr_f_1));
15382 ASSERT_FALSE (integer_zerop (f_m1));
15383 ASSERT_FALSE (integer_zerop (wr_f_m1));
15384 ASSERT_TRUE (integer_zerop (c_i_0));
15385 ASSERT_FALSE (integer_zerop (c_i_1));
15386 ASSERT_FALSE (integer_zerop (c_i_m1));
15387 ASSERT_FALSE (integer_zerop (c_f_0));
15388 ASSERT_FALSE (integer_zerop (c_f_1));
15389 ASSERT_FALSE (integer_zerop (c_f_m1));
15391 /* Test integer_all_onesp. */
15392 ASSERT_FALSE (integer_all_onesp (i_0));
15393 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15394 ASSERT_FALSE (integer_all_onesp (i_1));
15395 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15396 ASSERT_TRUE (integer_all_onesp (i_m1));
15397 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15398 ASSERT_FALSE (integer_all_onesp (f_0));
15399 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15400 ASSERT_FALSE (integer_all_onesp (f_1));
15401 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15402 ASSERT_FALSE (integer_all_onesp (f_m1));
15403 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15404 ASSERT_FALSE (integer_all_onesp (c_i_0));
15405 ASSERT_FALSE (integer_all_onesp (c_i_1));
15406 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15407 ASSERT_FALSE (integer_all_onesp (c_f_0));
15408 ASSERT_FALSE (integer_all_onesp (c_f_1));
15409 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15411 /* Test integer_minus_onep. */
15412 ASSERT_FALSE (integer_minus_onep (i_0));
15413 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15414 ASSERT_FALSE (integer_minus_onep (i_1));
15415 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15416 ASSERT_TRUE (integer_minus_onep (i_m1));
15417 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15418 ASSERT_FALSE (integer_minus_onep (f_0));
15419 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15420 ASSERT_FALSE (integer_minus_onep (f_1));
15421 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15422 ASSERT_FALSE (integer_minus_onep (f_m1));
15423 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15424 ASSERT_FALSE (integer_minus_onep (c_i_0));
15425 ASSERT_FALSE (integer_minus_onep (c_i_1));
15426 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15427 ASSERT_FALSE (integer_minus_onep (c_f_0));
15428 ASSERT_FALSE (integer_minus_onep (c_f_1));
15429 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15431 /* Test integer_each_onep. */
15432 ASSERT_FALSE (integer_each_onep (i_0));
15433 ASSERT_FALSE (integer_each_onep (wr_i_0));
15434 ASSERT_TRUE (integer_each_onep (i_1));
15435 ASSERT_TRUE (integer_each_onep (wr_i_1));
15436 ASSERT_FALSE (integer_each_onep (i_m1));
15437 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15438 ASSERT_FALSE (integer_each_onep (f_0));
15439 ASSERT_FALSE (integer_each_onep (wr_f_0));
15440 ASSERT_FALSE (integer_each_onep (f_1));
15441 ASSERT_FALSE (integer_each_onep (wr_f_1));
15442 ASSERT_FALSE (integer_each_onep (f_m1));
15443 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15444 ASSERT_FALSE (integer_each_onep (c_i_0));
15445 ASSERT_FALSE (integer_each_onep (c_i_1));
15446 ASSERT_FALSE (integer_each_onep (c_i_m1));
15447 ASSERT_FALSE (integer_each_onep (c_f_0));
15448 ASSERT_FALSE (integer_each_onep (c_f_1));
15449 ASSERT_FALSE (integer_each_onep (c_f_m1));
15451 /* Test integer_truep. */
15452 ASSERT_FALSE (integer_truep (i_0));
15453 ASSERT_FALSE (integer_truep (wr_i_0));
15454 ASSERT_TRUE (integer_truep (i_1));
15455 ASSERT_TRUE (integer_truep (wr_i_1));
15456 ASSERT_FALSE (integer_truep (i_m1));
15457 ASSERT_FALSE (integer_truep (wr_i_m1));
15458 ASSERT_FALSE (integer_truep (f_0));
15459 ASSERT_FALSE (integer_truep (wr_f_0));
15460 ASSERT_FALSE (integer_truep (f_1));
15461 ASSERT_FALSE (integer_truep (wr_f_1));
15462 ASSERT_FALSE (integer_truep (f_m1));
15463 ASSERT_FALSE (integer_truep (wr_f_m1));
15464 ASSERT_FALSE (integer_truep (c_i_0));
15465 ASSERT_TRUE (integer_truep (c_i_1));
15466 ASSERT_FALSE (integer_truep (c_i_m1));
15467 ASSERT_FALSE (integer_truep (c_f_0));
15468 ASSERT_FALSE (integer_truep (c_f_1));
15469 ASSERT_FALSE (integer_truep (c_f_m1));
15471 /* Test integer_nonzerop. */
15472 ASSERT_FALSE (integer_nonzerop (i_0));
15473 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15474 ASSERT_TRUE (integer_nonzerop (i_1));
15475 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15476 ASSERT_TRUE (integer_nonzerop (i_m1));
15477 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15478 ASSERT_FALSE (integer_nonzerop (f_0));
15479 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15480 ASSERT_FALSE (integer_nonzerop (f_1));
15481 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15482 ASSERT_FALSE (integer_nonzerop (f_m1));
15483 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15484 ASSERT_FALSE (integer_nonzerop (c_i_0));
15485 ASSERT_TRUE (integer_nonzerop (c_i_1));
15486 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15487 ASSERT_FALSE (integer_nonzerop (c_f_0));
15488 ASSERT_FALSE (integer_nonzerop (c_f_1));
15489 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15491 /* Test real_zerop. */
15492 ASSERT_FALSE (real_zerop (i_0));
15493 ASSERT_FALSE (real_zerop (wr_i_0));
15494 ASSERT_FALSE (real_zerop (i_1));
15495 ASSERT_FALSE (real_zerop (wr_i_1));
15496 ASSERT_FALSE (real_zerop (i_m1));
15497 ASSERT_FALSE (real_zerop (wr_i_m1));
15498 ASSERT_TRUE (real_zerop (f_0));
15499 ASSERT_TRUE (real_zerop (wr_f_0));
15500 ASSERT_FALSE (real_zerop (f_1));
15501 ASSERT_FALSE (real_zerop (wr_f_1));
15502 ASSERT_FALSE (real_zerop (f_m1));
15503 ASSERT_FALSE (real_zerop (wr_f_m1));
15504 ASSERT_FALSE (real_zerop (c_i_0));
15505 ASSERT_FALSE (real_zerop (c_i_1));
15506 ASSERT_FALSE (real_zerop (c_i_m1));
15507 ASSERT_TRUE (real_zerop (c_f_0));
15508 ASSERT_FALSE (real_zerop (c_f_1));
15509 ASSERT_FALSE (real_zerop (c_f_m1));
15511 /* Test real_onep. */
15512 ASSERT_FALSE (real_onep (i_0));
15513 ASSERT_FALSE (real_onep (wr_i_0));
15514 ASSERT_FALSE (real_onep (i_1));
15515 ASSERT_FALSE (real_onep (wr_i_1));
15516 ASSERT_FALSE (real_onep (i_m1));
15517 ASSERT_FALSE (real_onep (wr_i_m1));
15518 ASSERT_FALSE (real_onep (f_0));
15519 ASSERT_FALSE (real_onep (wr_f_0));
15520 ASSERT_TRUE (real_onep (f_1));
15521 ASSERT_TRUE (real_onep (wr_f_1));
15522 ASSERT_FALSE (real_onep (f_m1));
15523 ASSERT_FALSE (real_onep (wr_f_m1));
15524 ASSERT_FALSE (real_onep (c_i_0));
15525 ASSERT_FALSE (real_onep (c_i_1));
15526 ASSERT_FALSE (real_onep (c_i_m1));
15527 ASSERT_FALSE (real_onep (c_f_0));
15528 ASSERT_TRUE (real_onep (c_f_1));
15529 ASSERT_FALSE (real_onep (c_f_m1));
15531 /* Test real_minus_onep. */
15532 ASSERT_FALSE (real_minus_onep (i_0));
15533 ASSERT_FALSE (real_minus_onep (wr_i_0));
15534 ASSERT_FALSE (real_minus_onep (i_1));
15535 ASSERT_FALSE (real_minus_onep (wr_i_1));
15536 ASSERT_FALSE (real_minus_onep (i_m1));
15537 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15538 ASSERT_FALSE (real_minus_onep (f_0));
15539 ASSERT_FALSE (real_minus_onep (wr_f_0));
15540 ASSERT_FALSE (real_minus_onep (f_1));
15541 ASSERT_FALSE (real_minus_onep (wr_f_1));
15542 ASSERT_TRUE (real_minus_onep (f_m1));
15543 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15544 ASSERT_FALSE (real_minus_onep (c_i_0));
15545 ASSERT_FALSE (real_minus_onep (c_i_1));
15546 ASSERT_FALSE (real_minus_onep (c_i_m1));
15547 ASSERT_FALSE (real_minus_onep (c_f_0));
15548 ASSERT_FALSE (real_minus_onep (c_f_1));
15549 ASSERT_TRUE (real_minus_onep (c_f_m1));
15551 /* Test zerop. */
15552 ASSERT_TRUE (zerop (i_0));
15553 ASSERT_TRUE (zerop (wr_i_0));
15554 ASSERT_FALSE (zerop (i_1));
15555 ASSERT_FALSE (zerop (wr_i_1));
15556 ASSERT_FALSE (zerop (i_m1));
15557 ASSERT_FALSE (zerop (wr_i_m1));
15558 ASSERT_TRUE (zerop (f_0));
15559 ASSERT_TRUE (zerop (wr_f_0));
15560 ASSERT_FALSE (zerop (f_1));
15561 ASSERT_FALSE (zerop (wr_f_1));
15562 ASSERT_FALSE (zerop (f_m1));
15563 ASSERT_FALSE (zerop (wr_f_m1));
15564 ASSERT_TRUE (zerop (c_i_0));
15565 ASSERT_FALSE (zerop (c_i_1));
15566 ASSERT_FALSE (zerop (c_i_m1));
15567 ASSERT_TRUE (zerop (c_f_0));
15568 ASSERT_FALSE (zerop (c_f_1));
15569 ASSERT_FALSE (zerop (c_f_m1));
15571 /* Test tree_expr_nonnegative_p. */
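  /* Note that complex constants are never treated as nonnegative here,
     not even complex zero or one.  */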
15572 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15573 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15574 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15575 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15576 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15577 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15578 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15579 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15580 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15581 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15582 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15583 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15584 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15585 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15586 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15587 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15588 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15589 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15591 /* Test tree_expr_nonzero_p. */
15592 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15593 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15594 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15595 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15596 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15597 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15599 /* Test integer_valued_real_p. */
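  /* i.e. REAL_CSTs whose value is an exact integer; an INTEGER_CST such as
     i_0 is rejected outright.  */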
15600 ASSERT_FALSE (integer_valued_real_p (i_0));
15601 ASSERT_TRUE (integer_valued_real_p (f_0));
15602 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15603 ASSERT_TRUE (integer_valued_real_p (f_1));
15604 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15606 /* Test integer_pow2p. */
15607 ASSERT_FALSE (integer_pow2p (i_0));
15608 ASSERT_TRUE (integer_pow2p (i_1));
15609 ASSERT_TRUE (integer_pow2p (wr_i_1));
15611 /* Test uniform_integer_cst_p. */
15612 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15613 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15614 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15615 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15616 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15617 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15618 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15619 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15620 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15621 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15622 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15623 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15624 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15625 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15626 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15627 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15628 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15629   ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15630 }
15632 /* Check that string escaping works correctly. */
15634 static void
15635 test_escaped_strings (void)
15636 {
15637 int saved_cutoff;
15638 escaped_string msg;
15640 msg.escape (NULL);
15641 /* ASSERT_STREQ does not accept NULL as a valid test
15642 result, so we have to use ASSERT_EQ instead. */
15643 ASSERT_EQ (NULL, (const char *) msg);
15645 msg.escape ("");
15646 ASSERT_STREQ ("", (const char *) msg);
15648 msg.escape ("foobar");
15649 ASSERT_STREQ ("foobar", (const char *) msg);
15651 /* Ensure that we have -fmessage-length set to 0. */
15652 saved_cutoff = pp_line_cutoff (global_dc->printer);
15653 pp_line_cutoff (global_dc->printer) = 0;
15655 msg.escape ("foo\nbar");
15656 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15658 msg.escape ("\a\b\f\n\r\t\v");
15659 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15661 /* Now repeat the tests with -fmessage-length set to 5. */
15662 pp_line_cutoff (global_dc->printer) = 5;
15664 /* Note that the newline is not translated into an escape. */
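  /* (Contrast with the -fmessage-length=0 case above, where "foo\nbar"
     became "foo\\nbar"; the other control characters are escaped either
     way, as the second string below shows.)  */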
15665 msg.escape ("foo\nbar");
15666 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15668 msg.escape ("\a\b\f\n\r\t\v");
15669 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15671 /* Restore the original message length setting. */
15672   pp_line_cutoff (global_dc->printer) = saved_cutoff;
15673 }
15675 /* Run all of the selftests within this file. */
15677 void
15678 tree_cc_tests ()
15679 {
15680 test_integer_constants ();
15681 test_identifiers ();
15682 test_labels ();
15683 test_vector_cst_patterns ();
15684 test_location_wrappers ();
15685 test_predicates ();
15686   test_escaped_strings ();
15687 }
15689 } // namespace selftest
15691 #endif /* CHECKING_P */
15693 #include "gt-tree.h"