gcc/tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
22 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 /* Names of tree components.
78 Used for printing out the tree and error messages. */
79 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
80 #define END_OF_BASE_TREE_CODES "@dummy",
82 static const char *const tree_code_name[] = {
83 #include "all-tree.def"
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
89 /* Each tree code class has an associated string representation.
90 These must correspond to the tree_code_class entries. */
92 const char *const tree_code_class_strings[] =
94 "exceptional",
95 "constant",
96 "type",
97 "declaration",
98 "reference",
99 "comparison",
100 "unary",
101 "binary",
102 "statement",
103 "vl_exp",
104 "expression"
107 /* obstack.[ch] explicitly declined to prototype this. */
108 extern int _obstack_allocated_p (struct obstack *h, void *obj);
110 /* Statistics-gathering stuff. */
112 static uint64_t tree_code_counts[MAX_TREE_CODES];
113 uint64_t tree_node_counts[(int) all_kinds];
114 uint64_t tree_node_sizes[(int) all_kinds];
116 /* Keep in sync with tree.h:enum tree_node_kind. */
117 static const char * const tree_node_kind_names[] = {
118 "decls",
119 "types",
120 "blocks",
121 "stmts",
122 "refs",
123 "exprs",
124 "constants",
125 "identifiers",
126 "vecs",
127 "binfos",
128 "ssa names",
129 "constructors",
130 "random kinds",
131 "lang_decl kinds",
132 "lang_type kinds",
133 "omp clauses",
136 /* Unique id for next decl created. */
137 static GTY(()) int next_decl_uid;
138 /* Unique id for next type created. */
139 static GTY(()) unsigned next_type_uid = 1;
140 /* Unique id for next debug decl created. Use negative numbers,
141 to catch erroneous uses. */
142 static GTY(()) int next_debug_decl_uid;
144 /* Since we cannot rehash a type after it is in the table, we have to
145 keep the hash code. */
147 struct GTY((for_user)) type_hash {
148 unsigned long hash;
149 tree type;
152 /* Initial size of the hash table (rounded to next prime). */
153 #define TYPE_HASH_INITIAL_SIZE 1000
155 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
157 static hashval_t hash (type_hash *t) { return t->hash; }
158 static bool equal (type_hash *a, type_hash *b);
160 static int
161 keep_cache_entry (type_hash *&t)
163 return ggc_marked_p (t->type);
167 /* Now here is the hash table. When recording a type, it is added to
168 the slot whose index is the hash code. Note that the hash table is
169 used for several kinds of types (function types, array types and
170 array index range types, for now). While all these live in the
171 same table, they are completely independent, and the hash code is
172 computed differently for each of these. */
174 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
176 /* Hash table and temporary node for larger integer const values. */
177 static GTY (()) tree int_cst_node;
179 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
181 static hashval_t hash (tree t);
182 static bool equal (tree x, tree y);
185 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
187 /* Class and variable for making sure that there is a single POLY_INT_CST
188 for a given value. */
189 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
191 typedef std::pair<tree, const poly_wide_int *> compare_type;
192 static hashval_t hash (tree t);
193 static bool equal (tree x, const compare_type &y);
196 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
198 /* Hash table for optimization flags and target option flags. Use the same
199 hash table for both sets of options. Nodes for building the current
200 optimization and target option nodes. The assumption is most of the time
201 the options created will already be in the hash table, so we avoid
202 allocating and freeing up a node repeatedly. */
203 static GTY (()) tree cl_optimization_node;
204 static GTY (()) tree cl_target_option_node;
206 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
212 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
214 /* General tree->tree mapping structure for use in hash tables. */
217 static GTY ((cache))
218 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
223 static GTY ((cache))
224 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
226 static void set_type_quals (tree, int);
227 static void print_type_hash_statistics (void);
228 static void print_debug_expr_statistics (void);
229 static void print_value_expr_statistics (void);
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
234 bool int_n_enabled_p[NUM_INT_N_ENTS];
235 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
237 bool tree_contains_struct[MAX_TREE_CODES][64];
239 /* Number of operands for each OMP clause. */
240 unsigned const char omp_clause_num_ops[] =
242 0, /* OMP_CLAUSE_ERROR */
243 1, /* OMP_CLAUSE_PRIVATE */
244 1, /* OMP_CLAUSE_SHARED */
245 1, /* OMP_CLAUSE_FIRSTPRIVATE */
246 2, /* OMP_CLAUSE_LASTPRIVATE */
247 5, /* OMP_CLAUSE_REDUCTION */
248 5, /* OMP_CLAUSE_TASK_REDUCTION */
249 5, /* OMP_CLAUSE_IN_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 1, /* OMP_CLAUSE_AFFINITY */
254 2, /* OMP_CLAUSE_ALIGNED */
255 3, /* OMP_CLAUSE_ALLOCATE */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_NONTEMPORAL */
258 1, /* OMP_CLAUSE_UNIFORM */
259 1, /* OMP_CLAUSE_ENTER */
260 1, /* OMP_CLAUSE_LINK */
261 1, /* OMP_CLAUSE_DETACH */
262 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
263 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
264 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
265 1, /* OMP_CLAUSE_INCLUSIVE */
266 1, /* OMP_CLAUSE_EXCLUSIVE */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
271 1, /* OMP_CLAUSE_DOACROSS */
272 2, /* OMP_CLAUSE__CACHE_ */
273 2, /* OMP_CLAUSE_GANG */
274 1, /* OMP_CLAUSE_ASYNC */
275 1, /* OMP_CLAUSE_WAIT */
276 0, /* OMP_CLAUSE_AUTO */
277 0, /* OMP_CLAUSE_SEQ */
278 1, /* OMP_CLAUSE__LOOPTEMP_ */
279 1, /* OMP_CLAUSE__REDUCTEMP_ */
280 1, /* OMP_CLAUSE__CONDTEMP_ */
281 1, /* OMP_CLAUSE__SCANTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_SELF */
284 1, /* OMP_CLAUSE_NUM_THREADS */
285 1, /* OMP_CLAUSE_SCHEDULE */
286 0, /* OMP_CLAUSE_NOWAIT */
287 1, /* OMP_CLAUSE_ORDERED */
288 0, /* OMP_CLAUSE_DEFAULT */
289 3, /* OMP_CLAUSE_COLLAPSE */
290 0, /* OMP_CLAUSE_UNTIED */
291 1, /* OMP_CLAUSE_FINAL */
292 0, /* OMP_CLAUSE_MERGEABLE */
293 1, /* OMP_CLAUSE_DEVICE */
294 1, /* OMP_CLAUSE_DIST_SCHEDULE */
295 0, /* OMP_CLAUSE_INBRANCH */
296 0, /* OMP_CLAUSE_NOTINBRANCH */
297 2, /* OMP_CLAUSE_NUM_TEAMS */
298 1, /* OMP_CLAUSE_THREAD_LIMIT */
299 0, /* OMP_CLAUSE_PROC_BIND */
300 1, /* OMP_CLAUSE_SAFELEN */
301 1, /* OMP_CLAUSE_SIMDLEN */
302 0, /* OMP_CLAUSE_DEVICE_TYPE */
303 0, /* OMP_CLAUSE_FOR */
304 0, /* OMP_CLAUSE_PARALLEL */
305 0, /* OMP_CLAUSE_SECTIONS */
306 0, /* OMP_CLAUSE_TASKGROUP */
307 1, /* OMP_CLAUSE_PRIORITY */
308 1, /* OMP_CLAUSE_GRAINSIZE */
309 1, /* OMP_CLAUSE_NUM_TASKS */
310 0, /* OMP_CLAUSE_NOGROUP */
311 0, /* OMP_CLAUSE_THREADS */
312 0, /* OMP_CLAUSE_SIMD */
313 1, /* OMP_CLAUSE_HINT */
314 0, /* OMP_CLAUSE_DEFAULTMAP */
315 0, /* OMP_CLAUSE_ORDER */
316 0, /* OMP_CLAUSE_BIND */
317 1, /* OMP_CLAUSE_FILTER */
318 1, /* OMP_CLAUSE__SIMDUID_ */
319 0, /* OMP_CLAUSE__SIMT_ */
320 0, /* OMP_CLAUSE_INDEPENDENT */
321 1, /* OMP_CLAUSE_WORKER */
322 1, /* OMP_CLAUSE_VECTOR */
323 1, /* OMP_CLAUSE_NUM_GANGS */
324 1, /* OMP_CLAUSE_NUM_WORKERS */
325 1, /* OMP_CLAUSE_VECTOR_LENGTH */
326 3, /* OMP_CLAUSE_TILE */
327 0, /* OMP_CLAUSE_IF_PRESENT */
328 0, /* OMP_CLAUSE_FINALIZE */
329 0, /* OMP_CLAUSE_NOHOST */
332 const char * const omp_clause_code_name[] =
334 "error_clause",
335 "private",
336 "shared",
337 "firstprivate",
338 "lastprivate",
339 "reduction",
340 "task_reduction",
341 "in_reduction",
342 "copyin",
343 "copyprivate",
344 "linear",
345 "affinity",
346 "aligned",
347 "allocate",
348 "depend",
349 "nontemporal",
350 "uniform",
351 "enter",
352 "link",
353 "detach",
354 "use_device_ptr",
355 "use_device_addr",
356 "is_device_ptr",
357 "inclusive",
358 "exclusive",
359 "from",
360 "to",
361 "map",
362 "has_device_addr",
363 "doacross",
364 "_cache_",
365 "gang",
366 "async",
367 "wait",
368 "auto",
369 "seq",
370 "_looptemp_",
371 "_reductemp_",
372 "_condtemp_",
373 "_scantemp_",
374 "if",
375 "self",
376 "num_threads",
377 "schedule",
378 "nowait",
379 "ordered",
380 "default",
381 "collapse",
382 "untied",
383 "final",
384 "mergeable",
385 "device",
386 "dist_schedule",
387 "inbranch",
388 "notinbranch",
389 "num_teams",
390 "thread_limit",
391 "proc_bind",
392 "safelen",
393 "simdlen",
394 "device_type",
395 "for",
396 "parallel",
397 "sections",
398 "taskgroup",
399 "priority",
400 "grainsize",
401 "num_tasks",
402 "nogroup",
403 "threads",
404 "simd",
405 "hint",
406 "defaultmap",
407 "order",
408 "bind",
409 "filter",
410 "_simduid_",
411 "_simt_",
412 "independent",
413 "worker",
414 "vector",
415 "num_gangs",
416 "num_workers",
417 "vector_length",
418 "tile",
419 "if_present",
420 "finalize",
421 "nohost",
424 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
425 clause names, but for use in diagnostics etc. we would like to use the "user"
426 clause names. */
428 const char *
429 user_omp_clause_code_name (tree clause, bool oacc)
431 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
432 distinguish clauses as seen by the user. See also where front ends do
433 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
434 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
435 switch (OMP_CLAUSE_MAP_KIND (clause))
437 case GOMP_MAP_FORCE_ALLOC:
438 case GOMP_MAP_ALLOC: return "create";
439 case GOMP_MAP_FORCE_TO:
440 case GOMP_MAP_TO: return "copyin";
441 case GOMP_MAP_FORCE_FROM:
442 case GOMP_MAP_FROM: return "copyout";
443 case GOMP_MAP_FORCE_TOFROM:
444 case GOMP_MAP_TOFROM: return "copy";
445 case GOMP_MAP_RELEASE: return "delete";
446 case GOMP_MAP_FORCE_PRESENT: return "present";
447 case GOMP_MAP_ATTACH: return "attach";
448 case GOMP_MAP_FORCE_DETACH:
449 case GOMP_MAP_DETACH: return "detach";
450 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
451 case GOMP_MAP_LINK: return "link";
452 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
453 default: break;
456 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
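/* Worked example (illustrative): for an OpenACC 'copyin (x)' clause the
   front end builds an OMP_CLAUSE_MAP whose map kind is e.g. GOMP_MAP_TO,
   so the switch above turns it back into "copyin" for diagnostics, whereas
   the generic spelling from omp_clause_code_name would be "map".  */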
460 /* Return the tree node structure used by tree code CODE. */
462 static inline enum tree_node_structure_enum
463 tree_node_structure_for_code (enum tree_code code)
465 switch (TREE_CODE_CLASS (code))
467 case tcc_declaration:
468 switch (code)
470 case CONST_DECL: return TS_CONST_DECL;
471 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
472 case FIELD_DECL: return TS_FIELD_DECL;
473 case FUNCTION_DECL: return TS_FUNCTION_DECL;
474 case LABEL_DECL: return TS_LABEL_DECL;
475 case PARM_DECL: return TS_PARM_DECL;
476 case RESULT_DECL: return TS_RESULT_DECL;
477 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
478 case TYPE_DECL: return TS_TYPE_DECL;
479 case VAR_DECL: return TS_VAR_DECL;
480 default: return TS_DECL_NON_COMMON;
483 case tcc_type: return TS_TYPE_NON_COMMON;
485 case tcc_binary:
486 case tcc_comparison:
487 case tcc_expression:
488 case tcc_reference:
489 case tcc_statement:
490 case tcc_unary:
491 case tcc_vl_exp: return TS_EXP;
493 default: /* tcc_constant and tcc_exceptional */
494 break;
497 switch (code)
499 /* tcc_constant cases. */
500 case COMPLEX_CST: return TS_COMPLEX;
501 case FIXED_CST: return TS_FIXED_CST;
502 case INTEGER_CST: return TS_INT_CST;
503 case POLY_INT_CST: return TS_POLY_INT_CST;
504 case REAL_CST: return TS_REAL_CST;
505 case STRING_CST: return TS_STRING;
506 case VECTOR_CST: return TS_VECTOR;
507 case VOID_CST: return TS_TYPED;
509 /* tcc_exceptional cases. */
510 case BLOCK: return TS_BLOCK;
511 case CONSTRUCTOR: return TS_CONSTRUCTOR;
512 case ERROR_MARK: return TS_COMMON;
513 case IDENTIFIER_NODE: return TS_IDENTIFIER;
514 case OMP_CLAUSE: return TS_OMP_CLAUSE;
515 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
516 case PLACEHOLDER_EXPR: return TS_COMMON;
517 case SSA_NAME: return TS_SSA_NAME;
518 case STATEMENT_LIST: return TS_STATEMENT_LIST;
519 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
520 case TREE_BINFO: return TS_BINFO;
521 case TREE_LIST: return TS_LIST;
522 case TREE_VEC: return TS_VEC;
524 default:
525 gcc_unreachable ();
530 /* Initialize tree_contains_struct to describe the hierarchy of tree
531 nodes. */
533 static void
534 initialize_tree_contains_struct (void)
536 unsigned i;
538 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
540 enum tree_code code;
541 enum tree_node_structure_enum ts_code;
543 code = (enum tree_code) i;
544 ts_code = tree_node_structure_for_code (code);
546 /* Mark the TS structure itself. */
547 tree_contains_struct[code][ts_code] = 1;
549 /* Mark all the structures that TS is derived from. */
550 switch (ts_code)
552 case TS_TYPED:
553 case TS_BLOCK:
554 case TS_OPTIMIZATION:
555 case TS_TARGET_OPTION:
556 MARK_TS_BASE (code);
557 break;
559 case TS_COMMON:
560 case TS_INT_CST:
561 case TS_POLY_INT_CST:
562 case TS_REAL_CST:
563 case TS_FIXED_CST:
564 case TS_VECTOR:
565 case TS_STRING:
566 case TS_COMPLEX:
567 case TS_SSA_NAME:
568 case TS_CONSTRUCTOR:
569 case TS_EXP:
570 case TS_STATEMENT_LIST:
571 MARK_TS_TYPED (code);
572 break;
574 case TS_IDENTIFIER:
575 case TS_DECL_MINIMAL:
576 case TS_TYPE_COMMON:
577 case TS_LIST:
578 case TS_VEC:
579 case TS_BINFO:
580 case TS_OMP_CLAUSE:
581 MARK_TS_COMMON (code);
582 break;
584 case TS_TYPE_WITH_LANG_SPECIFIC:
585 MARK_TS_TYPE_COMMON (code);
586 break;
588 case TS_TYPE_NON_COMMON:
589 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
590 break;
592 case TS_DECL_COMMON:
593 MARK_TS_DECL_MINIMAL (code);
594 break;
596 case TS_DECL_WRTL:
597 case TS_CONST_DECL:
598 MARK_TS_DECL_COMMON (code);
599 break;
601 case TS_DECL_NON_COMMON:
602 MARK_TS_DECL_WITH_VIS (code);
603 break;
605 case TS_DECL_WITH_VIS:
606 case TS_PARM_DECL:
607 case TS_LABEL_DECL:
608 case TS_RESULT_DECL:
609 MARK_TS_DECL_WRTL (code);
610 break;
612 case TS_FIELD_DECL:
613 MARK_TS_DECL_COMMON (code);
614 break;
616 case TS_VAR_DECL:
617 MARK_TS_DECL_WITH_VIS (code);
618 break;
620 case TS_TYPE_DECL:
621 case TS_FUNCTION_DECL:
622 MARK_TS_DECL_NON_COMMON (code);
623 break;
625 case TS_TRANSLATION_UNIT_DECL:
626 MARK_TS_DECL_COMMON (code);
627 break;
629 default:
630 gcc_unreachable ();
634 /* Basic consistency checks for attributes used in fold. */
635 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
636 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
637 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
645 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
646 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
651 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
659 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
660 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
662 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
663 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
664 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
665 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
666 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
667 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
668 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
669 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
670 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
671 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
673 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
674 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
678 /* Init tree.cc. */
680 void
681 init_ttree (void)
683 /* Initialize the hash table of types. */
684 type_hash_table
685 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
687 debug_expr_for_decl
688 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
690 value_expr_for_decl
691 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
693 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
695 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
697 int_cst_node = make_int_cst (1, 1);
699 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
701 cl_optimization_node = make_node (OPTIMIZATION_NODE);
702 cl_target_option_node = make_node (TARGET_OPTION_NODE);
704 /* Initialize the tree_contains_struct array. */
705 initialize_tree_contains_struct ();
706 lang_hooks.init_ts ();
710 /* The name of the object as the assembler will see it (but before any
711 translations made by ASM_OUTPUT_LABELREF). Often this is the same
712 as DECL_NAME. It is an IDENTIFIER_NODE. */
713 tree
714 decl_assembler_name (tree decl)
716 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
717 lang_hooks.set_decl_assembler_name (decl);
718 return DECL_ASSEMBLER_NAME_RAW (decl);
721 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
722 (either of which may be NULL). Inform the FE, if this changes the
723 name. */
725 void
726 overwrite_decl_assembler_name (tree decl, tree name)
728 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
729 lang_hooks.overwrite_decl_assembler_name (decl, name);
732 /* Return true if DECL may need an assembler name to be set. */
734 static inline bool
735 need_assembler_name_p (tree decl)
737 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
738 Rule merging. This makes type_odr_p return true on those types during
739 LTO and, by comparing the mangled names, we can tell which types are
740 intended to be equivalent across compilation units.
742 We do not store names of type_in_anonymous_namespace_p.
744 Record, union and enumeration types have linkage that allows us
745 to check type_in_anonymous_namespace_p. We do not mangle compound types
746 that can always be compared structurally.
748 Similarly for builtin types, we compare properties of their main variant.
749 A special case is integer types, where mangling does distinguish
750 between char/signed char/unsigned char etc. Storing names for these lets
751 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
752 See cp/mangle.cc:write_builtin_type for details. */
754 if (TREE_CODE (decl) == TYPE_DECL)
756 if (DECL_NAME (decl)
757 && decl == TYPE_NAME (TREE_TYPE (decl))
758 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
759 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
760 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
761 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
762 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
763 && (type_with_linkage_p (TREE_TYPE (decl))
764 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
765 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
766 return !DECL_ASSEMBLER_NAME_SET_P (decl);
767 return false;
769 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
770 if (!VAR_OR_FUNCTION_DECL_P (decl))
771 return false;
773 /* If DECL already has its assembler name set, it does not need a
774 new one. */
775 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
776 || DECL_ASSEMBLER_NAME_SET_P (decl))
777 return false;
779 /* Abstract decls do not need an assembler name. */
780 if (DECL_ABSTRACT_P (decl))
781 return false;
783 /* For VAR_DECLs, only static, public and external symbols need an
784 assembler name. */
785 if (VAR_P (decl)
786 && !TREE_STATIC (decl)
787 && !TREE_PUBLIC (decl)
788 && !DECL_EXTERNAL (decl))
789 return false;
791 if (TREE_CODE (decl) == FUNCTION_DECL)
793 /* Do not set assembler name on builtins. Allow RTL expansion to
794 decide whether to expand inline or via a regular call. */
795 if (fndecl_built_in_p (decl)
796 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
797 return false;
799 /* Functions represented in the callgraph need an assembler name. */
800 if (cgraph_node::get (decl) != NULL)
801 return true;
803 /* Unused and not public functions don't need an assembler name. */
804 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
805 return false;
808 return true;
811 /* If T needs an assembler name, have one created for it. */
813 void
814 assign_assembler_name_if_needed (tree t)
816 if (need_assembler_name_p (t))
818 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
819 diagnostics that use input_location to show locus
820 information. The problem here is that, at this point,
821 input_location is generally anchored to the end of the file
822 (since the parser is long gone), so we don't have a good
823 position to pin it to.
825 To alleviate this problem, this uses the location of T's
826 declaration. Examples of this are
827 testsuite/g++.dg/template/cond2.C and
828 testsuite/g++.dg/template/pr35240.C. */
829 location_t saved_location = input_location;
830 input_location = DECL_SOURCE_LOCATION (t);
832 decl_assembler_name (t);
834 input_location = saved_location;
838 /* When the target supports COMDAT groups, this indicates which group the
839 DECL is associated with. This can be either an IDENTIFIER_NODE or a
840 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
841 tree
842 decl_comdat_group (const_tree node)
844 struct symtab_node *snode = symtab_node::get (node);
845 if (!snode)
846 return NULL;
847 return snode->get_comdat_group ();
850 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
851 tree
852 decl_comdat_group_id (const_tree node)
854 struct symtab_node *snode = symtab_node::get (node);
855 if (!snode)
856 return NULL;
857 return snode->get_comdat_group_id ();
860 /* When the target supports named sections, return the name of the section
861 NODE is placed in, or NULL if it is in no section. */
862 const char *
863 decl_section_name (const_tree node)
865 struct symtab_node *snode = symtab_node::get (node);
866 if (!snode)
867 return NULL;
868 return snode->get_section ();
871 /* Set the section name of NODE to VALUE, a NUL-terminated string;
872 a NULL VALUE clears any existing section. */
873 void
874 set_decl_section_name (tree node, const char *value)
876 struct symtab_node *snode;
878 if (value == NULL)
880 snode = symtab_node::get (node);
881 if (!snode)
882 return;
884 else if (VAR_P (node))
885 snode = varpool_node::get_create (node);
886 else
887 snode = cgraph_node::get_create (node);
888 snode->set_section (value);
891 /* Set section name of NODE to match the section name of OTHER.
893 set_decl_section_name (decl, other) is equivalent to
894 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
895 efficient. */
896 void
897 set_decl_section_name (tree decl, const_tree other)
899 struct symtab_node *other_node = symtab_node::get (other);
900 if (other_node)
902 struct symtab_node *decl_node;
903 if (VAR_P (decl))
904 decl_node = varpool_node::get_create (decl);
905 else
906 decl_node = cgraph_node::get_create (decl);
907 decl_node->set_section (*other_node);
909 else
911 struct symtab_node *decl_node = symtab_node::get (decl);
912 if (!decl_node)
913 return;
914 decl_node->set_section (NULL);
918 /* Return TLS model of a variable NODE. */
919 enum tls_model
920 decl_tls_model (const_tree node)
922 struct varpool_node *snode = varpool_node::get (node);
923 if (!snode)
924 return TLS_MODEL_NONE;
925 return snode->tls_model;
928 /* Set TLS model of variable NODE to MODEL. */
929 void
930 set_decl_tls_model (tree node, enum tls_model model)
932 struct varpool_node *vnode;
934 if (model == TLS_MODEL_NONE)
936 vnode = varpool_node::get (node);
937 if (!vnode)
938 return;
940 else
941 vnode = varpool_node::get_create (node);
942 vnode->tls_model = model;
945 /* Compute the number of bytes occupied by a tree with code CODE.
946 This function cannot be used for nodes that have variable sizes,
947 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
948 size_t
949 tree_code_size (enum tree_code code)
951 switch (TREE_CODE_CLASS (code))
953 case tcc_declaration: /* A decl node */
954 switch (code)
956 case FIELD_DECL: return sizeof (tree_field_decl);
957 case PARM_DECL: return sizeof (tree_parm_decl);
958 case VAR_DECL: return sizeof (tree_var_decl);
959 case LABEL_DECL: return sizeof (tree_label_decl);
960 case RESULT_DECL: return sizeof (tree_result_decl);
961 case CONST_DECL: return sizeof (tree_const_decl);
962 case TYPE_DECL: return sizeof (tree_type_decl);
963 case FUNCTION_DECL: return sizeof (tree_function_decl);
964 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
965 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
966 case NAMESPACE_DECL:
967 case IMPORTED_DECL:
968 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
969 default:
970 gcc_checking_assert (code >= NUM_TREE_CODES);
971 return lang_hooks.tree_size (code);
974 case tcc_type: /* a type node */
975 switch (code)
977 case OFFSET_TYPE:
978 case ENUMERAL_TYPE:
979 case BOOLEAN_TYPE:
980 case INTEGER_TYPE:
981 case REAL_TYPE:
982 case OPAQUE_TYPE:
983 case POINTER_TYPE:
984 case REFERENCE_TYPE:
985 case NULLPTR_TYPE:
986 case FIXED_POINT_TYPE:
987 case COMPLEX_TYPE:
988 case VECTOR_TYPE:
989 case ARRAY_TYPE:
990 case RECORD_TYPE:
991 case UNION_TYPE:
992 case QUAL_UNION_TYPE:
993 case VOID_TYPE:
994 case FUNCTION_TYPE:
995 case METHOD_TYPE:
996 case BITINT_TYPE:
997 case LANG_TYPE: return sizeof (tree_type_non_common);
998 default:
999 gcc_checking_assert (code >= NUM_TREE_CODES);
1000 return lang_hooks.tree_size (code);
1003 case tcc_reference: /* a reference */
1004 case tcc_expression: /* an expression */
1005 case tcc_statement: /* an expression with side effects */
1006 case tcc_comparison: /* a comparison expression */
1007 case tcc_unary: /* a unary arithmetic expression */
1008 case tcc_binary: /* a binary arithmetic expression */
1009 return (sizeof (struct tree_exp)
1010 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1012 case tcc_constant: /* a constant */
1013 switch (code)
1015 case VOID_CST: return sizeof (tree_typed);
1016 case INTEGER_CST: gcc_unreachable ();
1017 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1018 case REAL_CST: return sizeof (tree_real_cst);
1019 case FIXED_CST: return sizeof (tree_fixed_cst);
1020 case COMPLEX_CST: return sizeof (tree_complex);
1021 case VECTOR_CST: gcc_unreachable ();
1022 case STRING_CST: gcc_unreachable ();
1023 default:
1024 gcc_checking_assert (code >= NUM_TREE_CODES);
1025 return lang_hooks.tree_size (code);
1028 case tcc_exceptional: /* something random, like an identifier. */
1029 switch (code)
1031 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1032 case TREE_LIST: return sizeof (tree_list);
1034 case ERROR_MARK:
1035 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1037 case TREE_VEC: gcc_unreachable ();
1038 case OMP_CLAUSE: gcc_unreachable ();
1040 case SSA_NAME: return sizeof (tree_ssa_name);
1042 case STATEMENT_LIST: return sizeof (tree_statement_list);
1043 case BLOCK: return sizeof (struct tree_block);
1044 case CONSTRUCTOR: return sizeof (tree_constructor);
1045 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1046 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1048 default:
1049 gcc_checking_assert (code >= NUM_TREE_CODES);
1050 return lang_hooks.tree_size (code);
1053 default:
1054 gcc_unreachable ();
1058 /* Compute the number of bytes occupied by NODE. This routine only
1059 looks at TREE_CODE, except for those nodes that have variable sizes. */
1060 size_t
1061 tree_size (const_tree node)
1063 const enum tree_code code = TREE_CODE (node);
1064 switch (code)
1066 case INTEGER_CST:
1067 return (sizeof (struct tree_int_cst)
1068 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1070 case TREE_BINFO:
1071 return (offsetof (struct tree_binfo, base_binfos)
1072 + vec<tree, va_gc>
1073 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1075 case TREE_VEC:
1076 return (sizeof (struct tree_vec)
1077 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1079 case VECTOR_CST:
1080 return (sizeof (struct tree_vector)
1081 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1083 case STRING_CST:
1084 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1086 case OMP_CLAUSE:
1087 return (sizeof (struct tree_omp_clause)
1088 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1089 * sizeof (tree));
1091 default:
1092 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1093 return (sizeof (struct tree_exp)
1094 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1095 else
1096 return tree_code_size (code);
1100 /* Return tree node kind based on tree CODE. */
1102 static tree_node_kind
1103 get_stats_node_kind (enum tree_code code)
1105 enum tree_code_class type = TREE_CODE_CLASS (code);
1107 switch (type)
1109 case tcc_declaration: /* A decl node */
1110 return d_kind;
1111 case tcc_type: /* a type node */
1112 return t_kind;
1113 case tcc_statement: /* an expression with side effects */
1114 return s_kind;
1115 case tcc_reference: /* a reference */
1116 return r_kind;
1117 case tcc_expression: /* an expression */
1118 case tcc_comparison: /* a comparison expression */
1119 case tcc_unary: /* a unary arithmetic expression */
1120 case tcc_binary: /* a binary arithmetic expression */
1121 return e_kind;
1122 case tcc_constant: /* a constant */
1123 return c_kind;
1124 case tcc_exceptional: /* something random, like an identifier. */
1125 switch (code)
1127 case IDENTIFIER_NODE:
1128 return id_kind;
1129 case TREE_VEC:
1130 return vec_kind;
1131 case TREE_BINFO:
1132 return binfo_kind;
1133 case SSA_NAME:
1134 return ssa_name_kind;
1135 case BLOCK:
1136 return b_kind;
1137 case CONSTRUCTOR:
1138 return constr_kind;
1139 case OMP_CLAUSE:
1140 return omp_clause_kind;
1141 default:
1142 return x_kind;
1144 break;
1145 case tcc_vl_exp:
1146 return e_kind;
1147 default:
1148 gcc_unreachable ();
1152 /* Record interesting allocation statistics for a tree node with CODE
1153 and LENGTH. */
1155 static void
1156 record_node_allocation_statistics (enum tree_code code, size_t length)
1158 if (!GATHER_STATISTICS)
1159 return;
1161 tree_node_kind kind = get_stats_node_kind (code);
1163 tree_code_counts[(int) code]++;
1164 tree_node_counts[(int) kind]++;
1165 tree_node_sizes[(int) kind] += length;
1168 /* Allocate and return a new UID from the DECL_UID namespace. */
1171 allocate_decl_uid (void)
1173 return next_decl_uid++;
1176 /* Return a newly allocated node of code CODE. For decl and type
1177 nodes, some other fields are initialized. The rest of the node is
1178 initialized to zero. This function cannot be used for TREE_VEC,
1179 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1180 tree_code_size.
1182 Achoo! I got a code in the node. */
1184 tree
1185 make_node (enum tree_code code MEM_STAT_DECL)
1187 tree t;
1188 enum tree_code_class type = TREE_CODE_CLASS (code);
1189 size_t length = tree_code_size (code);
1191 record_node_allocation_statistics (code, length);
1193 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1194 TREE_SET_CODE (t, code);
1196 switch (type)
1198 case tcc_statement:
1199 if (code != DEBUG_BEGIN_STMT)
1200 TREE_SIDE_EFFECTS (t) = 1;
1201 break;
1203 case tcc_declaration:
1204 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1206 if (code == FUNCTION_DECL)
1208 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1209 SET_DECL_MODE (t, FUNCTION_MODE);
1211 else
1212 SET_DECL_ALIGN (t, 1);
1214 DECL_SOURCE_LOCATION (t) = input_location;
1215 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1216 DECL_UID (t) = --next_debug_decl_uid;
1217 else
1219 DECL_UID (t) = allocate_decl_uid ();
1220 SET_DECL_PT_UID (t, -1);
1222 if (TREE_CODE (t) == LABEL_DECL)
1223 LABEL_DECL_UID (t) = -1;
1225 break;
1227 case tcc_type:
1228 TYPE_UID (t) = next_type_uid++;
1229 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1230 TYPE_USER_ALIGN (t) = 0;
1231 TYPE_MAIN_VARIANT (t) = t;
1232 TYPE_CANONICAL (t) = t;
1234 /* Default to no attributes for type, but let target change that. */
1235 TYPE_ATTRIBUTES (t) = NULL_TREE;
1236 targetm.set_default_type_attributes (t);
1238 /* We have not yet computed the alias set for this type. */
1239 TYPE_ALIAS_SET (t) = -1;
1240 break;
1242 case tcc_constant:
1243 TREE_CONSTANT (t) = 1;
1244 break;
1246 case tcc_expression:
1247 switch (code)
1249 case INIT_EXPR:
1250 case MODIFY_EXPR:
1251 case VA_ARG_EXPR:
1252 case PREDECREMENT_EXPR:
1253 case PREINCREMENT_EXPR:
1254 case POSTDECREMENT_EXPR:
1255 case POSTINCREMENT_EXPR:
1256 /* All of these have side-effects, no matter what their
1257 operands are. */
1258 TREE_SIDE_EFFECTS (t) = 1;
1259 break;
1261 default:
1262 break;
1264 break;
1266 case tcc_exceptional:
1267 switch (code)
1269 case TARGET_OPTION_NODE:
1270 TREE_TARGET_OPTION(t)
1271 = ggc_cleared_alloc<struct cl_target_option> ();
1272 break;
1274 case OPTIMIZATION_NODE:
1275 TREE_OPTIMIZATION (t)
1276 = ggc_cleared_alloc<struct cl_optimization> ();
1277 break;
1279 default:
1280 break;
1282 break;
1284 default:
1285 /* Other classes need no special treatment. */
1286 break;
1289 return t;
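/* Example (illustrative sketch): allocating a raw type node.  Front ends
   normally go through helpers such as make_unsigned_type, but the
   invariants established above can be seen directly:

     tree t = make_node (INTEGER_TYPE);
     gcc_assert (TYPE_MAIN_VARIANT (t) == t && TYPE_CANONICAL (t) == t);
     gcc_assert (TYPE_ALIGN (t) == BITS_PER_UNIT);  */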
1292 /* Free tree node. */
1294 void
1295 free_node (tree node)
1297 enum tree_code code = TREE_CODE (node);
1298 if (GATHER_STATISTICS)
1300 enum tree_node_kind kind = get_stats_node_kind (code);
1302 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1303 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1304 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1306 tree_code_counts[(int) TREE_CODE (node)]--;
1307 tree_node_counts[(int) kind]--;
1308 tree_node_sizes[(int) kind] -= tree_size (node);
1310 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1311 vec_free (CONSTRUCTOR_ELTS (node));
1312 else if (code == BLOCK)
1313 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1314 else if (code == TREE_BINFO)
1315 vec_free (BINFO_BASE_ACCESSES (node));
1316 else if (code == OPTIMIZATION_NODE)
1317 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1318 else if (code == TARGET_OPTION_NODE)
1319 cl_target_option_free (TREE_TARGET_OPTION (node));
1320 ggc_free (node);
1323 /* Return a new node with the same contents as NODE except that its
1324 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1326 tree
1327 copy_node (tree node MEM_STAT_DECL)
1329 tree t;
1330 enum tree_code code = TREE_CODE (node);
1331 size_t length;
1333 gcc_assert (code != STATEMENT_LIST);
1335 length = tree_size (node);
1336 record_node_allocation_statistics (code, length);
1337 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1338 memcpy (t, node, length);
1340 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1341 TREE_CHAIN (t) = 0;
1342 TREE_ASM_WRITTEN (t) = 0;
1343 TREE_VISITED (t) = 0;
1345 if (TREE_CODE_CLASS (code) == tcc_declaration)
1347 if (code == DEBUG_EXPR_DECL)
1348 DECL_UID (t) = --next_debug_decl_uid;
1349 else
1351 DECL_UID (t) = allocate_decl_uid ();
1352 if (DECL_PT_UID_SET_P (node))
1353 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1355 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1356 && DECL_HAS_VALUE_EXPR_P (node))
1358 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1359 DECL_HAS_VALUE_EXPR_P (t) = 1;
1361 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1362 if (VAR_P (node))
1364 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1365 t->decl_with_vis.symtab_node = NULL;
1367 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1369 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1370 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1372 if (TREE_CODE (node) == FUNCTION_DECL)
1374 DECL_STRUCT_FUNCTION (t) = NULL;
1375 t->decl_with_vis.symtab_node = NULL;
1378 else if (TREE_CODE_CLASS (code) == tcc_type)
1380 TYPE_UID (t) = next_type_uid++;
1381 /* The following is so that the debug code for
1382 the copy is different from the original type.
1383 The two statements usually duplicate each other
1384 (because they clear fields of the same union),
1385 but the optimizer should catch that. */
1386 TYPE_SYMTAB_ADDRESS (t) = 0;
1387 TYPE_SYMTAB_DIE (t) = 0;
1389 /* Do not copy the values cache. */
1390 if (TYPE_CACHED_VALUES_P (t))
1392 TYPE_CACHED_VALUES_P (t) = 0;
1393 TYPE_CACHED_VALUES (t) = NULL_TREE;
1396 else if (code == TARGET_OPTION_NODE)
1398 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1399 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1400 sizeof (struct cl_target_option));
1402 else if (code == OPTIMIZATION_NODE)
1404 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1405 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1406 sizeof (struct cl_optimization));
1409 return t;
1412 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1413 For example, this can copy a list made of TREE_LIST nodes. */
1415 tree
1416 copy_list (tree list)
1418 tree head;
1419 tree prev, next;
1421 if (list == 0)
1422 return 0;
1424 head = prev = copy_node (list);
1425 next = TREE_CHAIN (list);
1426 while (next)
1428 TREE_CHAIN (prev) = copy_node (next);
1429 prev = TREE_CHAIN (prev);
1430 next = TREE_CHAIN (next);
1432 return head;
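/* Example (illustrative sketch): duplicating a TREE_LIST chain, e.g. a
   type's attribute list, so the copy can be edited independently:

     tree attrs = copy_list (TYPE_ATTRIBUTES (type));

   Only the TREE_LIST spine is copied; the TREE_PURPOSE and TREE_VALUE
   operands are still shared with the original chain.  */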
1436 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1437 INTEGER_CST with value CST and type TYPE. */
1439 static unsigned int
1440 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1442 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1443 /* We need extra HWIs if CST is an unsigned integer with its
1444 upper bit set. */
1445 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1446 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1447 return cst.get_len ();
1450 /* Return a new INTEGER_CST with value CST and type TYPE. */
1452 static tree
1453 build_new_int_cst (tree type, const wide_int &cst)
1455 unsigned int len = cst.get_len ();
1456 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1457 tree nt = make_int_cst (len, ext_len);
1459 if (len < ext_len)
1461 --ext_len;
1462 TREE_INT_CST_ELT (nt, ext_len)
1463 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1464 for (unsigned int i = len; i < ext_len; ++i)
1465 TREE_INT_CST_ELT (nt, i) = -1;
1467 else if (TYPE_UNSIGNED (type)
1468 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1470 len--;
1471 TREE_INT_CST_ELT (nt, len)
1472 = zext_hwi (cst.elt (len),
1473 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1476 for (unsigned int i = 0; i < len; i++)
1477 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1478 TREE_TYPE (nt) = type;
1479 return nt;
1482 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1484 static tree
1485 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1486 CXX_MEM_STAT_INFO)
1488 size_t length = sizeof (struct tree_poly_int_cst);
1489 record_node_allocation_statistics (POLY_INT_CST, length);
1491 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1493 TREE_SET_CODE (t, POLY_INT_CST);
1494 TREE_CONSTANT (t) = 1;
1495 TREE_TYPE (t) = type;
1496 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1497 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1498 return t;
1501 /* Create a constant tree that contains CST sign-extended to TYPE. */
1503 tree
1504 build_int_cst (tree type, poly_int64 cst)
1506 /* Support legacy code. */
1507 if (!type)
1508 type = integer_type_node;
1510 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1513 /* Create a constant tree that contains CST zero-extended to TYPE. */
1515 tree
1516 build_int_cstu (tree type, poly_uint64 cst)
1518 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1521 /* Create a constant tree that contains CST sign-extended to TYPE. */
1523 tree
1524 build_int_cst_type (tree type, poly_int64 cst)
1526 gcc_assert (type);
1527 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
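/* Example (illustrative sketch): the three builders above differ only in
   how CST is extended and whether TYPE may be NULL:

     tree a = build_int_cst (integer_type_node, -1);    sign-extended
     tree b = build_int_cstu (size_type_node, 42);      zero-extended
     tree c = build_int_cst_type (char_type_node, 7);   TYPE must be non-NULL

   Small values come back from the per-type cache, so repeated requests
   for the same value and type return the same shared node.  */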
1530 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1531 of CST is assumed to be the same as the signedness of TYPE. */
1533 tree
1534 double_int_to_tree (tree type, double_int cst)
1536 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1539 /* We force the wide_int CST to the range of the type TYPE by sign or
1540 zero extending it. OVERFLOWABLE indicates if we are interested in
1541 overflow of the value, when >0 we are only interested in signed
1542 overflow, for <0 we are interested in any overflow. OVERFLOWED
1543 indicates whether overflow has already occurred. We force
1544 CST's value to be within the range of TYPE (by setting to 0 or 1 all
1545 the bits outside the type's range). We set TREE_OVERFLOW if
1546 any of the following holds:
1547 OVERFLOWED is nonzero,
1548 or OVERFLOWABLE is >0 and signed overflow occurs
1549 or OVERFLOWABLE is <0 and any overflow occurs
1550 We return a new tree node for the extended wide_int. The node
1551 is shared if no overflow flags are set. */
1554 tree
1555 force_fit_type (tree type, const poly_wide_int_ref &cst,
1556 int overflowable, bool overflowed)
1558 signop sign = TYPE_SIGN (type);
1560 /* If we need to set overflow flags, return a new unshared node. */
1561 if (overflowed || !wi::fits_to_tree_p (cst, type))
1563 if (overflowed
1564 || overflowable < 0
1565 || (overflowable > 0 && sign == SIGNED))
1567 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1568 sign);
1569 tree t;
1570 if (tmp.is_constant ())
1571 t = build_new_int_cst (type, tmp.coeffs[0]);
1572 else
1574 tree coeffs[NUM_POLY_INT_COEFFS];
1575 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1577 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1578 TREE_OVERFLOW (coeffs[i]) = 1;
1580 t = build_new_poly_int_cst (type, coeffs);
1582 TREE_OVERFLOW (t) = 1;
1583 return t;
1587 /* Else build a shared node. */
1588 return wide_int_to_tree (type, cst);
1591 /* These are the hash table functions for the hash table of INTEGER_CST
1592 nodes of a sizetype. */
1594 /* Return the hash code for X, an INTEGER_CST. */
1596 hashval_t
1597 int_cst_hasher::hash (tree x)
1599 const_tree const t = x;
1600 hashval_t code = TYPE_UID (TREE_TYPE (t));
1601 int i;
1603 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1604 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1606 return code;
1609 /* Return true if the value represented by X (an INTEGER_CST tree node)
1610 is the same as that represented by Y, also an INTEGER_CST tree node. */
1612 bool
1613 int_cst_hasher::equal (tree x, tree y)
1615 const_tree const xt = x;
1616 const_tree const yt = y;
1618 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1619 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1620 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1621 return false;
1623 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1624 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1625 return false;
1627 return true;
1630 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1631 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1632 number of slots that can be cached for the type. */
1634 static inline tree
1635 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1636 int slot, int max_slots)
1638 gcc_checking_assert (slot >= 0);
1639 /* Initialize cache. */
1640 if (!TYPE_CACHED_VALUES_P (type))
1642 TYPE_CACHED_VALUES_P (type) = 1;
1643 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1645 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1646 if (!t)
1648 /* Create a new shared int. */
1649 t = build_new_int_cst (type, cst);
1650 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1652 return t;
1655 /* Create an INT_CST node of TYPE and value CST.
1656 The returned node is always shared. For small integers we use a
1657 per-type vector cache, for larger ones we use a single hash table.
1658 The value is extended from its precision according to the sign of
1659 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1660 the upper bits and ensures that hashing and value equality based
1661 upon the underlying HOST_WIDE_INTs works without masking. */
1663 static tree
1664 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1666 tree t;
1667 int ix = -1;
1668 int limit = 0;
1670 gcc_assert (type);
1671 unsigned int prec = TYPE_PRECISION (type);
1672 signop sgn = TYPE_SIGN (type);
1674 /* Verify that everything is canonical. */
1675 int l = pcst.get_len ();
1676 if (l > 1)
1678 if (pcst.elt (l - 1) == 0)
1679 gcc_checking_assert (pcst.elt (l - 2) < 0);
1680 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1681 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1684 wide_int cst = wide_int::from (pcst, prec, sgn);
1685 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1687 enum tree_code code = TREE_CODE (type);
1688 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1690 /* Cache NULL pointer and zero bounds. */
1691 if (cst == 0)
1692 ix = 0;
1693 /* Cache upper bounds of pointers. */
1694 else if (cst == wi::max_value (prec, sgn))
1695 ix = 1;
1696 /* Cache 1 which is used for a non-zero range. */
1697 else if (cst == 1)
1698 ix = 2;
1700 if (ix >= 0)
1702 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1703 /* Make sure no one is clobbering the shared constant. */
1704 gcc_checking_assert (TREE_TYPE (t) == type
1705 && cst == wi::to_wide (t));
1706 return t;
1709 if (ext_len == 1)
1711 /* We just need to store a single HOST_WIDE_INT. */
1712 HOST_WIDE_INT hwi;
1713 if (TYPE_UNSIGNED (type))
1714 hwi = cst.to_uhwi ();
1715 else
1716 hwi = cst.to_shwi ();
1718 switch (code)
1720 case NULLPTR_TYPE:
1721 gcc_assert (hwi == 0);
1722 /* Fallthru. */
1724 case POINTER_TYPE:
1725 case REFERENCE_TYPE:
1726 /* Ignore pointers, as they were already handled above. */
1727 break;
1729 case BOOLEAN_TYPE:
1730 /* Cache false or true. */
1731 limit = 2;
1732 if (IN_RANGE (hwi, 0, 1))
1733 ix = hwi;
1734 break;
1736 case INTEGER_TYPE:
1737 case OFFSET_TYPE:
1738 case BITINT_TYPE:
1739 if (TYPE_SIGN (type) == UNSIGNED)
1741 /* Cache [0, N). */
1742 limit = param_integer_share_limit;
1743 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1744 ix = hwi;
1746 else
1748 /* Cache [-1, N). */
1749 limit = param_integer_share_limit + 1;
1750 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1751 ix = hwi + 1;
1753 break;
1755 case ENUMERAL_TYPE:
1756 break;
1758 default:
1759 gcc_unreachable ();
1762 if (ix >= 0)
1764 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1765 /* Make sure no one is clobbering the shared constant. */
1766 gcc_checking_assert (TREE_TYPE (t) == type
1767 && TREE_INT_CST_NUNITS (t) == 1
1768 && TREE_INT_CST_EXT_NUNITS (t) == 1
1769 && TREE_INT_CST_ELT (t, 0) == hwi);
1770 return t;
1772 else
1774 /* Use the cache of larger shared ints, using int_cst_node as
1775 a temporary. */
1777 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1778 TREE_TYPE (int_cst_node) = type;
1780 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1781 t = *slot;
1782 if (!t)
1784 /* Insert this one into the hash table. */
1785 t = int_cst_node;
1786 *slot = t;
1787 /* Make a new node for next time round. */
1788 int_cst_node = make_int_cst (1, 1);
1792 else
1794 /* The value either hashes properly or we drop it on the floor
1795 for the gc to take care of. There will not be enough of them
1796 to worry about. */
1798 tree nt = build_new_int_cst (type, cst);
1799 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1800 t = *slot;
1801 if (!t)
1803 /* Insert this one into the hash table. */
1804 t = nt;
1805 *slot = t;
1807 else
1808 ggc_free (nt);
1811 return t;
1814 hashval_t
1815 poly_int_cst_hasher::hash (tree t)
1817 inchash::hash hstate;
1819 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1820 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1821 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1823 return hstate.end ();
1826 bool
1827 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1829 if (TREE_TYPE (x) != y.first)
1830 return false;
1831 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1832 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1833 return false;
1834 return true;
1837 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1838 The elements must also have type TYPE. */
1840 tree
1841 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1843 unsigned int prec = TYPE_PRECISION (type);
1844 gcc_assert (prec <= values.coeffs[0].get_precision ());
1845 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1847 inchash::hash h;
1848 h.add_int (TYPE_UID (type));
1849 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1850 h.add_wide_int (c.coeffs[i]);
1851 poly_int_cst_hasher::compare_type comp (type, &c);
1852 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1853 INSERT);
1854 if (*slot == NULL_TREE)
1856 tree coeffs[NUM_POLY_INT_COEFFS];
1857 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1858 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1859 *slot = build_new_poly_int_cst (type, coeffs);
1861 return *slot;
1864 /* Create a constant tree with value VALUE in type TYPE. */
1866 tree
1867 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1869 if (value.is_constant ())
1870 return wide_int_to_tree_1 (type, value.coeffs[0]);
1871 return build_poly_int_cst (type, value);
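/* Example (illustrative sketch): wide_int_to_tree is the usual way to turn
   the result of wide-int arithmetic back into a constant tree, e.g.

     wide_int sum = wi::add (wi::to_wide (op0), wi::to_wide (op1));
     tree folded = wide_int_to_tree (TREE_TYPE (op0), sum);

   assuming OP0 and OP1 are INTEGER_CSTs of the same type.  */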
1874 /* Insert INTEGER_CST T into a cache of integer constants, and return
1875 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1876 is false, and T falls into the type's 'smaller values' range, there
1877 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1878 or the value is large, should an existing entry exist, it is
1879 returned (rather than inserting T). */
1881 tree
1882 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1884 tree type = TREE_TYPE (t);
1885 int ix = -1;
1886 int limit = 0;
1887 int prec = TYPE_PRECISION (type);
1889 gcc_assert (!TREE_OVERFLOW (t));
1891 /* The caching indices here must match those in
1892 wide_int_to_tree_1. */
1893 switch (TREE_CODE (type))
1895 case NULLPTR_TYPE:
1896 gcc_checking_assert (integer_zerop (t));
1897 /* Fallthru. */
1899 case POINTER_TYPE:
1900 case REFERENCE_TYPE:
1902 if (integer_zerop (t))
1903 ix = 0;
1904 else if (integer_onep (t))
1905 ix = 2;
1907 if (ix >= 0)
1908 limit = 3;
1910 break;
1912 case BOOLEAN_TYPE:
1913 /* Cache false or true. */
1914 limit = 2;
1915 if (wi::ltu_p (wi::to_wide (t), 2))
1916 ix = TREE_INT_CST_ELT (t, 0);
1917 break;
1919 case INTEGER_TYPE:
1920 case OFFSET_TYPE:
1921 case BITINT_TYPE:
1922 if (TYPE_UNSIGNED (type))
1924 /* Cache 0..N */
1925 limit = param_integer_share_limit;
1927 /* This is a little hokey, but if the prec is smaller than
1928 what is necessary to hold param_integer_share_limit, then the
1929 obvious test will not get the correct answer. */
1930 if (prec < HOST_BITS_PER_WIDE_INT)
1932 if (tree_to_uhwi (t)
1933 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1934 ix = tree_to_uhwi (t);
1936 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1937 ix = tree_to_uhwi (t);
1939 else
1941 /* Cache -1..N */
1942 limit = param_integer_share_limit + 1;
1944 if (integer_minus_onep (t))
1945 ix = 0;
1946 else if (!wi::neg_p (wi::to_wide (t)))
1948 if (prec < HOST_BITS_PER_WIDE_INT)
1950 if (tree_to_shwi (t) < param_integer_share_limit)
1951 ix = tree_to_shwi (t) + 1;
1953 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1954 ix = tree_to_shwi (t) + 1;
1957 break;
1959 case ENUMERAL_TYPE:
1960 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1961 members. */
1962 break;
1964 default:
1965 gcc_unreachable ();
1968 if (ix >= 0)
1970 /* Look for it in the type's vector of small shared ints. */
1971 if (!TYPE_CACHED_VALUES_P (type))
1973 TYPE_CACHED_VALUES_P (type) = 1;
1974 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1977 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1979 gcc_checking_assert (might_duplicate);
1980 t = r;
1982 else
1983 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1985 else
1987 /* Use the cache of larger shared ints. */
1988 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1989 if (tree r = *slot)
1991 /* If there is already an entry for the number verify it's the
1992 same value. */
1993 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1994 /* And return the cached value. */
1995 t = r;
1997 else
1998 /* Otherwise insert this one into the hash table. */
1999 *slot = t;
2002 return t;
2006 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
2007 and the rest are zeros. */
2009 tree
2010 build_low_bits_mask (tree type, unsigned bits)
2012 gcc_assert (bits <= TYPE_PRECISION (type));
2014 return wide_int_to_tree (type, wi::mask (bits, false,
2015 TYPE_PRECISION (type)));
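/* Illustrative sketch (not part of GCC): build the constant 0xff in
   unsigned int, i.e. a mask of the low 8 bits, using the function above.  */

static tree ATTRIBUTE_UNUSED
example_low_byte_mask (void)
{
  return build_low_bits_mask (unsigned_type_node, 8);
}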
2018 /* Checks that X is integer constant that can be expressed in (unsigned)
2019 HOST_WIDE_INT without loss of precision. */
2021 bool
2022 cst_and_fits_in_hwi (const_tree x)
2024 return (TREE_CODE (x) == INTEGER_CST
2025 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2028 /* Build a newly constructed VECTOR_CST with the given values of
2029 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2031 tree
2032 make_vector (unsigned log2_npatterns,
2033 unsigned int nelts_per_pattern MEM_STAT_DECL)
2035 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2036 tree t;
2037 unsigned npatterns = 1 << log2_npatterns;
2038 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2039 unsigned length = (sizeof (struct tree_vector)
2040 + (encoded_nelts - 1) * sizeof (tree));
2042 record_node_allocation_statistics (VECTOR_CST, length);
2044 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2046 TREE_SET_CODE (t, VECTOR_CST);
2047 TREE_CONSTANT (t) = 1;
2048 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2049 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2051 return t;
2054 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2055 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2057 tree
2058 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2060 if (vec_safe_length (v) == 0)
2061 return build_zero_cst (type);
2063 unsigned HOST_WIDE_INT idx, nelts;
2064 tree value;
2066 /* We can't construct a VECTOR_CST for a variable number of elements. */
2067 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2068 tree_vector_builder vec (type, nelts, 1);
2069 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2071 if (TREE_CODE (value) == VECTOR_CST)
2073 /* If NELTS is constant then this must be too. */
2074 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2075 for (unsigned i = 0; i < sub_nelts; ++i)
2076 vec.quick_push (VECTOR_CST_ELT (value, i));
2078 else
2079 vec.quick_push (value);
2081 while (vec.length () < nelts)
2082 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2084 return vec.build ();
2087 /* Build a vector of type VECTYPE where all the elements are SCs. */
2088 tree
2089 build_vector_from_val (tree vectype, tree sc)
2091 unsigned HOST_WIDE_INT i, nunits;
2093 if (sc == error_mark_node)
2094 return sc;
2096 /* Verify that the vector type is suitable for SC. Note that there
2097 is some inconsistency in the type-system with respect to restrict
2098 qualifications of pointers. Vector types always have a main-variant
2099 element type and the qualification is applied to the vector-type.
2100 So TREE_TYPE (vector-type) does not return a properly qualified
2101 vector element-type. */
2102 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2103 TREE_TYPE (vectype)));
2105 if (CONSTANT_CLASS_P (sc))
2107 tree_vector_builder v (vectype, 1, 1);
2108 v.quick_push (sc);
2109 return v.build ();
2111 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2112 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2113 else
2115 vec<constructor_elt, va_gc> *v;
2116 vec_alloc (v, nunits);
2117 for (i = 0; i < nunits; ++i)
2118 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2119 return build_constructor (vectype, v);
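/* Illustrative sketch (not part of GCC): splat the integer constant 1
   across a four-element integer vector type built only for this example.
   Because the element is a constant, the result is a VECTOR_CST.  */

static tree ATTRIBUTE_UNUSED
example_splat_one (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  return build_vector_from_val (v4si, build_one_cst (integer_type_node));
}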
2123 /* If TYPE is not a vector type, just return SC, otherwise return
2124 build_vector_from_val (TYPE, SC). */
2126 tree
2127 build_uniform_cst (tree type, tree sc)
2129 if (!VECTOR_TYPE_P (type))
2130 return sc;
2132 return build_vector_from_val (type, sc);
2135 /* Build a vector series of type TYPE in which element I has the value
2136 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2137 and a VEC_SERIES_EXPR otherwise. */
2139 tree
2140 build_vec_series (tree type, tree base, tree step)
2142 if (integer_zerop (step))
2143 return build_vector_from_val (type, base);
2144 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2146 tree_vector_builder builder (type, 1, 3);
2147 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2148 wi::to_wide (base) + wi::to_wide (step));
2149 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2150 wi::to_wide (elt1) + wi::to_wide (step));
2151 builder.quick_push (base);
2152 builder.quick_push (elt1);
2153 builder.quick_push (elt2);
2154 return builder.build ();
2156 return build2 (VEC_SERIES_EXPR, type, base, step);
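/* Illustrative sketch (not part of GCC): the linear series { 0, 1, 2, 3 }
   for a four-element integer vector type built only for this example.
   With constant BASE and STEP the result is a VECTOR_CST.  */

static tree ATTRIBUTE_UNUSED
example_iota_series (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  return build_vec_series (v4si,
			   build_int_cst (integer_type_node, 0),
			   build_int_cst (integer_type_node, 1));
}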
2159 /* Return a vector with the same number of units and number of bits
2160 as VEC_TYPE, but in which the elements are a linear series of unsigned
2161 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2163 tree
2164 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2166 tree index_vec_type = vec_type;
2167 tree index_elt_type = TREE_TYPE (vec_type);
2168 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2169 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2171 index_elt_type = build_nonstandard_integer_type
2172 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2173 index_vec_type = build_vector_type (index_elt_type, nunits);
2176 tree_vector_builder v (index_vec_type, 1, 3);
2177 for (unsigned int i = 0; i < 3; ++i)
2178 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2179 return v.build ();
2182 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2183 elements are A and the rest are B. */
2185 tree
2186 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2188 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2189 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2190 /* Optimize the constant case. */
2191 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2192 count /= 2;
2193 tree_vector_builder builder (vec_type, count, 2);
2194 for (unsigned int i = 0; i < count * 2; ++i)
2195 builder.quick_push (i < num_a ? a : b);
2196 return builder.build ();
2199 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2200 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2202 void
2203 recompute_constructor_flags (tree c)
2205 unsigned int i;
2206 tree val;
2207 bool constant_p = true;
2208 bool side_effects_p = false;
2209 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2211 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2213 /* Mostly ctors will have elts that don't have side-effects, so
2214 the usual case is to scan all the elements. Hence a single
2215 loop for both const and side effects, rather than one loop
2216 each (with early outs). */
2217 if (!TREE_CONSTANT (val))
2218 constant_p = false;
2219 if (TREE_SIDE_EFFECTS (val))
2220 side_effects_p = true;
2223 TREE_SIDE_EFFECTS (c) = side_effects_p;
2224 TREE_CONSTANT (c) = constant_p;
2227 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2228 CONSTRUCTOR C. */
2230 void
2231 verify_constructor_flags (tree c)
2233 unsigned int i;
2234 tree val;
2235 bool constant_p = TREE_CONSTANT (c);
2236 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2237 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2239 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2241 if (constant_p && !TREE_CONSTANT (val))
2242 internal_error ("non-constant element in constant CONSTRUCTOR");
2243 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2244 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2248 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2249 are in the vec pointed to by VALS. */
2250 tree
2251 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2253 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2255 TREE_TYPE (c) = type;
2256 CONSTRUCTOR_ELTS (c) = vals;
2258 recompute_constructor_flags (c);
2260 return c;
2263 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2264 INDEX and VALUE. */
2265 tree
2266 build_constructor_single (tree type, tree index, tree value)
2268 vec<constructor_elt, va_gc> *v;
2269 constructor_elt elt = {index, value};
2271 vec_alloc (v, 1);
2272 v->quick_push (elt);
2274 return build_constructor (type, v);
2278 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2279 are in a list pointed to by VALS. */
2280 tree
2281 build_constructor_from_list (tree type, tree vals)
2283 tree t;
2284 vec<constructor_elt, va_gc> *v = NULL;
2286 if (vals)
2288 vec_alloc (v, list_length (vals));
2289 for (t = vals; t; t = TREE_CHAIN (t))
2290 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2293 return build_constructor (type, v);
2296 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2297 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2298 fields in the constructor remain null. */
2300 tree
2301 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2303 vec<constructor_elt, va_gc> *v = NULL;
2305 for (tree t : vals)
2306 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2308 return build_constructor (type, v);
2311 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2312 of elements, provided as index/value pairs. */
2314 tree
2315 build_constructor_va (tree type, int nelts, ...)
2317 vec<constructor_elt, va_gc> *v = NULL;
2318 va_list p;
2320 va_start (p, nelts);
2321 vec_alloc (v, nelts);
2322 while (nelts--)
2324 tree index = va_arg (p, tree);
2325 tree value = va_arg (p, tree);
2326 CONSTRUCTOR_APPEND_ELT (v, index, value);
2328 va_end (p);
2329 return build_constructor (type, v);
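/* Illustrative sketch (not part of GCC): a CONSTRUCTOR for the initializer
   { 10, 20 } of a two-element int array type built only for this example;
   the variadic arguments are alternating index/value pairs.  */

static tree ATTRIBUTE_UNUSED
example_array_constructor (void)
{
  tree arr = build_array_type_nelts (integer_type_node, 2);
  return build_constructor_va (arr, 2,
			       size_int (0),
			       build_int_cst (integer_type_node, 10),
			       size_int (1),
			       build_int_cst (integer_type_node, 20));
}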
2332 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2334 tree
2335 build_clobber (tree type, enum clobber_kind kind)
2337 tree clobber = build_constructor (type, NULL);
2338 TREE_THIS_VOLATILE (clobber) = true;
2339 CLOBBER_KIND (clobber) = kind;
2340 return clobber;
2343 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2345 tree
2346 build_fixed (tree type, FIXED_VALUE_TYPE f)
2348 tree v;
2349 FIXED_VALUE_TYPE *fp;
2351 v = make_node (FIXED_CST);
2352 fp = ggc_alloc<fixed_value> ();
2353 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2355 TREE_TYPE (v) = type;
2356 TREE_FIXED_CST_PTR (v) = fp;
2357 return v;
2360 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2362 tree
2363 build_real (tree type, REAL_VALUE_TYPE d)
2365 tree v;
2366 int overflow = 0;
2368 /* dconst{0,1,2,m1,half} are used in various places in
2369 the middle-end and optimizers, allow them here
2370 even for decimal floating point types as an exception
2371 by converting them to decimal. */
2372 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2373 && (d.cl == rvc_normal || d.cl == rvc_zero)
2374 && !d.decimal)
2376 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2377 decimal_real_from_string (&d, "1");
2378 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2379 decimal_real_from_string (&d, "2");
2380 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2381 decimal_real_from_string (&d, "-1");
2382 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2383 decimal_real_from_string (&d, "0.5");
2384 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2386 /* Make sure to give zero the minimum quantum exponent for
2387 the type (which corresponds to all bits zero). */
2388 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2389 char buf[16];
2390 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2391 decimal_real_from_string (&d, buf);
2393 else
2394 gcc_unreachable ();
2397 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2398 Consider doing it via real_convert now. */
2400 v = make_node (REAL_CST);
2401 TREE_TYPE (v) = type;
2402 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2403 TREE_OVERFLOW (v) = overflow;
2404 return v;
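/* Illustrative sketch (not part of GCC): the REAL_CST 1.0 of type double,
   using the predefined real value dconst1.  */

static tree ATTRIBUTE_UNUSED
example_real_one (void)
{
  return build_real (double_type_node, dconst1);
}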
2407 /* Like build_real, but first truncate D to the type. */
2409 tree
2410 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2412 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2415 /* Return a new REAL_CST node whose type is TYPE
2416 and whose value is the integer value of the INTEGER_CST node I. */
2418 REAL_VALUE_TYPE
2419 real_value_from_int_cst (const_tree type, const_tree i)
2421 REAL_VALUE_TYPE d;
2423 /* Clear all bits of the real value type so that we can later do
2424 bitwise comparisons to see if two values are the same. */
2425 memset (&d, 0, sizeof d);
2427 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2428 TYPE_SIGN (TREE_TYPE (i)));
2429 return d;
2432 /* Given a tree representing an integer constant I, return a tree
2433 representing the same value as a floating-point constant of type TYPE. */
2435 tree
2436 build_real_from_int_cst (tree type, const_tree i)
2438 tree v;
2439 int overflow = TREE_OVERFLOW (i);
2441 v = build_real (type, real_value_from_int_cst (type, i));
2443 TREE_OVERFLOW (v) |= overflow;
2444 return v;
2447 /* Return a new REAL_CST node whose type is TYPE
2448 and whose value is the integer value I which has sign SGN. */
2450 tree
2451 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2453 REAL_VALUE_TYPE d;
2455 /* Clear all bits of the real value type so that we can later do
2456 bitwise comparisons to see if two values are the same. */
2457 memset (&d, 0, sizeof d);
2459 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2460 return build_real (type, d);
2463 /* Return a newly constructed STRING_CST node whose value is the LEN
2464 characters at STR when STR is nonnull, or all zeros otherwise.
2465 Note that for a C string literal, LEN should include the trailing NUL.
2466 The TREE_TYPE is not initialized. */
2468 tree
2469 build_string (unsigned len, const char *str /*= NULL */)
2471 /* Do not waste bytes provided by padding of struct tree_string. */
2472 unsigned size = len + offsetof (struct tree_string, str) + 1;
2474 record_node_allocation_statistics (STRING_CST, size);
2476 tree s = (tree) ggc_internal_alloc (size);
2478 memset (s, 0, sizeof (struct tree_typed));
2479 TREE_SET_CODE (s, STRING_CST);
2480 TREE_CONSTANT (s) = 1;
2481 TREE_STRING_LENGTH (s) = len;
2482 if (str)
2483 memcpy (s->string.str, str, len);
2484 else
2485 memset (s->string.str, 0, len);
2486 s->string.str[len] = '\0';
2488 return s;
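/* Illustrative sketch (not part of GCC): a STRING_CST for the C literal
   "hi".  LEN counts the trailing NUL, and the caller still has to supply
   a type, here a three-element char array built only for this example.  */

static tree ATTRIBUTE_UNUSED
example_string_hi (void)
{
  tree s = build_string (3, "hi");
  TREE_TYPE (s) = build_array_type_nelts (char_type_node, 3);
  return s;
}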
2491 /* Return a newly constructed COMPLEX_CST node whose value is
2492 specified by the real and imaginary parts REAL and IMAG.
2493 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2494 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2496 tree
2497 build_complex (tree type, tree real, tree imag)
2499 gcc_assert (CONSTANT_CLASS_P (real));
2500 gcc_assert (CONSTANT_CLASS_P (imag));
2502 tree t = make_node (COMPLEX_CST);
2504 TREE_REALPART (t) = real;
2505 TREE_IMAGPART (t) = imag;
2506 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2507 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2508 return t;
2511 /* Build a complex (inf +- 0i), such as for the result of cproj.
2512 TYPE is the complex tree type of the result. If NEG is true, the
2513 imaginary zero is negative. */
2515 tree
2516 build_complex_inf (tree type, bool neg)
2518 REAL_VALUE_TYPE rzero = dconst0;
2520 rzero.sign = neg;
2521 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2522 build_real (TREE_TYPE (type), rzero));
2525 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2526 element is set to 1. In particular, this is 1 + i for complex types. */
2528 tree
2529 build_each_one_cst (tree type)
2531 if (TREE_CODE (type) == COMPLEX_TYPE)
2533 tree scalar = build_one_cst (TREE_TYPE (type));
2534 return build_complex (type, scalar, scalar);
2536 else
2537 return build_one_cst (type);
2540 /* Return a constant of arithmetic type TYPE which is the
2541 multiplicative identity of the set TYPE. */
2543 tree
2544 build_one_cst (tree type)
2546 switch (TREE_CODE (type))
2548 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2549 case POINTER_TYPE: case REFERENCE_TYPE:
2550 case OFFSET_TYPE: case BITINT_TYPE:
2551 return build_int_cst (type, 1);
2553 case REAL_TYPE:
2554 return build_real (type, dconst1);
2556 case FIXED_POINT_TYPE:
2557 /* We can only generate 1 for accum types. */
2558 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2559 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2561 case VECTOR_TYPE:
2563 tree scalar = build_one_cst (TREE_TYPE (type));
2565 return build_vector_from_val (type, scalar);
2568 case COMPLEX_TYPE:
2569 return build_complex (type,
2570 build_one_cst (TREE_TYPE (type)),
2571 build_zero_cst (TREE_TYPE (type)));
2573 default:
2574 gcc_unreachable ();
2578 /* Return an integer of type TYPE containing all 1's in as much precision as
2579 it contains, or a complex or vector whose subparts are such integers. */
2581 tree
2582 build_all_ones_cst (tree type)
2584 if (TREE_CODE (type) == COMPLEX_TYPE)
2586 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2587 return build_complex (type, scalar, scalar);
2589 else
2590 return build_minus_one_cst (type);
2593 /* Return a constant of arithmetic type TYPE which is the
2594 opposite of the multiplicative identity of the set TYPE. */
2596 tree
2597 build_minus_one_cst (tree type)
2599 switch (TREE_CODE (type))
2601 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 case OFFSET_TYPE: case BITINT_TYPE:
2604 return build_int_cst (type, -1);
2606 case REAL_TYPE:
2607 return build_real (type, dconstm1);
2609 case FIXED_POINT_TYPE:
2610 /* We can only generate 1 for accum types. */
2611 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2612 return build_fixed (type,
2613 fixed_from_double_int (double_int_minus_one,
2614 SCALAR_TYPE_MODE (type)));
2616 case VECTOR_TYPE:
2618 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2620 return build_vector_from_val (type, scalar);
2623 case COMPLEX_TYPE:
2624 return build_complex (type,
2625 build_minus_one_cst (TREE_TYPE (type)),
2626 build_zero_cst (TREE_TYPE (type)));
2628 default:
2629 gcc_unreachable ();
2633 /* Build 0 constant of type TYPE. This is used by constructor folding
2634 and thus the constant should be represented in memory by
2635 zero(es). */
2637 tree
2638 build_zero_cst (tree type)
2640 switch (TREE_CODE (type))
2642 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2643 case POINTER_TYPE: case REFERENCE_TYPE:
2644 case OFFSET_TYPE: case NULLPTR_TYPE: case BITINT_TYPE:
2645 return build_int_cst (type, 0);
2647 case REAL_TYPE:
2648 return build_real (type, dconst0);
2650 case FIXED_POINT_TYPE:
2651 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2653 case VECTOR_TYPE:
2655 tree scalar = build_zero_cst (TREE_TYPE (type));
2657 return build_vector_from_val (type, scalar);
2660 case COMPLEX_TYPE:
2662 tree zero = build_zero_cst (TREE_TYPE (type));
2664 return build_complex (type, zero, zero);
2667 default:
2668 if (!AGGREGATE_TYPE_P (type))
2669 return fold_convert (type, integer_zero_node);
2670 return build_constructor (type, NULL);
2674 /* Build a constant of integer type TYPE, made of VALUE's bits replicated
2675 every WIDTH bits to fit TYPE's precision. */
2677 tree
2678 build_replicated_int_cst (tree type, unsigned int width, HOST_WIDE_INT value)
2680 int n = ((TYPE_PRECISION (type) + HOST_BITS_PER_WIDE_INT - 1)
2681 / HOST_BITS_PER_WIDE_INT);
2682 unsigned HOST_WIDE_INT low, mask;
2683 HOST_WIDE_INT a[WIDE_INT_MAX_INL_ELTS];
2684 int i;
2686 gcc_assert (n && n <= WIDE_INT_MAX_INL_ELTS);
2688 if (width == HOST_BITS_PER_WIDE_INT)
2689 low = value;
2690 else
2692 mask = ((HOST_WIDE_INT)1 << width) - 1;
2693 low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask);
2696 for (i = 0; i < n; i++)
2697 a[i] = low;
2699 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
2700 return wide_int_to_tree (type, wide_int::from_array (a, n,
2701 TYPE_PRECISION (type)));
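/* Illustrative sketch (not part of GCC): replicate the byte 0x01 across
   the precision of unsigned int, giving the constant 0x01010101 on a
   target where unsigned int is 32 bits wide.  */

static tree ATTRIBUTE_UNUSED
example_replicated_bytes (void)
{
  return build_replicated_int_cst (unsigned_type_node, BITS_PER_UNIT, 0x01);
}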
2704 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2705 unsigned constant in which only the sign bit is set. Return null
2706 otherwise. */
2708 tree
2709 sign_mask_for (tree type)
2711 /* Avoid having to choose between a real-only sign and a pair of signs.
2712 This could be relaxed if the choice becomes obvious later. */
2713 if (TREE_CODE (type) == COMPLEX_TYPE)
2714 return NULL_TREE;
2716 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2717 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2718 if (!bits || !pow2p_hwi (bits))
2719 return NULL_TREE;
2721 tree inttype = unsigned_type_for (type);
2722 if (!inttype)
2723 return NULL_TREE;
2725 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2726 if (VECTOR_TYPE_P (inttype))
2728 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2729 return build_vector_from_val (inttype, elt);
2731 return wide_int_to_tree (inttype, mask);
2734 /* Build a BINFO with BASE_BINFOS base binfo slots.  */
2736 tree
2737 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2739 tree t;
2740 size_t length = (offsetof (struct tree_binfo, base_binfos)
2741 + vec<tree, va_gc>::embedded_size (base_binfos));
2743 record_node_allocation_statistics (TREE_BINFO, length);
2745 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2747 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2749 TREE_SET_CODE (t, TREE_BINFO);
2751 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2753 return t;
2756 /* Create a CASE_LABEL_EXPR tree node and return it. */
2758 tree
2759 build_case_label (tree low_value, tree high_value, tree label_decl)
2761 tree t = make_node (CASE_LABEL_EXPR);
2763 TREE_TYPE (t) = void_type_node;
2764 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2766 CASE_LOW (t) = low_value;
2767 CASE_HIGH (t) = high_value;
2768 CASE_LABEL (t) = label_decl;
2769 CASE_CHAIN (t) = NULL_TREE;
2771 return t;
2774 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2775 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2776 The latter determines the length of the HOST_WIDE_INT vector. */
2778 tree
2779 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2781 tree t;
2782 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2783 + sizeof (struct tree_int_cst));
2785 gcc_assert (len);
2786 record_node_allocation_statistics (INTEGER_CST, length);
2788 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2790 TREE_SET_CODE (t, INTEGER_CST);
2791 TREE_INT_CST_NUNITS (t) = len;
2792 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2793 TREE_CONSTANT (t) = 1;
2795 return t;
2798 /* Build a newly constructed TREE_VEC node of length LEN. */
2800 tree
2801 make_tree_vec (int len MEM_STAT_DECL)
2803 tree t;
2804 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2806 record_node_allocation_statistics (TREE_VEC, length);
2808 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2810 TREE_SET_CODE (t, TREE_VEC);
2811 TREE_VEC_LENGTH (t) = len;
2813 return t;
2816 /* Grow a TREE_VEC node to new length LEN. */
2818 tree
2819 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2821 gcc_assert (TREE_CODE (v) == TREE_VEC);
2823 int oldlen = TREE_VEC_LENGTH (v);
2824 gcc_assert (len > oldlen);
2826 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2827 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2829 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2831 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2833 TREE_VEC_LENGTH (v) = len;
2835 return v;
2838 /* Return true if EXPR is the constant zero, whether it is integral, float or
2839 fixed, and scalar, complex or vector. */
2841 bool
2842 zerop (const_tree expr)
2844 return (integer_zerop (expr)
2845 || real_zerop (expr)
2846 || fixed_zerop (expr));
2849 /* Return true if EXPR is the integer constant zero or a complex constant
2850 of zero, or a location wrapper for such a constant. */
2852 bool
2853 integer_zerop (const_tree expr)
2855 STRIP_ANY_LOCATION_WRAPPER (expr);
2857 switch (TREE_CODE (expr))
2859 case INTEGER_CST:
2860 return wi::to_wide (expr) == 0;
2861 case COMPLEX_CST:
2862 return (integer_zerop (TREE_REALPART (expr))
2863 && integer_zerop (TREE_IMAGPART (expr)));
2864 case VECTOR_CST:
2865 return (VECTOR_CST_NPATTERNS (expr) == 1
2866 && VECTOR_CST_DUPLICATE_P (expr)
2867 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2868 default:
2869 return false;
2873 /* Return true if EXPR is the integer constant one or the corresponding
2874 complex constant, or a location wrapper for such a constant. */
2876 bool
2877 integer_onep (const_tree expr)
2879 STRIP_ANY_LOCATION_WRAPPER (expr);
2881 switch (TREE_CODE (expr))
2883 case INTEGER_CST:
2884 return wi::eq_p (wi::to_widest (expr), 1);
2885 case COMPLEX_CST:
2886 return (integer_onep (TREE_REALPART (expr))
2887 && integer_zerop (TREE_IMAGPART (expr)));
2888 case VECTOR_CST:
2889 return (VECTOR_CST_NPATTERNS (expr) == 1
2890 && VECTOR_CST_DUPLICATE_P (expr)
2891 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2892 default:
2893 return false;
2897 /* Return true if EXPR is the integer constant one. For complex and vector,
2898 return true if every piece is the integer constant one.
2899 Also return true for location wrappers for such a constant. */
2901 bool
2902 integer_each_onep (const_tree expr)
2904 STRIP_ANY_LOCATION_WRAPPER (expr);
2906 if (TREE_CODE (expr) == COMPLEX_CST)
2907 return (integer_onep (TREE_REALPART (expr))
2908 && integer_onep (TREE_IMAGPART (expr)));
2909 else
2910 return integer_onep (expr);
2913 /* Return true if EXPR is an integer containing all 1's in as much precision
2914 as it contains, or a complex or vector whose subparts are such integers,
2915 or a location wrapper for such a constant. */
2917 bool
2918 integer_all_onesp (const_tree expr)
2920 STRIP_ANY_LOCATION_WRAPPER (expr);
2922 if (TREE_CODE (expr) == COMPLEX_CST
2923 && integer_all_onesp (TREE_REALPART (expr))
2924 && integer_all_onesp (TREE_IMAGPART (expr)))
2925 return true;
2927 else if (TREE_CODE (expr) == VECTOR_CST)
2928 return (VECTOR_CST_NPATTERNS (expr) == 1
2929 && VECTOR_CST_DUPLICATE_P (expr)
2930 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2932 else if (TREE_CODE (expr) != INTEGER_CST)
2933 return false;
2935 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2936 == wi::to_wide (expr));
2939 /* Return true if EXPR is the integer constant minus one, or a location
2940 wrapper for such a constant. */
2942 bool
2943 integer_minus_onep (const_tree expr)
2945 STRIP_ANY_LOCATION_WRAPPER (expr);
2947 if (TREE_CODE (expr) == COMPLEX_CST)
2948 return (integer_all_onesp (TREE_REALPART (expr))
2949 && integer_zerop (TREE_IMAGPART (expr)));
2950 else
2951 return integer_all_onesp (expr);
2954 /* Return true if EXPR is an integer constant that is a power of 2 (i.e., has
2955 only one bit on), or a location wrapper for such a constant. */
2957 bool
2958 integer_pow2p (const_tree expr)
2960 STRIP_ANY_LOCATION_WRAPPER (expr);
2962 if (TREE_CODE (expr) == COMPLEX_CST
2963 && integer_pow2p (TREE_REALPART (expr))
2964 && integer_zerop (TREE_IMAGPART (expr)))
2965 return true;
2967 if (TREE_CODE (expr) != INTEGER_CST)
2968 return false;
2970 return wi::popcount (wi::to_wide (expr)) == 1;
2973 /* Return true if EXPR is an integer constant other than zero or a
2974 complex constant other than zero, or a location wrapper for such a
2975 constant. */
2977 bool
2978 integer_nonzerop (const_tree expr)
2980 STRIP_ANY_LOCATION_WRAPPER (expr);
2982 return ((TREE_CODE (expr) == INTEGER_CST
2983 && wi::to_wide (expr) != 0)
2984 || (TREE_CODE (expr) == COMPLEX_CST
2985 && (integer_nonzerop (TREE_REALPART (expr))
2986 || integer_nonzerop (TREE_IMAGPART (expr)))));
2989 /* Return true if EXPR is the integer constant one. For vector,
2990 return true if every piece is the integer constant minus one
2991 (representing the value TRUE).
2992 Also return true for location wrappers for such a constant. */
2994 bool
2995 integer_truep (const_tree expr)
2997 STRIP_ANY_LOCATION_WRAPPER (expr);
2999 if (TREE_CODE (expr) == VECTOR_CST)
3000 return integer_all_onesp (expr);
3001 return integer_onep (expr);
3004 /* Return true if EXPR is the fixed-point constant zero, or a location wrapper
3005 for such a constant. */
3007 bool
3008 fixed_zerop (const_tree expr)
3010 STRIP_ANY_LOCATION_WRAPPER (expr);
3012 return (TREE_CODE (expr) == FIXED_CST
3013 && TREE_FIXED_CST (expr).data.is_zero ());
3016 /* Return the base-2 logarithm of a tree node known to be a
3017 power of two.  */
3019 int
3020 tree_log2 (const_tree expr)
3022 if (TREE_CODE (expr) == COMPLEX_CST)
3023 return tree_log2 (TREE_REALPART (expr));
3025 return wi::exact_log2 (wi::to_wide (expr));
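/* Illustrative sketch (not part of GCC): tree_log2 of the INTEGER_CST 8
   is 3, since 8 == 1 << 3.  */

static int ATTRIBUTE_UNUSED
example_log2_of_eight (void)
{
  return tree_log2 (build_int_cst (integer_type_node, 8));
}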
3028 /* Similar, but return the largest integer Y such that 2 ** Y is less
3029 than or equal to EXPR. */
3031 int
3032 tree_floor_log2 (const_tree expr)
3034 if (TREE_CODE (expr) == COMPLEX_CST)
3035 return tree_log2 (TREE_REALPART (expr));
3037 return wi::floor_log2 (wi::to_wide (expr));
3040 /* Return number of known trailing zero bits in EXPR, or, if the value of
3041 EXPR is known to be zero, the precision of its type.  */
3043 unsigned int
3044 tree_ctz (const_tree expr)
3046 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3047 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3048 return 0;
3050 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3051 switch (TREE_CODE (expr))
3053 case INTEGER_CST:
3054 ret1 = wi::ctz (wi::to_wide (expr));
3055 return MIN (ret1, prec);
3056 case SSA_NAME:
3057 ret1 = wi::ctz (get_nonzero_bits (expr));
3058 return MIN (ret1, prec);
3059 case PLUS_EXPR:
3060 case MINUS_EXPR:
3061 case BIT_IOR_EXPR:
3062 case BIT_XOR_EXPR:
3063 case MIN_EXPR:
3064 case MAX_EXPR:
3065 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3066 if (ret1 == 0)
3067 return ret1;
3068 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3069 return MIN (ret1, ret2);
3070 case POINTER_PLUS_EXPR:
3071 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3072 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3073 /* Second operand is sizetype, which could be in theory
3074 wider than pointer's precision. Make sure we never
3075 return more than prec. */
3076 ret2 = MIN (ret2, prec);
3077 return MIN (ret1, ret2);
3078 case BIT_AND_EXPR:
3079 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3080 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3081 return MAX (ret1, ret2);
3082 case MULT_EXPR:
3083 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3084 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3085 return MIN (ret1 + ret2, prec);
3086 case LSHIFT_EXPR:
3087 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3088 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3089 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3091 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3092 return MIN (ret1 + ret2, prec);
3094 return ret1;
3095 case RSHIFT_EXPR:
3096 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3097 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3099 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3100 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3101 if (ret1 > ret2)
3102 return ret1 - ret2;
3104 return 0;
3105 case TRUNC_DIV_EXPR:
3106 case CEIL_DIV_EXPR:
3107 case FLOOR_DIV_EXPR:
3108 case ROUND_DIV_EXPR:
3109 case EXACT_DIV_EXPR:
3110 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3111 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3113 int l = tree_log2 (TREE_OPERAND (expr, 1));
3114 if (l >= 0)
3116 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3117 ret2 = l;
3118 if (ret1 > ret2)
3119 return ret1 - ret2;
3122 return 0;
3123 CASE_CONVERT:
3124 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3125 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3126 ret1 = prec;
3127 return MIN (ret1, prec);
3128 case SAVE_EXPR:
3129 return tree_ctz (TREE_OPERAND (expr, 0));
3130 case COND_EXPR:
3131 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3132 if (ret1 == 0)
3133 return 0;
3134 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3135 return MIN (ret1, ret2);
3136 case COMPOUND_EXPR:
3137 return tree_ctz (TREE_OPERAND (expr, 1));
3138 case ADDR_EXPR:
3139 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3140 if (ret1 > BITS_PER_UNIT)
3142 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3143 return MIN (ret1, prec);
3145 return 0;
3146 default:
3147 return 0;
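/* Illustrative sketch (not part of GCC): the INTEGER_CST 40 (binary 101000)
   has three known trailing zero bits; for a MULT_EXPR the counts of the
   two operands are added, as the switch above implements.  */

static unsigned int ATTRIBUTE_UNUSED
example_trailing_zeros (void)
{
  return tree_ctz (build_int_cst (integer_type_node, 40));
}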
3151 /* Return true if EXPR is the real constant zero. Trailing zeroes matter for
3152 decimal float constants, so don't return true for them.
3153 Also return true for location wrappers around such a constant. */
3155 bool
3156 real_zerop (const_tree expr)
3158 STRIP_ANY_LOCATION_WRAPPER (expr);
3160 switch (TREE_CODE (expr))
3162 case REAL_CST:
3163 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3164 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3165 case COMPLEX_CST:
3166 return real_zerop (TREE_REALPART (expr))
3167 && real_zerop (TREE_IMAGPART (expr));
3168 case VECTOR_CST:
3170 /* Don't simply check for a duplicate because the predicate
3171 accepts both +0.0 and -0.0. */
3172 unsigned count = vector_cst_encoded_nelts (expr);
3173 for (unsigned int i = 0; i < count; ++i)
3174 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3175 return false;
3176 return true;
3178 default:
3179 return false;
3183 /* Return true if EXPR is the real constant one in real or complex form.
3184 Trailing zeroes matter for decimal float constants, so don't return
3185 true for them.
3186 Also return true for location wrappers around such a constant. */
3188 bool
3189 real_onep (const_tree expr)
3191 STRIP_ANY_LOCATION_WRAPPER (expr);
3193 switch (TREE_CODE (expr))
3195 case REAL_CST:
3196 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3197 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3198 case COMPLEX_CST:
3199 return real_onep (TREE_REALPART (expr))
3200 && real_zerop (TREE_IMAGPART (expr));
3201 case VECTOR_CST:
3202 return (VECTOR_CST_NPATTERNS (expr) == 1
3203 && VECTOR_CST_DUPLICATE_P (expr)
3204 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3205 default:
3206 return false;
3210 /* Return true if EXPR is the real constant minus one. Trailing zeroes
3211 matter for decimal float constants, so don't return true for them.
3212 Also return true for location wrappers around such a constant. */
3214 bool
3215 real_minus_onep (const_tree expr)
3217 STRIP_ANY_LOCATION_WRAPPER (expr);
3219 switch (TREE_CODE (expr))
3221 case REAL_CST:
3222 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3223 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3224 case COMPLEX_CST:
3225 return real_minus_onep (TREE_REALPART (expr))
3226 && real_zerop (TREE_IMAGPART (expr));
3227 case VECTOR_CST:
3228 return (VECTOR_CST_NPATTERNS (expr) == 1
3229 && VECTOR_CST_DUPLICATE_P (expr)
3230 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3231 default:
3232 return false;
3236 /* Return true if EXPR could be a floating point zero.  */
3238 bool
3239 real_maybe_zerop (const_tree expr)
3241 switch (TREE_CODE (expr))
3243 case REAL_CST:
3244 /* Can't use real_zerop here, as it always returns false for decimal
3245 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3246 either, as decimal zeros are rvc_normal. */
3247 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3248 case COMPLEX_CST:
3249 return (real_maybe_zerop (TREE_REALPART (expr))
3250 || real_maybe_zerop (TREE_IMAGPART (expr)));
3251 case VECTOR_CST:
3253 unsigned count = vector_cst_encoded_nelts (expr);
3254 for (unsigned int i = 0; i < count; ++i)
3255 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3256 return true;
3257 return false;
3259 default:
3260 /* Perhaps for SSA_NAMEs we could query frange. */
3261 return true;
3265 /* True if EXP is a constant or a cast of a constant. */
3267 bool
3268 really_constant_p (const_tree exp)
3270 /* This is not quite the same as STRIP_NOPS. It does more. */
3271 while (CONVERT_EXPR_P (exp)
3272 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3273 exp = TREE_OPERAND (exp, 0);
3274 return TREE_CONSTANT (exp);
3277 /* Return true if T holds a polynomial pointer difference, storing it in
3278 *VALUE if so. A true return means that T's precision is no greater
3279 than 64 bits, which is the largest address space we support, so *VALUE
3280 never loses precision. However, the signedness of the result does
3281 not necessarily match the signedness of T: sometimes an unsigned type
3282 like sizetype is used to encode a value that is actually negative. */
3284 bool
3285 ptrdiff_tree_p (const_tree t, poly_int64 *value)
3287 if (!t)
3288 return false;
3289 if (TREE_CODE (t) == INTEGER_CST)
3291 if (!cst_and_fits_in_hwi (t))
3292 return false;
3293 *value = int_cst_value (t);
3294 return true;
3296 if (POLY_INT_CST_P (t))
3298 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3299 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3300 return false;
3301 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3302 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3303 return true;
3305 return false;
3308 poly_int64
3309 tree_to_poly_int64 (const_tree t)
3311 gcc_assert (tree_fits_poly_int64_p (t));
3312 if (POLY_INT_CST_P (t))
3313 return poly_int_cst_value (t).force_shwi ();
3314 return TREE_INT_CST_LOW (t);
3317 poly_uint64
3318 tree_to_poly_uint64 (const_tree t)
3320 gcc_assert (tree_fits_poly_uint64_p (t));
3321 if (POLY_INT_CST_P (t))
3322 return poly_int_cst_value (t).force_uhwi ();
3323 return TREE_INT_CST_LOW (t);
3326 /* Return first list element whose TREE_VALUE is ELEM.
3327 Return 0 if ELEM is not in LIST. */
3329 tree
3330 value_member (tree elem, tree list)
3332 while (list)
3334 if (elem == TREE_VALUE (list))
3335 return list;
3336 list = TREE_CHAIN (list);
3338 return NULL_TREE;
3341 /* Return first list element whose TREE_PURPOSE is ELEM.
3342 Return 0 if ELEM is not in LIST. */
3344 tree
3345 purpose_member (const_tree elem, tree list)
3347 while (list)
3349 if (elem == TREE_PURPOSE (list))
3350 return list;
3351 list = TREE_CHAIN (list);
3353 return NULL_TREE;
3356 /* Return true if ELEM is in V. */
3358 bool
3359 vec_member (const_tree elem, vec<tree, va_gc> *v)
3361 unsigned ix;
3362 tree t;
3363 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3364 if (elem == t)
3365 return true;
3366 return false;
3369 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3370 NULL_TREE. */
3372 tree
3373 chain_index (int idx, tree chain)
3375 for (; chain && idx > 0; --idx)
3376 chain = TREE_CHAIN (chain);
3377 return chain;
3380 /* Return true if ELEM is part of the chain CHAIN. */
3382 bool
3383 chain_member (const_tree elem, const_tree chain)
3385 while (chain)
3387 if (elem == chain)
3388 return true;
3389 chain = DECL_CHAIN (chain);
3392 return false;
3395 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3396 We expect a null pointer to mark the end of the chain.
3397 This is the Lisp primitive `length'. */
3399 int
3400 list_length (const_tree t)
3402 const_tree p = t;
3403 #ifdef ENABLE_TREE_CHECKING
3404 const_tree q = t;
3405 #endif
3406 int len = 0;
3408 while (p)
3410 p = TREE_CHAIN (p);
3411 #ifdef ENABLE_TREE_CHECKING
3412 if (len % 2)
3413 q = TREE_CHAIN (q);
3414 gcc_assert (p != q);
3415 #endif
3416 len++;
3419 return len;
3422 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3423 UNION_TYPE TYPE, or NULL_TREE if none. */
3425 tree
3426 first_field (const_tree type)
3428 tree t = TYPE_FIELDS (type);
3429 while (t && TREE_CODE (t) != FIELD_DECL)
3430 t = TREE_CHAIN (t);
3431 return t;
3434 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3435 UNION_TYPE TYPE, or NULL_TREE if none. */
3437 tree
3438 last_field (const_tree type)
3440 tree last = NULL_TREE;
3442 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3444 if (TREE_CODE (fld) != FIELD_DECL)
3445 continue;
3447 last = fld;
3450 return last;
3453 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3454 by modifying the last node in chain 1 to point to chain 2.
3455 This is the Lisp primitive `nconc'. */
3457 tree
3458 chainon (tree op1, tree op2)
3460 tree t1;
3462 if (!op1)
3463 return op2;
3464 if (!op2)
3465 return op1;
3467 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3468 continue;
3469 TREE_CHAIN (t1) = op2;
3471 #ifdef ENABLE_TREE_CHECKING
3473 tree t2;
3474 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3475 gcc_assert (t2 != t1);
3477 #endif
3479 return op1;
3482 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3484 tree
3485 tree_last (tree chain)
3487 tree next;
3488 if (chain)
3489 while ((next = TREE_CHAIN (chain)))
3490 chain = next;
3491 return chain;
3494 /* Reverse the order of elements in the chain T,
3495 and return the new head of the chain (old last element). */
3497 tree
3498 nreverse (tree t)
3500 tree prev = 0, decl, next;
3501 for (decl = t; decl; decl = next)
3503 /* We shouldn't be using this function to reverse BLOCK chains; we
3504 have blocks_nreverse for that. */
3505 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3506 next = TREE_CHAIN (decl);
3507 TREE_CHAIN (decl) = prev;
3508 prev = decl;
3510 return prev;
3513 /* Return a newly created TREE_LIST node whose
3514 purpose and value fields are PARM and VALUE. */
3516 tree
3517 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3519 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3520 TREE_PURPOSE (t) = parm;
3521 TREE_VALUE (t) = value;
3522 return t;
3525 /* Build a chain of TREE_LIST nodes from a vector. */
3527 tree
3528 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3530 tree ret = NULL_TREE;
3531 tree *pp = &ret;
3532 unsigned int i;
3533 tree t;
3534 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3536 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3537 pp = &TREE_CHAIN (*pp);
3539 return ret;
3542 /* Return a newly created TREE_LIST node whose
3543 purpose and value fields are PURPOSE and VALUE
3544 and whose TREE_CHAIN is CHAIN. */
3546 tree
3547 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3549 tree node;
3551 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3552 memset (node, 0, sizeof (struct tree_common));
3554 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3556 TREE_SET_CODE (node, TREE_LIST);
3557 TREE_CHAIN (node) = chain;
3558 TREE_PURPOSE (node) = purpose;
3559 TREE_VALUE (node) = value;
3560 return node;
3563 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3564 trees. */
3566 vec<tree, va_gc> *
3567 ctor_to_vec (tree ctor)
3569 vec<tree, va_gc> *vec;
3570 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3571 unsigned int ix;
3572 tree val;
3574 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3575 vec->quick_push (val);
3577 return vec;
3580 /* Return the size nominally occupied by an object of type TYPE
3581 when it resides in memory. The value is measured in units of bytes,
3582 and its data type is that normally used for type sizes
3583 (which is the first type created by make_signed_type or
3584 make_unsigned_type). */
3586 tree
3587 size_in_bytes_loc (location_t loc, const_tree type)
3589 tree t;
3591 if (type == error_mark_node)
3592 return integer_zero_node;
3594 type = TYPE_MAIN_VARIANT (type);
3595 t = TYPE_SIZE_UNIT (type);
3597 if (t == 0)
3599 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3600 return size_zero_node;
3603 return t;
3606 /* Return the size of TYPE (in bytes) as a wide integer
3607 or return -1 if the size can vary or is larger than an integer. */
3609 HOST_WIDE_INT
3610 int_size_in_bytes (const_tree type)
3612 tree t;
3614 if (type == error_mark_node)
3615 return 0;
3617 type = TYPE_MAIN_VARIANT (type);
3618 t = TYPE_SIZE_UNIT (type);
3620 if (t && tree_fits_uhwi_p (t))
3621 return TREE_INT_CST_LOW (t);
3622 else
3623 return -1;
3626 /* Return the maximum size of TYPE (in bytes) as a wide integer
3627 or return -1 if the size can vary or is larger than an integer. */
3629 HOST_WIDE_INT
3630 max_int_size_in_bytes (const_tree type)
3632 HOST_WIDE_INT size = -1;
3633 tree size_tree;
3635 /* If this is an array type, check for a possible MAX_SIZE attached. */
3637 if (TREE_CODE (type) == ARRAY_TYPE)
3639 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3641 if (size_tree && tree_fits_uhwi_p (size_tree))
3642 size = tree_to_uhwi (size_tree);
3645 /* If we still haven't been able to get a size, see if the language
3646 can compute a maximum size. */
3648 if (size == -1)
3650 size_tree = lang_hooks.types.max_size (type);
3652 if (size_tree && tree_fits_uhwi_p (size_tree))
3653 size = tree_to_uhwi (size_tree);
3656 return size;
3659 /* Return the bit position of FIELD, in bits from the start of the record.
3660 This is a tree of type bitsizetype. */
3662 tree
3663 bit_position (const_tree field)
3665 return bit_from_pos (DECL_FIELD_OFFSET (field),
3666 DECL_FIELD_BIT_OFFSET (field));
3669 /* Return the byte position of FIELD, in bytes from the start of the record.
3670 This is a tree of type sizetype. */
3672 tree
3673 byte_position (const_tree field)
3675 return byte_from_pos (DECL_FIELD_OFFSET (field),
3676 DECL_FIELD_BIT_OFFSET (field));
3679 /* Likewise, but return as an integer. It must be representable in
3680 that way (since it could be a signed value, we don't have the
3681 option of returning -1 like int_size_in_bytes can).  */
3683 HOST_WIDE_INT
3684 int_byte_position (const_tree field)
3686 return tree_to_shwi (byte_position (field));
3689 /* Return, as a tree node, the number of elements for TYPE (which is an
3690 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3692 tree
3693 array_type_nelts (const_tree type)
3695 tree index_type, min, max;
3697 /* If they did it with unspecified bounds, then we should have already
3698 given an error about it before we got here. */
3699 if (! TYPE_DOMAIN (type))
3700 return error_mark_node;
3702 index_type = TYPE_DOMAIN (type);
3703 min = TYPE_MIN_VALUE (index_type);
3704 max = TYPE_MAX_VALUE (index_type);
3706 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3707 if (!max)
3709 /* Zero-sized arrays are represented by the C FE as complete types with
3710 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3711 them as min 0, max -1. */
3712 if (COMPLETE_TYPE_P (type)
3713 && integer_zerop (TYPE_SIZE (type))
3714 && integer_zerop (min))
3715 return build_int_cst (TREE_TYPE (min), -1);
3717 return error_mark_node;
3720 return (integer_zerop (min)
3721 ? max
3722 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
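/* Illustrative sketch (not part of GCC): for a four-element int array type
   the result is the INTEGER_CST 3, i.e. the maximum index of the [0, 3]
   domain.  */

static tree ATTRIBUTE_UNUSED
example_nelts_minus_one (void)
{
  return array_type_nelts (build_array_type_nelts (integer_type_node, 4));
}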
3725 /* If arg is static -- a reference to an object in static storage -- then
3726 return the object. This is not the same as the C meaning of `static'.
3727 If arg isn't static, return NULL. */
3729 tree
3730 staticp (tree arg)
3732 switch (TREE_CODE (arg))
3734 case FUNCTION_DECL:
3735 /* Nested functions are static, even though taking their address will
3736 involve a trampoline as we unnest the nested function and create
3737 the trampoline on the tree level. */
3738 return arg;
3740 case VAR_DECL:
3741 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3742 && ! DECL_THREAD_LOCAL_P (arg)
3743 && ! DECL_DLLIMPORT_P (arg)
3744 ? arg : NULL);
3746 case CONST_DECL:
3747 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3748 ? arg : NULL);
3750 case CONSTRUCTOR:
3751 return TREE_STATIC (arg) ? arg : NULL;
3753 case LABEL_DECL:
3754 case STRING_CST:
3755 return arg;
3757 case COMPONENT_REF:
3758 /* If the thing being referenced is not a field, then it is
3759 something language specific. */
3760 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3762 /* If we are referencing a bitfield, we can't evaluate an
3763 ADDR_EXPR at compile time and so it isn't a constant. */
3764 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3765 return NULL;
3767 return staticp (TREE_OPERAND (arg, 0));
3769 case BIT_FIELD_REF:
3770 return NULL;
3772 case INDIRECT_REF:
3773 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3775 case ARRAY_REF:
3776 case ARRAY_RANGE_REF:
3777 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3778 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3779 return staticp (TREE_OPERAND (arg, 0));
3780 else
3781 return NULL;
3783 case COMPOUND_LITERAL_EXPR:
3784 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3786 default:
3787 return NULL;
3794 /* Return whether OP is a DECL whose address is function-invariant. */
3796 bool
3797 decl_address_invariant_p (const_tree op)
3799 /* The conditions below are slightly less strict than those in
3800 staticp. */
3802 switch (TREE_CODE (op))
3804 case PARM_DECL:
3805 case RESULT_DECL:
3806 case LABEL_DECL:
3807 case FUNCTION_DECL:
3808 return true;
3810 case VAR_DECL:
3811 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3812 || DECL_THREAD_LOCAL_P (op)
3813 || DECL_CONTEXT (op) == current_function_decl
3814 || decl_function_context (op) == current_function_decl)
3815 return true;
3816 break;
3818 case CONST_DECL:
3819 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3820 || decl_function_context (op) == current_function_decl)
3821 return true;
3822 break;
3824 default:
3825 break;
3828 return false;
3831 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3833 bool
3834 decl_address_ip_invariant_p (const_tree op)
3836 /* The conditions below are slightly less strict than those in
3837 staticp. */
3839 switch (TREE_CODE (op))
3841 case LABEL_DECL:
3842 case FUNCTION_DECL:
3843 case STRING_CST:
3844 return true;
3846 case VAR_DECL:
3847 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3848 && !DECL_DLLIMPORT_P (op))
3849 || DECL_THREAD_LOCAL_P (op))
3850 return true;
3851 break;
3853 case CONST_DECL:
3854 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3855 return true;
3856 break;
3858 default:
3859 break;
3862 return false;
3866 /* Return true if T is function-invariant (internal function, does
3867 not handle arithmetic; that's handled in skip_simple_arithmetic and
3868 tree_invariant_p). */
3870 static bool
3871 tree_invariant_p_1 (tree t)
3873 tree op;
3875 if (TREE_CONSTANT (t)
3876 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3877 return true;
3879 switch (TREE_CODE (t))
3881 case SAVE_EXPR:
3882 return true;
3884 case ADDR_EXPR:
3885 op = TREE_OPERAND (t, 0);
3886 while (handled_component_p (op))
3888 switch (TREE_CODE (op))
3890 case ARRAY_REF:
3891 case ARRAY_RANGE_REF:
3892 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3893 || TREE_OPERAND (op, 2) != NULL_TREE
3894 || TREE_OPERAND (op, 3) != NULL_TREE)
3895 return false;
3896 break;
3898 case COMPONENT_REF:
3899 if (TREE_OPERAND (op, 2) != NULL_TREE)
3900 return false;
3901 break;
3903 default:;
3905 op = TREE_OPERAND (op, 0);
3908 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3910 default:
3911 break;
3914 return false;
3917 /* Return true if T is function-invariant. */
3919 bool
3920 tree_invariant_p (tree t)
3922 tree inner = skip_simple_arithmetic (t);
3923 return tree_invariant_p_1 (inner);
3926 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3927 Do this to any expression which may be used in more than one place,
3928 but must be evaluated only once.
3930 Normally, expand_expr would reevaluate the expression each time.
3931 Calling save_expr produces something that is evaluated and recorded
3932 the first time expand_expr is called on it. Subsequent calls to
3933 expand_expr just reuse the recorded value.
3935 The call to expand_expr that generates code that actually computes
3936 the value is the first call *at compile time*. Subsequent calls
3937 *at compile time* generate code to use the saved value.
3938 This produces correct result provided that *at run time* control
3939 always flows through the insns made by the first expand_expr
3940 before reaching the other places where the save_expr was evaluated.
3941 You, the caller of save_expr, must make sure this is so.
3943 Constants, and certain read-only nodes, are returned with no
3944 SAVE_EXPR because that is safe. Expressions containing placeholders
3945 are not touched; see tree.def for an explanation of what these
3946 are used for. */
3948 tree
3949 save_expr (tree expr)
3951 tree inner;
3953 /* If the tree evaluates to a constant, then we don't want to hide that
3954 fact (i.e. this allows further folding, and direct checks for constants).
3955 However, a read-only object that has side effects cannot be bypassed.
3956 Since it is no problem to reevaluate literals, we just return the
3957 literal node. */
3958 inner = skip_simple_arithmetic (expr);
3959 if (TREE_CODE (inner) == ERROR_MARK)
3960 return inner;
3962 if (tree_invariant_p_1 (inner))
3963 return expr;
3965 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3966 it means that the size or offset of some field of an object depends on
3967 the value within another field.
3969 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3970 and some variable since it would then need to be both evaluated once and
3971 evaluated more than once. Front-ends must assure this case cannot
3972 happen by surrounding any such subexpressions in their own SAVE_EXPR
3973 and forcing evaluation at the proper time. */
3974 if (contains_placeholder_p (inner))
3975 return expr;
3977 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3979 /* This expression might be placed ahead of a jump to ensure that the
3980 value was computed on both sides of the jump. So make sure it isn't
3981 eliminated as dead. */
3982 TREE_SIDE_EFFECTS (expr) = 1;
3983 return expr;
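/* Illustrative sketch (not part of GCC): guard an expression EXP that may
   have side effects with save_expr before using it twice, so that the sum
   below evaluates EXP only once at run time.  */

static tree ATTRIBUTE_UNUSED
example_double_without_reevaluating (tree exp)
{
  tree saved = save_expr (exp);
  return build2 (PLUS_EXPR, TREE_TYPE (saved), saved, saved);
}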
3986 /* Look inside EXPR into any simple arithmetic operations. Return the
3987 outermost non-arithmetic or non-invariant node. */
3989 tree
3990 skip_simple_arithmetic (tree expr)
3992 /* We don't care about whether this can be used as an lvalue in this
3993 context. */
3994 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3995 expr = TREE_OPERAND (expr, 0);
3997 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3998 a constant, it will be more efficient to not make another SAVE_EXPR since
3999 it will allow better simplification and GCSE will be able to merge the
4000 computations if they actually occur. */
4001 while (true)
4003 if (UNARY_CLASS_P (expr))
4004 expr = TREE_OPERAND (expr, 0);
4005 else if (BINARY_CLASS_P (expr))
4007 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
4008 expr = TREE_OPERAND (expr, 0);
4009 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
4010 expr = TREE_OPERAND (expr, 1);
4011 else
4012 break;
4014 else
4015 break;
4018 return expr;
4021 /* Look inside EXPR into simple arithmetic operations involving constants.
4022 Return the outermost non-arithmetic or non-constant node. */
4024 tree
4025 skip_simple_constant_arithmetic (tree expr)
4027 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4028 expr = TREE_OPERAND (expr, 0);
4030 while (true)
4032 if (UNARY_CLASS_P (expr))
4033 expr = TREE_OPERAND (expr, 0);
4034 else if (BINARY_CLASS_P (expr))
4036 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4037 expr = TREE_OPERAND (expr, 0);
4038 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4039 expr = TREE_OPERAND (expr, 1);
4040 else
4041 break;
4043 else
4044 break;
4047 return expr;
4050 /* Return which tree structure is used by T. */
4052 enum tree_node_structure_enum
4053 tree_node_structure (const_tree t)
4055 const enum tree_code code = TREE_CODE (t);
4056 return tree_node_structure_for_code (code);
4059 /* Set various status flags when building a CALL_EXPR object T. */
4061 static void
4062 process_call_operands (tree t)
4064 bool side_effects = TREE_SIDE_EFFECTS (t);
4065 bool read_only = false;
4066 int i = call_expr_flags (t);
4068 /* Calls have side-effects, except those to const or pure functions. */
4069 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4070 side_effects = true;
4071 /* Propagate TREE_READONLY of arguments for const functions. */
4072 if (i & ECF_CONST)
4073 read_only = true;
4075 if (!side_effects || read_only)
4076 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4078 tree op = TREE_OPERAND (t, i);
4079 if (op && TREE_SIDE_EFFECTS (op))
4080 side_effects = true;
4081 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4082 read_only = false;
4085 TREE_SIDE_EFFECTS (t) = side_effects;
4086 TREE_READONLY (t) = read_only;
4089 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4090 size or offset that depends on a field within a record. */
4092 bool
4093 contains_placeholder_p (const_tree exp)
4095 enum tree_code code;
4097 if (!exp)
4098 return false;
4100 code = TREE_CODE (exp);
4101 if (code == PLACEHOLDER_EXPR)
4102 return true;
4104 switch (TREE_CODE_CLASS (code))
4106 case tcc_reference:
4107 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4108 position computations, since they will be converted into a
4109 WITH_RECORD_EXPR involving the reference, which is assumed here
4110 to be valid. */
4111 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4113 case tcc_exceptional:
4114 if (code == TREE_LIST)
4115 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4116 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4117 break;
4119 case tcc_unary:
4120 case tcc_binary:
4121 case tcc_comparison:
4122 case tcc_expression:
4123 switch (code)
4125 case COMPOUND_EXPR:
4126 /* Ignoring the first operand isn't quite right, but works best. */
4127 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4129 case COND_EXPR:
4130 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4131 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4132 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4134 case SAVE_EXPR:
4135 /* The save_expr function never wraps anything containing
4136 a PLACEHOLDER_EXPR. */
4137 return false;
4139 default:
4140 break;
4143 switch (TREE_CODE_LENGTH (code))
4145 case 1:
4146 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4147 case 2:
4148 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4149 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4150 default:
4151 return false;
4154 case tcc_vl_exp:
4155 switch (code)
4157 case CALL_EXPR:
4159 const_tree arg;
4160 const_call_expr_arg_iterator iter;
4161 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4162 if (CONTAINS_PLACEHOLDER_P (arg))
4163 return true;
4164 return false;
4166 default:
4167 return false;
4170 default:
4171 return false;
4173 return false;
4176 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4177 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4178 field positions. */
4180 static bool
4181 type_contains_placeholder_1 (const_tree type)
4183 /* If the size contains a placeholder or the parent type (component type in
4184 the case of arrays) involves a placeholder, this type does. */
4185 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4186 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4187 || (!POINTER_TYPE_P (type)
4188 && TREE_TYPE (type)
4189 && type_contains_placeholder_p (TREE_TYPE (type))))
4190 return true;
4192 /* Now do type-specific checks. Note that the last part of the check above
4193 greatly limits what we have to do below. */
4194 switch (TREE_CODE (type))
4196 case VOID_TYPE:
4197 case OPAQUE_TYPE:
4198 case COMPLEX_TYPE:
4199 case ENUMERAL_TYPE:
4200 case BOOLEAN_TYPE:
4201 case POINTER_TYPE:
4202 case OFFSET_TYPE:
4203 case REFERENCE_TYPE:
4204 case METHOD_TYPE:
4205 case FUNCTION_TYPE:
4206 case VECTOR_TYPE:
4207 case NULLPTR_TYPE:
4208 return false;
4210 case INTEGER_TYPE:
4211 case REAL_TYPE:
4212 case FIXED_POINT_TYPE:
4213 /* Here we just check the bounds. */
4214 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4215 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4217 case ARRAY_TYPE:
4218 /* We have already checked the component type above, so just check
4219 the domain type. Flexible array members have a null domain. */
4220 return TYPE_DOMAIN (type) ?
4221 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4223 case RECORD_TYPE:
4224 case UNION_TYPE:
4225 case QUAL_UNION_TYPE:
4227 tree field;
4229 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4230 if (TREE_CODE (field) == FIELD_DECL
4231 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4232 || (TREE_CODE (type) == QUAL_UNION_TYPE
4233 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4234 || type_contains_placeholder_p (TREE_TYPE (field))))
4235 return true;
4237 return false;
4240 default:
4241 gcc_unreachable ();
4245 /* Wrapper around above function used to cache its result. */
4247 bool
4248 type_contains_placeholder_p (tree type)
4250 bool result;
4252 /* If the contains_placeholder_bits field has been initialized,
4253 then we know the answer. */
4254 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4255 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4257 /* Indicate that we've seen this type node, and the answer is false.
4258 This is what we want to return if we run into recursion via fields. */
4259 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4261 /* Compute the real value. */
4262 result = type_contains_placeholder_1 (type);
4264 /* Store the real value. */
4265 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4267 return result;
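/* Note on the caching scheme above: TYPE_CONTAINS_PLACEHOLDER_INTERNAL
   encodes 0 = not computed yet, 1 = false, 2 = true, hence the "+ 1" when
   storing and the "- 1" when reading.  Pre-setting the field to 1 before
   the real computation makes recursion through self-referential field
   types terminate with a provisional "false".  */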
4270 /* Push tree EXP onto vector QUEUE if it is not already present. */
4272 static void
4273 push_without_duplicates (tree exp, vec<tree> *queue)
4275 unsigned int i;
4276 tree iter;
4278 FOR_EACH_VEC_ELT (*queue, i, iter)
4279 if (simple_cst_equal (iter, exp) == 1)
4280 break;
4282 if (!iter)
4283 queue->safe_push (exp);
4286 /* Given a tree EXP, find all occurrences of references to fields
4287 in a PLACEHOLDER_EXPR and place them in vector REFS without
4288 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4289 we assume here that EXP contains only arithmetic expressions
4290 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4291 argument list. */
4293 void
4294 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4296 enum tree_code code = TREE_CODE (exp);
4297 tree inner;
4298 int i;
4300 /* We handle TREE_LIST and COMPONENT_REF separately. */
4301 if (code == TREE_LIST)
4303 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4304 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4306 else if (code == COMPONENT_REF)
4308 for (inner = TREE_OPERAND (exp, 0);
4309 REFERENCE_CLASS_P (inner);
4310 inner = TREE_OPERAND (inner, 0))
4313 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4314 push_without_duplicates (exp, refs);
4315 else
4316 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4318 else
4319 switch (TREE_CODE_CLASS (code))
4321 case tcc_constant:
4322 break;
4324 case tcc_declaration:
4325 /* Variables allocated to static storage can stay. */
4326 if (!TREE_STATIC (exp))
4327 push_without_duplicates (exp, refs);
4328 break;
4330 case tcc_expression:
4331 /* This is the pattern built in ada/make_aligning_type. */
4332 if (code == ADDR_EXPR
4333 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4335 push_without_duplicates (exp, refs);
4336 break;
4339 /* Fall through. */
4341 case tcc_exceptional:
4342 case tcc_unary:
4343 case tcc_binary:
4344 case tcc_comparison:
4345 case tcc_reference:
4346 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4347 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4348 break;
4350 case tcc_vl_exp:
4351 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4352 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4353 break;
4355 default:
4356 gcc_unreachable ();
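/* Illustrative usage sketch (SIZE_EXPR is a hypothetical expression):

     unsigned int i;
     tree ref;
     auto_vec<tree> refs;
     find_placeholder_in_expr (size_expr, &refs);
     FOR_EACH_VEC_ELT (refs, i, ref)
       ;   // e.g. pre-evaluate or remap each collected reference

   Callers typically use this to pre-compute every placeholder-dependent
   reference of a self-referential size before substituting it.  */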
4360 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4361 return a tree with all occurrences of references to F in a
4362 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4363 CONST_DECLs. Note that we assume here that EXP contains only
4364 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4365 occurring only in their argument list. */
4367 tree
4368 substitute_in_expr (tree exp, tree f, tree r)
4370 enum tree_code code = TREE_CODE (exp);
4371 tree op0, op1, op2, op3;
4372 tree new_tree;
4374 /* We handle TREE_LIST and COMPONENT_REF separately. */
4375 if (code == TREE_LIST)
4377 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4378 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4379 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4380 return exp;
4382 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4384 else if (code == COMPONENT_REF)
4386 tree inner;
4388 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4389 and it is the right field, replace it with R. */
4390 for (inner = TREE_OPERAND (exp, 0);
4391 REFERENCE_CLASS_P (inner);
4392 inner = TREE_OPERAND (inner, 0))
4395 /* The field. */
4396 op1 = TREE_OPERAND (exp, 1);
4398 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4399 return r;
4401 /* If this expression hasn't been completed yet, leave it alone. */
4402 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4403 return exp;
4405 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4406 if (op0 == TREE_OPERAND (exp, 0))
4407 return exp;
4409 new_tree
4410 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4412 else
4413 switch (TREE_CODE_CLASS (code))
4415 case tcc_constant:
4416 return exp;
4418 case tcc_declaration:
4419 if (exp == f)
4420 return r;
4421 else
4422 return exp;
4424 case tcc_expression:
4425 if (exp == f)
4426 return r;
4428 /* Fall through. */
4430 case tcc_exceptional:
4431 case tcc_unary:
4432 case tcc_binary:
4433 case tcc_comparison:
4434 case tcc_reference:
4435 switch (TREE_CODE_LENGTH (code))
4437 case 0:
4438 return exp;
4440 case 1:
4441 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4442 if (op0 == TREE_OPERAND (exp, 0))
4443 return exp;
4445 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4446 break;
4448 case 2:
4449 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4450 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4452 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4453 return exp;
4455 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4456 break;
4458 case 3:
4459 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4460 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4461 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4463 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4464 && op2 == TREE_OPERAND (exp, 2))
4465 return exp;
4467 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4468 break;
4470 case 4:
4471 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4472 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4473 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4474 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4476 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4477 && op2 == TREE_OPERAND (exp, 2)
4478 && op3 == TREE_OPERAND (exp, 3))
4479 return exp;
4481 new_tree
4482 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4483 break;
4485 default:
4486 gcc_unreachable ();
4488 break;
4490 case tcc_vl_exp:
4492 int i;
4494 new_tree = NULL_TREE;
4496 /* If we are trying to replace F with a constant or with another
4497 instance of one of the arguments of the call, inline back
4498 functions that do nothing other than compute a value from
4499 the arguments they are passed. This makes it possible to
4500 fold the replacement expression partially or entirely. */
4501 if (code == CALL_EXPR)
4503 bool maybe_inline = false;
4504 if (CONSTANT_CLASS_P (r))
4505 maybe_inline = true;
4506 else
4507 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4508 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4510 maybe_inline = true;
4511 break;
4513 if (maybe_inline)
4515 tree t = maybe_inline_call_in_expr (exp);
4516 if (t)
4517 return SUBSTITUTE_IN_EXPR (t, f, r);
4521 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4523 tree op = TREE_OPERAND (exp, i);
4524 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4525 if (new_op != op)
4527 if (!new_tree)
4528 new_tree = copy_node (exp);
4529 TREE_OPERAND (new_tree, i) = new_op;
4533 if (new_tree)
4535 new_tree = fold (new_tree);
4536 if (TREE_CODE (new_tree) == CALL_EXPR)
4537 process_call_operands (new_tree);
4539 else
4540 return exp;
4542 break;
4544 default:
4545 gcc_unreachable ();
4548 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4550 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4551 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4553 return new_tree;
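/* Illustrative sketch (hypothetical trees): given a size expression SIZE
   that refers to a FIELD_DECL F through the placeholder machinery,
   substituting a known value for F might look like

     tree val    = build_int_cst (sizetype, 16);
     tree folded = substitute_in_expr (size, f, val);

   Because the rebuild goes through fold_build*, the result is simplified
   as far as possible.  */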
4556 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4557 for it within OBJ, a tree that is an object or a chain of references. */
4559 tree
4560 substitute_placeholder_in_expr (tree exp, tree obj)
4562 enum tree_code code = TREE_CODE (exp);
4563 tree op0, op1, op2, op3;
4564 tree new_tree;
4566 /* If this is a PLACEHOLDER_EXPR, see if we can find an object of the
4567 corresponding type in the chain of OBJ. */
4568 if (code == PLACEHOLDER_EXPR)
4570 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4571 tree elt;
4573 for (elt = obj; elt != 0;
4574 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4575 || TREE_CODE (elt) == COND_EXPR)
4576 ? TREE_OPERAND (elt, 1)
4577 : (REFERENCE_CLASS_P (elt)
4578 || UNARY_CLASS_P (elt)
4579 || BINARY_CLASS_P (elt)
4580 || VL_EXP_CLASS_P (elt)
4581 || EXPRESSION_CLASS_P (elt))
4582 ? TREE_OPERAND (elt, 0) : 0))
4583 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4584 return elt;
4586 for (elt = obj; elt != 0;
4587 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4588 || TREE_CODE (elt) == COND_EXPR)
4589 ? TREE_OPERAND (elt, 1)
4590 : (REFERENCE_CLASS_P (elt)
4591 || UNARY_CLASS_P (elt)
4592 || BINARY_CLASS_P (elt)
4593 || VL_EXP_CLASS_P (elt)
4594 || EXPRESSION_CLASS_P (elt))
4595 ? TREE_OPERAND (elt, 0) : 0))
4596 if (POINTER_TYPE_P (TREE_TYPE (elt))
4597 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4598 == need_type))
4599 return fold_build1 (INDIRECT_REF, need_type, elt);
4601 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4602 survives until RTL generation, there will be an error. */
4603 return exp;
4606 /* TREE_LIST is special because we need to look at TREE_VALUE
4607 and TREE_CHAIN, not TREE_OPERANDS. */
4608 else if (code == TREE_LIST)
4610 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4611 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4612 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4613 return exp;
4615 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4617 else
4618 switch (TREE_CODE_CLASS (code))
4620 case tcc_constant:
4621 case tcc_declaration:
4622 return exp;
4624 case tcc_exceptional:
4625 case tcc_unary:
4626 case tcc_binary:
4627 case tcc_comparison:
4628 case tcc_expression:
4629 case tcc_reference:
4630 case tcc_statement:
4631 switch (TREE_CODE_LENGTH (code))
4633 case 0:
4634 return exp;
4636 case 1:
4637 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4638 if (op0 == TREE_OPERAND (exp, 0))
4639 return exp;
4641 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4642 break;
4644 case 2:
4645 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4646 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4648 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4649 return exp;
4651 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4652 break;
4654 case 3:
4655 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4656 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4657 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4659 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4660 && op2 == TREE_OPERAND (exp, 2))
4661 return exp;
4663 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4664 break;
4666 case 4:
4667 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4668 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4669 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4670 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4672 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4673 && op2 == TREE_OPERAND (exp, 2)
4674 && op3 == TREE_OPERAND (exp, 3))
4675 return exp;
4677 new_tree
4678 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4679 break;
4681 default:
4682 gcc_unreachable ();
4684 break;
4686 case tcc_vl_exp:
4688 int i;
4690 new_tree = NULL_TREE;
4692 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4694 tree op = TREE_OPERAND (exp, i);
4695 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4696 if (new_op != op)
4698 if (!new_tree)
4699 new_tree = copy_node (exp);
4700 TREE_OPERAND (new_tree, i) = new_op;
4704 if (new_tree)
4706 new_tree = fold (new_tree);
4707 if (TREE_CODE (new_tree) == CALL_EXPR)
4708 process_call_operands (new_tree);
4710 else
4711 return exp;
4713 break;
4715 default:
4716 gcc_unreachable ();
4719 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4721 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4722 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4724 return new_tree;
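/* Illustrative sketch (TYPE and OBJ are hypothetical): the self-referential
   size of a type can be instantiated for a concrete object with the
   companion macro from tree.h, e.g.

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (type), obj);

   which leaves the expression unchanged when it contains no
   PLACEHOLDER_EXPR.  */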
4728 /* Subroutine of stabilize_reference; this is called for subtrees of
4729 references. Any expression with side-effects must be put in a SAVE_EXPR
4730 to ensure that it is only evaluated once.
4732 We don't put SAVE_EXPR nodes around everything, because assigning very
4733 simple expressions to temporaries causes us to miss good opportunities
4734 for optimizations. Among other things, the opportunity to fold in the
4735 addition of a constant into an addressing mode often gets lost, e.g.
4736 "y[i+1] += x;". In general, we take the approach that we should not make
4737 an assignment unless we are forced into it - i.e., that any non-side effect
4738 operator should be allowed, and that cse should take care of coalescing
4739 multiple utterances of the same expression should that prove fruitful. */
4741 static tree
4742 stabilize_reference_1 (tree e)
4744 tree result;
4745 enum tree_code code = TREE_CODE (e);
4747 /* We cannot ignore const expressions, because the expression might be a
4748 reference to a const array whose index contains side-effects. But we
4749 can ignore things that are actual constants or that have already been
4750 handled by this function. */
4752 if (tree_invariant_p (e))
4753 return e;
4755 switch (TREE_CODE_CLASS (code))
4757 case tcc_exceptional:
4758 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4759 have side-effects. */
4760 if (code == STATEMENT_LIST)
4761 return save_expr (e);
4762 /* FALLTHRU */
4763 case tcc_type:
4764 case tcc_declaration:
4765 case tcc_comparison:
4766 case tcc_statement:
4767 case tcc_expression:
4768 case tcc_reference:
4769 case tcc_vl_exp:
4770 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4771 so that it will only be evaluated once. */
4772 /* The reference (r) and comparison (<) classes could be handled as
4773 below, but it is generally faster to only evaluate them once. */
4774 if (TREE_SIDE_EFFECTS (e))
4775 return save_expr (e);
4776 return e;
4778 case tcc_constant:
4779 /* Constants need no processing. In fact, we should never reach
4780 here. */
4781 return e;
4783 case tcc_binary:
4784 /* Division is slow and tends to be compiled with jumps,
4785 especially the division by powers of 2 that is often
4786 found inside of an array reference. So do it just once. */
4787 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4788 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4789 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4790 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4791 return save_expr (e);
4792 /* Recursively stabilize each operand. */
4793 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4794 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4795 break;
4797 case tcc_unary:
4798 /* Recursively stabilize each operand. */
4799 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4800 break;
4802 default:
4803 gcc_unreachable ();
4806 TREE_TYPE (result) = TREE_TYPE (e);
4807 TREE_READONLY (result) = TREE_READONLY (e);
4808 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4809 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4811 return result;
4814 /* Stabilize a reference so that we can use it any number of times
4815 without causing its operands to be evaluated more than once.
4816 Returns the stabilized reference. This works by means of save_expr,
4817 so see the caveats in the comments about save_expr.
4819 Also allows conversion expressions whose operands are references.
4820 Any other kind of expression is returned unchanged. */
4822 tree
4823 stabilize_reference (tree ref)
4825 tree result;
4826 enum tree_code code = TREE_CODE (ref);
4828 switch (code)
4830 case VAR_DECL:
4831 case PARM_DECL:
4832 case RESULT_DECL:
4833 /* No action is needed in this case. */
4834 return ref;
4836 CASE_CONVERT:
4837 case FLOAT_EXPR:
4838 case FIX_TRUNC_EXPR:
4839 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4840 break;
4842 case INDIRECT_REF:
4843 result = build_nt (INDIRECT_REF,
4844 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4845 break;
4847 case COMPONENT_REF:
4848 result = build_nt (COMPONENT_REF,
4849 stabilize_reference (TREE_OPERAND (ref, 0)),
4850 TREE_OPERAND (ref, 1), NULL_TREE);
4851 break;
4853 case BIT_FIELD_REF:
4854 result = build_nt (BIT_FIELD_REF,
4855 stabilize_reference (TREE_OPERAND (ref, 0)),
4856 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4857 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4858 break;
4860 case ARRAY_REF:
4861 result = build_nt (ARRAY_REF,
4862 stabilize_reference (TREE_OPERAND (ref, 0)),
4863 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4864 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4865 break;
4867 case ARRAY_RANGE_REF:
4868 result = build_nt (ARRAY_RANGE_REF,
4869 stabilize_reference (TREE_OPERAND (ref, 0)),
4870 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4871 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4872 break;
4874 case COMPOUND_EXPR:
4875 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4876 it wouldn't be ignored. This matters when dealing with
4877 volatiles. */
4878 return stabilize_reference_1 (ref);
4880 /* If arg isn't a kind of lvalue we recognize, make no change.
4881 Caller should recognize the error for an invalid lvalue. */
4882 default:
4883 return ref;
4885 case ERROR_MARK:
4886 return error_mark_node;
4889 TREE_TYPE (result) = TREE_TYPE (ref);
4890 TREE_READONLY (result) = TREE_READONLY (ref);
4891 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4892 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4893 protected_set_expr_location (result, EXPR_LOCATION (ref));
4895 return result;
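/* Illustrative sketch (hypothetical trees): for a reference such as
   a[i++].f used as the destination of a compound assignment, a front end
   might do

     tree lhs = stabilize_reference (ref);   // ref is the hypothetical a[i++].f
     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, one);  // one is hypothetical
     tree asg = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

   so the side-effecting index i++ ends up in a SAVE_EXPR and is evaluated
   only once even though LHS appears twice.  */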
4898 /* Low-level constructors for expressions. */
4900 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4901 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4903 void
4904 recompute_tree_invariant_for_addr_expr (tree t)
4906 tree node;
4907 bool tc = true, se = false;
4909 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4911 /* We started out assuming this address is both invariant and constant and
4912 that it does not have side effects. Now go down any handled components
4913 and see if any of them involve offsets that are either non-constant or
4914 non-invariant. Also check for side-effects.
4916 ??? Note that this code makes no attempt to deal with the case where
4917 taking the address of something causes a copy due to misalignment. */
4919 #define UPDATE_FLAGS(NODE) \
4920 do { tree _node = (NODE); \
4921 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4922 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4924 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4925 node = TREE_OPERAND (node, 0))
4927 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4928 array reference (probably made temporarily by the G++ front end),
4929 so ignore all the operands. */
4930 if ((TREE_CODE (node) == ARRAY_REF
4931 || TREE_CODE (node) == ARRAY_RANGE_REF)
4932 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4934 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4935 if (TREE_OPERAND (node, 2))
4936 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4937 if (TREE_OPERAND (node, 3))
4938 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4940 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4941 FIELD_DECL, apparently. The G++ front end can put something else
4942 there, at least temporarily. */
4943 else if (TREE_CODE (node) == COMPONENT_REF
4944 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4946 if (TREE_OPERAND (node, 2))
4947 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4951 node = lang_hooks.expr_to_decl (node, &tc, &se);
4953 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4954 the address, since &(*a)->b is a form of addition. If it's a constant, the
4955 address is constant too. If it's a decl, its address is constant if the
4956 decl is static. Everything else is not constant and, furthermore,
4957 taking the address of a volatile variable is not volatile. */
4958 if (INDIRECT_REF_P (node)
4959 || TREE_CODE (node) == MEM_REF)
4960 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4961 else if (CONSTANT_CLASS_P (node))
4963 else if (DECL_P (node))
4964 tc &= (staticp (node) != NULL_TREE);
4965 else
4967 tc = false;
4968 se |= TREE_SIDE_EFFECTS (node);
4972 TREE_CONSTANT (t) = tc;
4973 TREE_SIDE_EFFECTS (t) = se;
4974 #undef UPDATE_FLAGS
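/* Illustrative outcomes (hypothetical decls): &static_var and
   &static_array[3] end up with TREE_CONSTANT set; &local_var, &*p and
   &static_array[i] do not; and an address whose offset has side effects,
   e.g. &static_array[i++], gets TREE_SIDE_EFFECTS set instead.  */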
4977 /* Build an expression of code CODE, data type TYPE, and operands as
4978 specified. Expressions and reference nodes can be created this way.
4979 Constants, decls, types and misc nodes cannot be.
4981 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4982 enough for all extant tree codes. */
4984 tree
4985 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4987 tree t;
4989 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4991 t = make_node (code PASS_MEM_STAT);
4992 TREE_TYPE (t) = tt;
4994 return t;
4997 tree
4998 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
5000 int length = sizeof (struct tree_exp);
5001 tree t;
5003 record_node_allocation_statistics (code, length);
5005 gcc_assert (TREE_CODE_LENGTH (code) == 1);
5007 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
5009 memset (t, 0, sizeof (struct tree_common));
5011 TREE_SET_CODE (t, code);
5013 TREE_TYPE (t) = type;
5014 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
5015 TREE_OPERAND (t, 0) = node;
5016 if (node && !TYPE_P (node))
5018 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
5019 TREE_READONLY (t) = TREE_READONLY (node);
5022 if (TREE_CODE_CLASS (code) == tcc_statement)
5024 if (code != DEBUG_BEGIN_STMT)
5025 TREE_SIDE_EFFECTS (t) = 1;
5027 else switch (code)
5029 case VA_ARG_EXPR:
5030 /* All of these have side-effects, no matter what their
5031 operands are. */
5032 TREE_SIDE_EFFECTS (t) = 1;
5033 TREE_READONLY (t) = 0;
5034 break;
5036 case INDIRECT_REF:
5037 /* Whether a dereference is readonly has nothing to do with whether
5038 its operand is readonly. */
5039 TREE_READONLY (t) = 0;
5040 break;
5042 case ADDR_EXPR:
5043 if (node)
5044 recompute_tree_invariant_for_addr_expr (t);
5045 break;
5047 default:
5048 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5049 && node && !TYPE_P (node)
5050 && TREE_CONSTANT (node))
5051 TREE_CONSTANT (t) = 1;
5052 if (TREE_CODE_CLASS (code) == tcc_reference
5053 && node && TREE_THIS_VOLATILE (node))
5054 TREE_THIS_VOLATILE (t) = 1;
5055 break;
5058 return t;
5061 #define PROCESS_ARG(N) \
5062 do { \
5063 TREE_OPERAND (t, N) = arg##N; \
5064 if (arg##N &&!TYPE_P (arg##N)) \
5066 if (TREE_SIDE_EFFECTS (arg##N)) \
5067 side_effects = 1; \
5068 if (!TREE_READONLY (arg##N) \
5069 && !CONSTANT_CLASS_P (arg##N)) \
5070 (void) (read_only = 0); \
5071 if (!TREE_CONSTANT (arg##N)) \
5072 (void) (constant = 0); \
5074 } while (0)
5076 tree
5077 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5079 bool constant, read_only, side_effects, div_by_zero;
5080 tree t;
5082 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5084 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5085 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5086 /* When sizetype precision doesn't match that of pointers
5087 we need to be able to build explicit extensions or truncations
5088 of the offset argument. */
5089 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5090 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5091 && TREE_CODE (arg1) == INTEGER_CST);
5093 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5094 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5095 && ptrofftype_p (TREE_TYPE (arg1)));
5097 t = make_node (code PASS_MEM_STAT);
5098 TREE_TYPE (t) = tt;
5100 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5101 result based on those same flags for the arguments. But if the
5102 arguments aren't really even `tree' expressions, we shouldn't be trying
5103 to do this. */
5105 /* Expressions without side effects may be constant if their
5106 arguments are as well. */
5107 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5108 || TREE_CODE_CLASS (code) == tcc_binary);
5109 read_only = 1;
5110 side_effects = TREE_SIDE_EFFECTS (t);
5112 switch (code)
5114 case TRUNC_DIV_EXPR:
5115 case CEIL_DIV_EXPR:
5116 case FLOOR_DIV_EXPR:
5117 case ROUND_DIV_EXPR:
5118 case EXACT_DIV_EXPR:
5119 case CEIL_MOD_EXPR:
5120 case FLOOR_MOD_EXPR:
5121 case ROUND_MOD_EXPR:
5122 case TRUNC_MOD_EXPR:
5123 div_by_zero = integer_zerop (arg1);
5124 break;
5125 default:
5126 div_by_zero = false;
5129 PROCESS_ARG (0);
5130 PROCESS_ARG (1);
5132 TREE_SIDE_EFFECTS (t) = side_effects;
5133 if (code == MEM_REF)
5135 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5137 tree o = TREE_OPERAND (arg0, 0);
5138 TREE_READONLY (t) = TREE_READONLY (o);
5139 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5142 else
5144 TREE_READONLY (t) = read_only;
5145 /* Don't mark X / 0 as constant. */
5146 TREE_CONSTANT (t) = constant && !div_by_zero;
5147 TREE_THIS_VOLATILE (t)
5148 = (TREE_CODE_CLASS (code) == tcc_reference
5149 && arg0 && TREE_THIS_VOLATILE (arg0));
5152 return t;
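/* Illustrative example: building 2 + 2 with build2 yields a PLUS_EXPR
   whose TREE_CONSTANT flag is set, since PLUS_EXPR is a tcc_binary code
   and both operands are constants:

     tree two = build_int_cst (integer_type_node, 2);
     tree sum = build2 (PLUS_EXPR, integer_type_node, two, two);
     gcc_checking_assert (TREE_CONSTANT (sum));

   Note that build2 does not fold; use fold_build2 to obtain 4 directly.  */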
5156 tree
5157 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5158 tree arg2 MEM_STAT_DECL)
5160 bool constant, read_only, side_effects;
5161 tree t;
5163 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5164 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5166 t = make_node (code PASS_MEM_STAT);
5167 TREE_TYPE (t) = tt;
5169 read_only = 1;
5171 /* As a special exception, if COND_EXPR has NULL branches, we
5172 assume that it is a gimple statement and always consider
5173 it to have side effects. */
5174 if (code == COND_EXPR
5175 && tt == void_type_node
5176 && arg1 == NULL_TREE
5177 && arg2 == NULL_TREE)
5178 side_effects = true;
5179 else
5180 side_effects = TREE_SIDE_EFFECTS (t);
5182 PROCESS_ARG (0);
5183 PROCESS_ARG (1);
5184 PROCESS_ARG (2);
5186 if (code == COND_EXPR)
5187 TREE_READONLY (t) = read_only;
5189 TREE_SIDE_EFFECTS (t) = side_effects;
5190 TREE_THIS_VOLATILE (t)
5191 = (TREE_CODE_CLASS (code) == tcc_reference
5192 && arg0 && TREE_THIS_VOLATILE (arg0));
5194 return t;
5197 tree
5198 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5199 tree arg2, tree arg3 MEM_STAT_DECL)
5201 bool constant, read_only, side_effects;
5202 tree t;
5204 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5206 t = make_node (code PASS_MEM_STAT);
5207 TREE_TYPE (t) = tt;
5209 side_effects = TREE_SIDE_EFFECTS (t);
5211 PROCESS_ARG (0);
5212 PROCESS_ARG (1);
5213 PROCESS_ARG (2);
5214 PROCESS_ARG (3);
5216 TREE_SIDE_EFFECTS (t) = side_effects;
5217 TREE_THIS_VOLATILE (t)
5218 = (TREE_CODE_CLASS (code) == tcc_reference
5219 && arg0 && TREE_THIS_VOLATILE (arg0));
5221 return t;
5224 tree
5225 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5226 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5228 bool constant, read_only, side_effects;
5229 tree t;
5231 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5233 t = make_node (code PASS_MEM_STAT);
5234 TREE_TYPE (t) = tt;
5236 side_effects = TREE_SIDE_EFFECTS (t);
5238 PROCESS_ARG (0);
5239 PROCESS_ARG (1);
5240 PROCESS_ARG (2);
5241 PROCESS_ARG (3);
5242 PROCESS_ARG (4);
5244 TREE_SIDE_EFFECTS (t) = side_effects;
5245 if (code == TARGET_MEM_REF)
5247 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5249 tree o = TREE_OPERAND (arg0, 0);
5250 TREE_READONLY (t) = TREE_READONLY (o);
5251 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5254 else
5255 TREE_THIS_VOLATILE (t)
5256 = (TREE_CODE_CLASS (code) == tcc_reference
5257 && arg0 && TREE_THIS_VOLATILE (arg0));
5259 return t;
5262 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5263 on the pointer PTR. */
5265 tree
5266 build_simple_mem_ref_loc (location_t loc, tree ptr)
5268 poly_int64 offset = 0;
5269 tree ptype = TREE_TYPE (ptr);
5270 tree tem;
5271 /* For convenience allow addresses that collapse to a simple base
5272 and offset. */
5273 if (TREE_CODE (ptr) == ADDR_EXPR
5274 && (handled_component_p (TREE_OPERAND (ptr, 0))
5275 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5277 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5278 gcc_assert (ptr);
5279 if (TREE_CODE (ptr) == MEM_REF)
5281 offset += mem_ref_offset (ptr).force_shwi ();
5282 ptr = TREE_OPERAND (ptr, 0);
5284 else
5285 ptr = build_fold_addr_expr (ptr);
5286 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5288 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5289 ptr, build_int_cst (ptype, offset));
5290 SET_EXPR_LOCATION (tem, loc);
5291 return tem;
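/* Illustrative sketch (LOC and PTR are hypothetical; PTR has type int *):

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   or equivalently the build_simple_mem_ref macro from tree.h, which
   passes UNKNOWN_LOCATION.  The resulting MEM_REF has type int and a zero
   byte offset, unless PTR was an ADDR_EXPR that collapsed to a base plus
   offset as handled above.  */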
5294 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5296 poly_offset_int
5297 mem_ref_offset (const_tree t)
5299 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5300 SIGNED);
5303 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5304 offsetted by OFFSET units. */
5306 tree
5307 build_invariant_address (tree type, tree base, poly_int64 offset)
5309 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5310 build_fold_addr_expr (base),
5311 build_int_cst (ptr_type_node, offset));
5312 tree addr = build1 (ADDR_EXPR, type, ref);
5313 recompute_tree_invariant_for_addr_expr (addr);
5314 return addr;
5317 /* Similar except don't specify the TREE_TYPE
5318 and leave the TREE_SIDE_EFFECTS as 0.
5319 It is permissible for arguments to be null,
5320 or even garbage if their values do not matter. */
5322 tree
5323 build_nt (enum tree_code code, ...)
5325 tree t;
5326 int length;
5327 int i;
5328 va_list p;
5330 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5332 va_start (p, code);
5334 t = make_node (code);
5335 length = TREE_CODE_LENGTH (code);
5337 for (i = 0; i < length; i++)
5338 TREE_OPERAND (t, i) = va_arg (p, tree);
5340 va_end (p);
5341 return t;
5344 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5345 tree vec. */
5347 tree
5348 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5350 tree ret, t;
5351 unsigned int ix;
5353 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5354 CALL_EXPR_FN (ret) = fn;
5355 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5356 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5357 CALL_EXPR_ARG (ret, ix) = t;
5358 return ret;
5361 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5362 and data type TYPE.
5363 We do NOT enter this node in any sort of symbol table.
5365 LOC is the location of the decl.
5367 layout_decl is used to set up the decl's storage layout.
5368 Other slots are initialized to 0 or null pointers. */
5370 tree
5371 build_decl (location_t loc, enum tree_code code, tree name,
5372 tree type MEM_STAT_DECL)
5374 tree t;
5376 t = make_node (code PASS_MEM_STAT);
5377 DECL_SOURCE_LOCATION (t) = loc;
5379 /* if (type == error_mark_node)
5380 type = integer_type_node; */
5381 /* That is not done, deliberately, so that having error_mark_node
5382 as the type can suppress useless errors in the use of this variable. */
5384 DECL_NAME (t) = name;
5385 TREE_TYPE (t) = type;
5387 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5388 layout_decl (t, 0);
5390 return t;
5393 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5395 tree
5396 build_debug_expr_decl (tree type)
5398 tree vexpr = make_node (DEBUG_EXPR_DECL);
5399 DECL_ARTIFICIAL (vexpr) = 1;
5400 TREE_TYPE (vexpr) = type;
5401 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5402 return vexpr;
5405 /* Builds and returns function declaration with NAME and TYPE. */
5407 tree
5408 build_fn_decl (const char *name, tree type)
5410 tree id = get_identifier (name);
5411 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5413 DECL_EXTERNAL (decl) = 1;
5414 TREE_PUBLIC (decl) = 1;
5415 DECL_ARTIFICIAL (decl) = 1;
5416 TREE_NOTHROW (decl) = 1;
5418 return decl;
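/* Illustrative sketch (hypothetical name "foo"): declaring an external
   "int foo (void)" for internal use might look like

     tree type = build_function_type_list (integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("foo", type);

   The result is marked external, public, artificial and nothrow, as set
   just above.  */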
5421 vec<tree, va_gc> *all_translation_units;
5423 /* Builds a new translation-unit decl with name NAME, queues it in the
5424 global list of translation-unit decls and returns it. */
5426 tree
5427 build_translation_unit_decl (tree name)
5429 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5430 name, NULL_TREE);
5431 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5432 vec_safe_push (all_translation_units, tu);
5433 return tu;
5437 /* BLOCK nodes are used to represent the structure of binding contours
5438 and declarations, once those contours have been exited and their contents
5439 compiled. This information is used for outputting debugging info. */
5441 tree
5442 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5444 tree block = make_node (BLOCK);
5446 BLOCK_VARS (block) = vars;
5447 BLOCK_SUBBLOCKS (block) = subblocks;
5448 BLOCK_SUPERCONTEXT (block) = supercontext;
5449 BLOCK_CHAIN (block) = chain;
5450 return block;
5454 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5456 LOC is the location to use in tree T. */
5458 void
5459 protected_set_expr_location (tree t, location_t loc)
5461 if (CAN_HAVE_LOCATION_P (t))
5462 SET_EXPR_LOCATION (t, loc);
5463 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5465 t = expr_single (t);
5466 if (t && CAN_HAVE_LOCATION_P (t))
5467 SET_EXPR_LOCATION (t, loc);
5471 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5472 UNKNOWN_LOCATION. */
5474 void
5475 protected_set_expr_location_if_unset (tree t, location_t loc)
5477 t = expr_single (t);
5478 if (t && !EXPR_HAS_LOCATION (t))
5479 protected_set_expr_location (t, loc);
5482 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5483 of the various TYPE_QUAL values. */
5485 static void
5486 set_type_quals (tree type, int type_quals)
5488 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5489 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5490 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5491 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5492 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5495 /* Returns true iff CAND and BASE have equivalent language-specific
5496 qualifiers. */
5498 bool
5499 check_lang_type (const_tree cand, const_tree base)
5501 if (lang_hooks.types.type_hash_eq == NULL)
5502 return true;
5503 /* type_hash_eq currently only applies to these types. */
5504 if (TREE_CODE (cand) != FUNCTION_TYPE
5505 && TREE_CODE (cand) != METHOD_TYPE)
5506 return true;
5507 return lang_hooks.types.type_hash_eq (cand, base);
5510 /* This function checks to see if TYPE matches the size of one of the
5511 built-in atomic types, and returns that core atomic type. */
5513 static tree
5514 find_atomic_core_type (const_tree type)
5516 tree base_atomic_type;
5518 /* Only handle complete types. */
5519 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5520 return NULL_TREE;
5522 switch (tree_to_uhwi (TYPE_SIZE (type)))
5524 case 8:
5525 base_atomic_type = atomicQI_type_node;
5526 break;
5528 case 16:
5529 base_atomic_type = atomicHI_type_node;
5530 break;
5532 case 32:
5533 base_atomic_type = atomicSI_type_node;
5534 break;
5536 case 64:
5537 base_atomic_type = atomicDI_type_node;
5538 break;
5540 case 128:
5541 base_atomic_type = atomicTI_type_node;
5542 break;
5544 default:
5545 base_atomic_type = NULL_TREE;
5548 return base_atomic_type;
5551 /* Returns true iff unqualified CAND and BASE are equivalent. */
5553 bool
5554 check_base_type (const_tree cand, const_tree base)
5556 if (TYPE_NAME (cand) != TYPE_NAME (base)
5557 /* Apparently this is needed for Objective-C. */
5558 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5559 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5560 TYPE_ATTRIBUTES (base)))
5561 return false;
5562 /* Check alignment. */
5563 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5564 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5565 return true;
5566 /* Atomic types increase minimal alignment. We must do so as well
5567 or we get duplicated canonical types. See PR88686. */
5568 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5570 /* See if this object can map to a basic atomic type. */
5571 tree atomic_type = find_atomic_core_type (cand);
5572 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5573 return true;
5575 return false;
5578 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5580 bool
5581 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5583 return (TYPE_QUALS (cand) == type_quals
5584 && check_base_type (cand, base)
5585 && check_lang_type (cand, base));
5588 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5590 static bool
5591 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5593 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5594 && TYPE_NAME (cand) == TYPE_NAME (base)
5595 /* Apparently this is needed for Objective-C. */
5596 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5597 /* Check alignment. */
5598 && TYPE_ALIGN (cand) == align
5599 /* Check this is a user-aligned type as build_aligned_type
5600 would create. */
5601 && TYPE_USER_ALIGN (cand)
5602 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5603 TYPE_ATTRIBUTES (base))
5604 && check_lang_type (cand, base));
5607 /* Return a version of the TYPE, qualified as indicated by the
5608 TYPE_QUALS, if one exists. If no qualified version exists yet,
5609 return NULL_TREE. */
5611 tree
5612 get_qualified_type (tree type, int type_quals)
5614 if (TYPE_QUALS (type) == type_quals)
5615 return type;
5617 tree mv = TYPE_MAIN_VARIANT (type);
5618 if (check_qualified_type (mv, type, type_quals))
5619 return mv;
5621 /* Search the chain of variants to see if there is already one there just
5622 like the one we need to have. If so, use that existing one. We must
5623 preserve the TYPE_NAME, since there is code that depends on this. */
5624 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5625 if (check_qualified_type (*tp, type, type_quals))
5627 /* Put the found variant at the head of the variant list so
5628 frequently searched variants get found faster. The C++ FE
5629 benefits greatly from this. */
5630 tree t = *tp;
5631 *tp = TYPE_NEXT_VARIANT (t);
5632 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5633 TYPE_NEXT_VARIANT (mv) = t;
5634 return t;
5637 return NULL_TREE;
5640 /* Like get_qualified_type, but creates the type if it does not
5641 exist. This function never returns NULL_TREE. */
5643 tree
5644 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5646 tree t;
5648 /* See if we already have the appropriate qualified variant. */
5649 t = get_qualified_type (type, type_quals);
5651 /* If not, build it. */
5652 if (!t)
5654 t = build_variant_type_copy (type PASS_MEM_STAT);
5655 set_type_quals (t, type_quals);
5657 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5659 /* See if this object can map to a basic atomic type. */
5660 tree atomic_type = find_atomic_core_type (type);
5661 if (atomic_type)
5663 /* Ensure the alignment of this type is compatible with
5664 the required alignment of the atomic type. */
5665 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5666 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5670 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5671 /* Propagate structural equality. */
5672 SET_TYPE_STRUCTURAL_EQUALITY (t);
5673 else if (TYPE_CANONICAL (type) != type)
5674 /* Build the underlying canonical type, since it is different
5675 from TYPE. */
5677 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5678 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5680 else
5681 /* T is its own canonical type. */
5682 TYPE_CANONICAL (t) = t;
5686 return t;
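/* Illustrative example: obtaining the const-qualified variant of a type,
   creating it only if it does not already exist:

     tree ctype = build_qualified_type (type, TYPE_QUAL_CONST);

   whereas get_qualified_type above would return NULL_TREE in that case.  */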
5689 /* Create a variant of type T with alignment ALIGN. */
5691 tree
5692 build_aligned_type (tree type, unsigned int align)
5694 tree t;
5696 if (TYPE_PACKED (type)
5697 || TYPE_ALIGN (type) == align)
5698 return type;
5700 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5701 if (check_aligned_type (t, type, align))
5702 return t;
5704 t = build_variant_type_copy (type);
5705 SET_TYPE_ALIGN (t, align);
5706 TYPE_USER_ALIGN (t) = 1;
5708 return t;
5711 /* Create a new distinct copy of TYPE. The new type is made its own
5712 MAIN_VARIANT. If TYPE requires structural equality checks, the
5713 resulting type requires structural equality checks; otherwise, its
5714 TYPE_CANONICAL points to itself. */
5716 tree
5717 build_distinct_type_copy (tree type MEM_STAT_DECL)
5719 tree t = copy_node (type PASS_MEM_STAT);
5721 TYPE_POINTER_TO (t) = 0;
5722 TYPE_REFERENCE_TO (t) = 0;
5724 /* Set the canonical type either to a new equivalence class, or
5725 propagate the need for structural equality checks. */
5726 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5727 SET_TYPE_STRUCTURAL_EQUALITY (t);
5728 else
5729 TYPE_CANONICAL (t) = t;
5731 /* Make it its own variant. */
5732 TYPE_MAIN_VARIANT (t) = t;
5733 TYPE_NEXT_VARIANT (t) = 0;
5735 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5736 whose TREE_TYPE is not t. This can also happen in the Ada
5737 frontend when using subtypes. */
5739 return t;
5742 /* Create a new variant of TYPE, equivalent but distinct. This is so
5743 the caller can modify it. TYPE_CANONICAL for the return type will
5744 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5745 are considered equal by the language itself (or that both types
5746 require structural equality checks). */
5748 tree
5749 build_variant_type_copy (tree type MEM_STAT_DECL)
5751 tree t, m = TYPE_MAIN_VARIANT (type);
5753 t = build_distinct_type_copy (type PASS_MEM_STAT);
5755 /* Since we're building a variant, assume that it is a non-semantic
5756 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5757 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5758 /* Type variants have no alias set defined. */
5759 TYPE_ALIAS_SET (t) = -1;
5761 /* Add the new type to the chain of variants of TYPE. */
5762 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5763 TYPE_NEXT_VARIANT (m) = t;
5764 TYPE_MAIN_VARIANT (t) = m;
5766 return t;
5769 /* Return true if the "from" trees in both tree maps are equal. */
5771 int
5772 tree_map_base_eq (const void *va, const void *vb)
5774 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5775 *const b = (const struct tree_map_base *) vb;
5776 return (a->from == b->from);
5779 /* Hash a from tree in a tree_base_map. */
5781 unsigned int
5782 tree_map_base_hash (const void *item)
5784 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5787 /* Return true if this tree map structure is marked for garbage collection
5788 purposes. We simply return true if the from tree is marked, so that this
5789 structure goes away when the from tree goes away. */
5791 bool
5792 tree_map_base_marked_p (const void *p)
5794 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5797 /* Hash a from tree in a tree_map. */
5799 unsigned int
5800 tree_map_hash (const void *item)
5802 return (((const struct tree_map *) item)->hash);
5805 /* Hash a from tree in a tree_decl_map. */
5807 unsigned int
5808 tree_decl_map_hash (const void *item)
5810 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5813 /* Return the initialization priority for DECL. */
5815 priority_type
5816 decl_init_priority_lookup (tree decl)
5818 symtab_node *snode = symtab_node::get (decl);
5820 if (!snode)
5821 return DEFAULT_INIT_PRIORITY;
5822 return
5823 snode->get_init_priority ();
5826 /* Return the finalization priority for DECL. */
5828 priority_type
5829 decl_fini_priority_lookup (tree decl)
5831 cgraph_node *node = cgraph_node::get (decl);
5833 if (!node)
5834 return DEFAULT_INIT_PRIORITY;
5835 return
5836 node->get_fini_priority ();
5839 /* Set the initialization priority for DECL to PRIORITY. */
5841 void
5842 decl_init_priority_insert (tree decl, priority_type priority)
5844 struct symtab_node *snode;
5846 if (priority == DEFAULT_INIT_PRIORITY)
5848 snode = symtab_node::get (decl);
5849 if (!snode)
5850 return;
5852 else if (VAR_P (decl))
5853 snode = varpool_node::get_create (decl);
5854 else
5855 snode = cgraph_node::get_create (decl);
5856 snode->set_init_priority (priority);
5859 /* Set the finalization priority for DECL to PRIORITY. */
5861 void
5862 decl_fini_priority_insert (tree decl, priority_type priority)
5864 struct cgraph_node *node;
5866 if (priority == DEFAULT_INIT_PRIORITY)
5868 node = cgraph_node::get (decl);
5869 if (!node)
5870 return;
5872 else
5873 node = cgraph_node::get_create (decl);
5874 node->set_fini_priority (priority);
5877 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5879 static void
5880 print_debug_expr_statistics (void)
5882 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5883 (long) debug_expr_for_decl->size (),
5884 (long) debug_expr_for_decl->elements (),
5885 debug_expr_for_decl->collisions ());
5888 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5890 static void
5891 print_value_expr_statistics (void)
5893 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5894 (long) value_expr_for_decl->size (),
5895 (long) value_expr_for_decl->elements (),
5896 value_expr_for_decl->collisions ());
5899 /* Lookup a debug expression for FROM, and return it if we find one. */
5901 tree
5902 decl_debug_expr_lookup (tree from)
5904 struct tree_decl_map *h, in;
5905 in.base.from = from;
5907 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5908 if (h)
5909 return h->to;
5910 return NULL_TREE;
5913 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5915 void
5916 decl_debug_expr_insert (tree from, tree to)
5918 struct tree_decl_map *h;
5920 h = ggc_alloc<tree_decl_map> ();
5921 h->base.from = from;
5922 h->to = to;
5923 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5926 /* Lookup a value expression for FROM, and return it if we find one. */
5928 tree
5929 decl_value_expr_lookup (tree from)
5931 struct tree_decl_map *h, in;
5932 in.base.from = from;
5934 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5935 if (h)
5936 return h->to;
5937 return NULL_TREE;
5940 /* Insert a mapping FROM->TO in the value expression hashtable. */
5942 void
5943 decl_value_expr_insert (tree from, tree to)
5945 struct tree_decl_map *h;
5947 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5948 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5950 h = ggc_alloc<tree_decl_map> ();
5951 h->base.from = from;
5952 h->to = to;
5953 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5956 /* Lookup a vector of debug arguments for FROM, and return it if we
5957 find one. */
5959 vec<tree, va_gc> **
5960 decl_debug_args_lookup (tree from)
5962 struct tree_vec_map *h, in;
5964 if (!DECL_HAS_DEBUG_ARGS_P (from))
5965 return NULL;
5966 gcc_checking_assert (debug_args_for_decl != NULL);
5967 in.base.from = from;
5968 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5969 if (h)
5970 return &h->to;
5971 return NULL;
5974 /* Insert a mapping FROM->empty vector of debug arguments in the value
5975 expression hashtable. */
5977 vec<tree, va_gc> **
5978 decl_debug_args_insert (tree from)
5980 struct tree_vec_map *h;
5981 tree_vec_map **loc;
5983 if (DECL_HAS_DEBUG_ARGS_P (from))
5984 return decl_debug_args_lookup (from);
5985 if (debug_args_for_decl == NULL)
5986 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5987 h = ggc_alloc<tree_vec_map> ();
5988 h->base.from = from;
5989 h->to = NULL;
5990 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5991 *loc = h;
5992 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5993 return &h->to;
5996 /* Hashing of types so that we don't make duplicates.
5997 The entry point is `type_hash_canon'. */
5999 /* Generate the default hash code for TYPE. This is designed for
6000 speed, rather than maximum entropy. */
6002 hashval_t
6003 type_hash_canon_hash (tree type)
6005 inchash::hash hstate;
6007 hstate.add_int (TREE_CODE (type));
6009 if (TREE_TYPE (type))
6010 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6012 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6013 /* Just the identifier is adequate to distinguish. */
6014 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6016 switch (TREE_CODE (type))
6018 case METHOD_TYPE:
6019 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6020 /* FALLTHROUGH. */
6021 case FUNCTION_TYPE:
6022 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6023 if (TREE_VALUE (t) != error_mark_node)
6024 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6025 break;
6027 case OFFSET_TYPE:
6028 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6029 break;
6031 case ARRAY_TYPE:
6033 if (TYPE_DOMAIN (type))
6034 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6035 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6037 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6038 hstate.add_object (typeless);
6041 break;
6043 case INTEGER_TYPE:
6045 tree t = TYPE_MAX_VALUE (type);
6046 if (!t)
6047 t = TYPE_MIN_VALUE (type);
6048 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6049 hstate.add_object (TREE_INT_CST_ELT (t, i));
6050 break;
6053 case BITINT_TYPE:
6055 unsigned prec = TYPE_PRECISION (type);
6056 unsigned uns = TYPE_UNSIGNED (type);
6057 hstate.add_object (prec);
6058 hstate.add_int (uns);
6059 break;
6062 case REAL_TYPE:
6063 case FIXED_POINT_TYPE:
6065 unsigned prec = TYPE_PRECISION (type);
6066 hstate.add_object (prec);
6067 break;
6070 case VECTOR_TYPE:
6071 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6072 break;
6074 default:
6075 break;
6078 return hstate.end ();
6081 /* These are the Hashtable callback functions. */
6083 /* Returns true iff the types are equivalent. */
6085 bool
6086 type_cache_hasher::equal (type_hash *a, type_hash *b)
6088 /* First test the things that are the same for all types. */
6089 if (a->hash != b->hash
6090 || TREE_CODE (a->type) != TREE_CODE (b->type)
6091 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6092 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6093 TYPE_ATTRIBUTES (b->type))
6094 || (TREE_CODE (a->type) != COMPLEX_TYPE
6095 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6096 return false;
6098 /* Be careful about comparing arrays before and after the element type
6099 has been completed; don't compare TYPE_ALIGN unless both types are
6100 complete. */
6101 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6102 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6103 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6104 return false;
6106 switch (TREE_CODE (a->type))
6108 case VOID_TYPE:
6109 case OPAQUE_TYPE:
6110 case COMPLEX_TYPE:
6111 case POINTER_TYPE:
6112 case REFERENCE_TYPE:
6113 case NULLPTR_TYPE:
6114 return true;
6116 case VECTOR_TYPE:
6117 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6118 TYPE_VECTOR_SUBPARTS (b->type));
6120 case ENUMERAL_TYPE:
6121 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6122 && !(TYPE_VALUES (a->type)
6123 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6124 && TYPE_VALUES (b->type)
6125 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6126 && type_list_equal (TYPE_VALUES (a->type),
6127 TYPE_VALUES (b->type))))
6128 return false;
6130 /* fall through */
6132 case INTEGER_TYPE:
6133 case REAL_TYPE:
6134 case BOOLEAN_TYPE:
6135 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6136 return false;
6137 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6138 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6139 TYPE_MAX_VALUE (b->type)))
6140 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6141 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6142 TYPE_MIN_VALUE (b->type))));
6144 case BITINT_TYPE:
6145 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6146 return false;
6147 return TYPE_UNSIGNED (a->type) == TYPE_UNSIGNED (b->type);
6149 case FIXED_POINT_TYPE:
6150 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6152 case OFFSET_TYPE:
6153 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6155 case METHOD_TYPE:
6156 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6157 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6158 || (TYPE_ARG_TYPES (a->type)
6159 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6160 && TYPE_ARG_TYPES (b->type)
6161 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6162 && type_list_equal (TYPE_ARG_TYPES (a->type),
6163 TYPE_ARG_TYPES (b->type)))))
6164 break;
6165 return false;
6166 case ARRAY_TYPE:
6167 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6168 where the flag should be inherited from the element type
6169 and can change after ARRAY_TYPEs are created; on non-aggregates
6170 compare it and hash it; scalars will never have that flag set,
6171 and we need to differentiate between arrays created by different
6172 front ends and middle-end-created arrays. */
6173 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6174 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6175 || (TYPE_TYPELESS_STORAGE (a->type)
6176 == TYPE_TYPELESS_STORAGE (b->type))));
6178 case RECORD_TYPE:
6179 case UNION_TYPE:
6180 case QUAL_UNION_TYPE:
6181 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6182 || (TYPE_FIELDS (a->type)
6183 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6184 && TYPE_FIELDS (b->type)
6185 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6186 && type_list_equal (TYPE_FIELDS (a->type),
6187 TYPE_FIELDS (b->type))));
6189 case FUNCTION_TYPE:
6190 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6191 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6192 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6193 || (TYPE_ARG_TYPES (a->type)
6194 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6195 && TYPE_ARG_TYPES (b->type)
6196 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6197 && type_list_equal (TYPE_ARG_TYPES (a->type),
6198 TYPE_ARG_TYPES (b->type))))
6199 break;
6200 return false;
6202 default:
6203 return false;
6206 if (lang_hooks.types.type_hash_eq != NULL)
6207 return lang_hooks.types.type_hash_eq (a->type, b->type);
6209 return true;
6212 /* Given TYPE, and HASHCODE its hash code, return the canonical
6213 object for an identical type if one already exists.
6214 Otherwise, return TYPE, and record it as the canonical object.
6216 To use this function, first create a type of the sort you want.
6217 Then compute its hash code from the fields of the type that
6218 make it different from other similar types.
6219 Then call this function and use the value. */
6221 tree
6222 type_hash_canon (unsigned int hashcode, tree type)
6224 type_hash in;
6225 type_hash **loc;
6227 /* The hash table only contains main variants, so ensure that's what we're
6228 being passed. */
6229 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6231 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6232 must call that routine before comparing TYPE_ALIGNs. */
6233 layout_type (type);
6235 in.hash = hashcode;
6236 in.type = type;
6238 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6239 if (*loc)
6241 tree t1 = ((type_hash *) *loc)->type;
6242 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6243 && t1 != type);
6244 if (TYPE_UID (type) + 1 == next_type_uid)
6245 --next_type_uid;
6246 /* Also free the min/max values and the cache for integer
6247 types. This can't be done in free_node, as LTO frees
6248 those on its own. */
6249 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == BITINT_TYPE)
6251 if (TYPE_MIN_VALUE (type)
6252 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6254 /* Zero is always in TYPE_CACHED_VALUES. */
6255 if (! TYPE_UNSIGNED (type))
6256 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6257 ggc_free (TYPE_MIN_VALUE (type));
6259 if (TYPE_MAX_VALUE (type)
6260 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6262 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6263 ggc_free (TYPE_MAX_VALUE (type));
6265 if (TYPE_CACHED_VALUES_P (type))
6266 ggc_free (TYPE_CACHED_VALUES (type));
6268 free_node (type);
6269 return t1;
6271 else
6273 struct type_hash *h;
6275 h = ggc_alloc<type_hash> ();
6276 h->hash = hashcode;
6277 h->type = type;
6278 *loc = h;
6280 return type;
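/* Illustrative usage sketch (mirroring what build_nonstandard_integer_type
   below does; not additional functionality):

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 24;
     fixup_unsigned_type (t);
     inchash::hash hstate;
     inchash::add_expr (TYPE_MAX_VALUE (t), hstate);
     t = type_hash_canon (hstate.end (), t);

   If an equivalent type was already registered, the freshly built node
   is freed and the existing canonical node is returned instead.  */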
6284 static void
6285 print_type_hash_statistics (void)
6287 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6288 (long) type_hash_table->size (),
6289 (long) type_hash_table->elements (),
6290 type_hash_table->collisions ());
6293 /* Given two lists of types
6294 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6295 return 1 if the lists contain the same types in the same order.
6296 Also, the TREE_PURPOSEs must match. */
6298 bool
6299 type_list_equal (const_tree l1, const_tree l2)
6301 const_tree t1, t2;
6303 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6304 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6305 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6306 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6307 && (TREE_TYPE (TREE_PURPOSE (t1))
6308 == TREE_TYPE (TREE_PURPOSE (t2))))))
6309 return false;
6311 return t1 == t2;
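/* Usage sketch (illustrative): two lists built as

     tree l1 = tree_cons (NULL_TREE, integer_type_node, void_list_node);
     tree l2 = tree_cons (NULL_TREE, integer_type_node, void_list_node);

   satisfy type_list_equal (l1, l2) even though l1 != l2, because the
   TREE_VALUEs and TREE_PURPOSEs match element by element and both
   chains end at the same point.  */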
6314 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6315 given by FNTYPE. If the argument list accepts variable arguments,
6316 then this function counts only the ordinary arguments. */
6319 type_num_arguments (const_tree fntype)
6321 int i = 0;
6323 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6324 /* If the function does not take a variable number of arguments,
6325 the last element in the list will have type `void'. */
6326 if (VOID_TYPE_P (TREE_VALUE (t)))
6327 break;
6328 else
6329 ++i;
6331 return i;
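/* For example (illustrative): for a FUNCTION_TYPE built with
   build_function_type_list (void_type_node, integer_type_node,
   ptr_type_node, NULL_TREE), i.e. "void f (int, void *)",
   type_num_arguments returns 2.  For a varargs type only the named
   arguments are counted.  */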
6334 /* Return the type of the function FNTYPE's argument ARGNO if known.
6335 For vararg functions, where ARGNO refers to one of the variadic
6336 arguments, return null. Otherwise, return void_type_node for an
6337 out-of-bounds ARGNO. */
6339 tree
6340 type_argument_type (const_tree fntype, unsigned argno)
6342 /* Treat zero the same as an out-of-bounds argument number. */
6343 if (!argno)
6344 return void_type_node;
6346 function_args_iterator iter;
6348 tree argtype;
6349 unsigned i = 1;
6350 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6352 /* A vararg function's argument list ends in a null. Otherwise,
6353 an ordinary function's argument list ends with void. Return
6354 null if ARGNO refers to a vararg argument, void_type_node if
6355 it's out of bounds, and the formal argument type otherwise. */
6356 if (!argtype)
6357 break;
6359 if (i == argno || VOID_TYPE_P (argtype))
6360 return argtype;
6362 ++i;
6365 return NULL_TREE;
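/* Usage sketch (illustrative): for the type of "int f (int, char *, ...)",
   type_argument_type (fntype, 1) is integer_type_node,
   type_argument_type (fntype, 2) is the char * type,
   type_argument_type (fntype, 3) is NULL_TREE (a variadic argument), and
   type_argument_type (fntype, 0) is void_type_node (out of bounds).  */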
6368 /* True if integer constants T1 and T2
6369 represent the same constant value. */
6371 bool
6372 tree_int_cst_equal (const_tree t1, const_tree t2)
6374 if (t1 == t2)
6375 return true;
6377 if (t1 == 0 || t2 == 0)
6378 return false;
6380 STRIP_ANY_LOCATION_WRAPPER (t1);
6381 STRIP_ANY_LOCATION_WRAPPER (t2);
6383 if (TREE_CODE (t1) == INTEGER_CST
6384 && TREE_CODE (t2) == INTEGER_CST
6385 && wi::to_widest (t1) == wi::to_widest (t2))
6386 return true;
6388 return false;
6391 /* Return true if T is an INTEGER_CST whose numerical value (extended
6392 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6394 bool
6395 tree_fits_shwi_p (const_tree t)
6397 return (t != NULL_TREE
6398 && TREE_CODE (t) == INTEGER_CST
6399 && wi::fits_shwi_p (wi::to_widest (t)));
6402 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6403 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6405 bool
6406 tree_fits_poly_int64_p (const_tree t)
6408 if (t == NULL_TREE)
6409 return false;
6410 if (POLY_INT_CST_P (t))
6412 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6413 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6414 return false;
6415 return true;
6417 return (TREE_CODE (t) == INTEGER_CST
6418 && wi::fits_shwi_p (wi::to_widest (t)));
6421 /* Return true if T is an INTEGER_CST whose numerical value (extended
6422 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6424 bool
6425 tree_fits_uhwi_p (const_tree t)
6427 return (t != NULL_TREE
6428 && TREE_CODE (t) == INTEGER_CST
6429 && wi::fits_uhwi_p (wi::to_widest (t)));
6432 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6433 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6435 bool
6436 tree_fits_poly_uint64_p (const_tree t)
6438 if (t == NULL_TREE)
6439 return false;
6440 if (POLY_INT_CST_P (t))
6442 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6443 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6444 return false;
6445 return true;
6447 return (TREE_CODE (t) == INTEGER_CST
6448 && wi::fits_uhwi_p (wi::to_widest (t)));
6451 /* T is an INTEGER_CST whose numerical value (extended according to
6452 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6453 HOST_WIDE_INT. */
6455 HOST_WIDE_INT
6456 tree_to_shwi (const_tree t)
6458 gcc_assert (tree_fits_shwi_p (t));
6459 return TREE_INT_CST_LOW (t);
6462 /* T is an INTEGER_CST whose numerical value (extended according to
6463 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6464 HOST_WIDE_INT. */
6466 unsigned HOST_WIDE_INT
6467 tree_to_uhwi (const_tree t)
6469 gcc_assert (tree_fits_uhwi_p (t));
6470 return TREE_INT_CST_LOW (t);
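/* Usage sketch (illustrative): the fits/to pairs above are meant to be
   used together, e.g.

     if (tree_fits_uhwi_p (len))
       n = tree_to_uhwi (len);

   where LEN is some INTEGER_CST; calling tree_to_uhwi on a constant
   that does not satisfy tree_fits_uhwi_p trips its gcc_assert.  */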
6473 /* Return the most significant (sign) bit of T. */
6476 tree_int_cst_sign_bit (const_tree t)
6478 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6480 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6483 /* Return an indication of the sign of the integer constant T.
6484 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6485 Note that -1 will never be returned if T's type is unsigned. */
6488 tree_int_cst_sgn (const_tree t)
6490 if (wi::to_wide (t) == 0)
6491 return 0;
6492 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6493 return 1;
6494 else if (wi::neg_p (wi::to_wide (t)))
6495 return -1;
6496 else
6497 return 1;
6500 /* Return the minimum number of bits needed to represent VALUE in a
6501 signed or unsigned type, SGN says which. */
6503 unsigned int
6504 tree_int_cst_min_precision (tree value, signop sgn)
6506 /* If the value is negative, compute its negative minus 1. The latter
6507 adjustment is because the absolute value of the largest negative value
6508 is one larger than the largest positive value. This is equivalent to
6509 a bit-wise negation, so use that operation instead. */
6511 if (tree_int_cst_sgn (value) < 0)
6512 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6514 /* Return the number of bits needed, taking into account the fact
6515 that we need one more bit for a signed than unsigned type.
6516 If value is 0 or -1, the minimum precision is 1 no matter
6517 whether SGN is SIGNED or UNSIGNED. */
6519 if (integer_zerop (value))
6520 return 1;
6521 else
6522 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
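/* Worked examples (illustrative): for VALUE 5 the result is
   floor_log2 (5) + 1 = 3 bits when SGN is UNSIGNED and 4 bits when SGN
   is SIGNED (one extra bit for the sign).  For VALUE -3 the value is
   first replaced by ~(-3) = 2, giving floor_log2 (2) + 1 + 1 = 3 bits,
   the smallest signed precision whose range [-4, 3] contains -3.  */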
6525 /* Return truthvalue of whether T1 is the same tree structure as T2.
6526 Return 1 if they are the same.
6527 Return 0 if they are understandably different.
6528 Return -1 if either contains tree structure not understood by
6529 this function. */
6532 simple_cst_equal (const_tree t1, const_tree t2)
6534 enum tree_code code1, code2;
6535 int cmp;
6536 int i;
6538 if (t1 == t2)
6539 return 1;
6540 if (t1 == 0 || t2 == 0)
6541 return 0;
6543 /* For location wrappers to be the same, they must be at the same
6544 source location (and wrap the same thing). */
6545 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6547 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6548 return 0;
6549 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6552 code1 = TREE_CODE (t1);
6553 code2 = TREE_CODE (t2);
6555 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6557 if (CONVERT_EXPR_CODE_P (code2)
6558 || code2 == NON_LVALUE_EXPR)
6559 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6560 else
6561 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6564 else if (CONVERT_EXPR_CODE_P (code2)
6565 || code2 == NON_LVALUE_EXPR)
6566 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6568 if (code1 != code2)
6569 return 0;
6571 switch (code1)
6573 case INTEGER_CST:
6574 return wi::to_widest (t1) == wi::to_widest (t2);
6576 case REAL_CST:
6577 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6579 case FIXED_CST:
6580 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6582 case STRING_CST:
6583 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6584 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6585 TREE_STRING_LENGTH (t1)));
6587 case CONSTRUCTOR:
6589 unsigned HOST_WIDE_INT idx;
6590 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6591 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6593 if (vec_safe_length (v1) != vec_safe_length (v2))
6594 return false;
6596 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6597 /* ??? Should we also handle fields here? */
6598 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6599 return false;
6600 return true;
6603 case SAVE_EXPR:
6604 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6606 case CALL_EXPR:
6607 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6608 if (cmp <= 0)
6609 return cmp;
6610 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6611 return 0;
6613 const_tree arg1, arg2;
6614 const_call_expr_arg_iterator iter1, iter2;
6615 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6616 arg2 = first_const_call_expr_arg (t2, &iter2);
6617 arg1 && arg2;
6618 arg1 = next_const_call_expr_arg (&iter1),
6619 arg2 = next_const_call_expr_arg (&iter2))
6621 cmp = simple_cst_equal (arg1, arg2);
6622 if (cmp <= 0)
6623 return cmp;
6625 return arg1 == arg2;
6628 case TARGET_EXPR:
6629 /* Special case: if either target is an unallocated VAR_DECL,
6630 it means that it's going to be unified with whatever the
6631 TARGET_EXPR is really supposed to initialize, so treat it
6632 as being equivalent to anything. */
6633 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6634 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6635 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6636 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6637 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6638 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6639 cmp = 1;
6640 else
6641 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6643 if (cmp <= 0)
6644 return cmp;
6646 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6648 case WITH_CLEANUP_EXPR:
6649 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6650 if (cmp <= 0)
6651 return cmp;
6653 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6655 case COMPONENT_REF:
6656 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6657 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6659 return 0;
6661 case VAR_DECL:
6662 case PARM_DECL:
6663 case CONST_DECL:
6664 case FUNCTION_DECL:
6665 return 0;
6667 default:
6668 if (POLY_INT_CST_P (t1))
6669 /* A false return means maybe_ne rather than known_ne. */
6670 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6671 TYPE_SIGN (TREE_TYPE (t1))),
6672 poly_widest_int::from (poly_int_cst_value (t2),
6673 TYPE_SIGN (TREE_TYPE (t2))));
6674 break;
6677 /* This general rule works for most tree codes. All exceptions should be
6678 handled above. If this is a language-specific tree code, we can't
6679 trust what might be in the operand, so say we don't know
6680 the situation. */
6681 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6682 return -1;
6684 switch (TREE_CODE_CLASS (code1))
6686 case tcc_unary:
6687 case tcc_binary:
6688 case tcc_comparison:
6689 case tcc_expression:
6690 case tcc_reference:
6691 case tcc_statement:
6692 cmp = 1;
6693 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6695 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6696 if (cmp <= 0)
6697 return cmp;
6700 return cmp;
6702 default:
6703 return -1;
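/* Note for callers (illustrative): simple_cst_equal is tri-state, so
   only a result of exactly 1 means "known equal", e.g.

     if (simple_cst_equal (a, b) == 1)
       ...

   0 means "known different" and -1 means "contains structure this
   function does not understand".  */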
6707 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6708 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6709 than U, respectively. */
6712 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6714 if (tree_int_cst_sgn (t) < 0)
6715 return -1;
6716 else if (!tree_fits_uhwi_p (t))
6717 return 1;
6718 else if (TREE_INT_CST_LOW (t) == u)
6719 return 0;
6720 else if (TREE_INT_CST_LOW (t) < u)
6721 return -1;
6722 else
6723 return 1;
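/* Usage sketch (illustrative): a caller checking whether a constant
   byte size exceeds 16 can write

     if (compare_tree_int (TYPE_SIZE_UNIT (type), 16) > 0)

   which also copes with values that do not fit an unsigned
   HOST_WIDE_INT, since those compare as greater than any U.  */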
6726 /* Return true if SIZE represents a constant size that is in bounds of
6727 what the middle-end and the backend accept (covering not more than
6728 half of the address-space).
6729 When PERR is non-null, set *PERR on failure to the description of
6730 why SIZE is not valid. */
6732 bool
6733 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6735 if (POLY_INT_CST_P (size))
6737 if (TREE_OVERFLOW (size))
6738 return false;
6739 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6740 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6741 return false;
6742 return true;
6745 cst_size_error error;
6746 if (!perr)
6747 perr = &error;
6749 if (TREE_CODE (size) != INTEGER_CST)
6751 *perr = cst_size_not_constant;
6752 return false;
6755 if (TREE_OVERFLOW_P (size))
6757 *perr = cst_size_overflow;
6758 return false;
6761 if (tree_int_cst_sgn (size) < 0)
6763 *perr = cst_size_negative;
6764 return false;
6766 if (!tree_fits_uhwi_p (size)
6767 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6768 < wi::to_widest (size) * 2))
6770 *perr = cst_size_too_big;
6771 return false;
6774 return true;
6777 /* Return the precision of the type, or for a complex or vector type the
6778 precision of the type of its elements. */
6780 unsigned int
6781 element_precision (const_tree type)
6783 if (!TYPE_P (type))
6784 type = TREE_TYPE (type);
6785 enum tree_code code = TREE_CODE (type);
6786 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6787 type = TREE_TYPE (type);
6789 return TYPE_PRECISION (type);
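/* For example (illustrative): element_precision on a COMPLEX_TYPE of
   double, on a VECTOR_TYPE of four ints, or on an expression of either
   type returns the precision of the element type (64 resp. 32 on
   typical targets), not a property of the composite type.  */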
6792 /* Return true if CODE represents an associative tree code. Otherwise
6793 return false. */
6794 bool
6795 associative_tree_code (enum tree_code code)
6797 switch (code)
6799 case BIT_IOR_EXPR:
6800 case BIT_AND_EXPR:
6801 case BIT_XOR_EXPR:
6802 case PLUS_EXPR:
6803 case MULT_EXPR:
6804 case MIN_EXPR:
6805 case MAX_EXPR:
6806 return true;
6808 default:
6809 break;
6811 return false;
6814 /* Return true if CODE represents a commutative tree code. Otherwise
6815 return false. */
6816 bool
6817 commutative_tree_code (enum tree_code code)
6819 switch (code)
6821 case PLUS_EXPR:
6822 case MULT_EXPR:
6823 case MULT_HIGHPART_EXPR:
6824 case MIN_EXPR:
6825 case MAX_EXPR:
6826 case BIT_IOR_EXPR:
6827 case BIT_XOR_EXPR:
6828 case BIT_AND_EXPR:
6829 case NE_EXPR:
6830 case EQ_EXPR:
6831 case UNORDERED_EXPR:
6832 case ORDERED_EXPR:
6833 case UNEQ_EXPR:
6834 case LTGT_EXPR:
6835 case TRUTH_AND_EXPR:
6836 case TRUTH_XOR_EXPR:
6837 case TRUTH_OR_EXPR:
6838 case WIDEN_MULT_EXPR:
6839 case VEC_WIDEN_MULT_HI_EXPR:
6840 case VEC_WIDEN_MULT_LO_EXPR:
6841 case VEC_WIDEN_MULT_EVEN_EXPR:
6842 case VEC_WIDEN_MULT_ODD_EXPR:
6843 return true;
6845 default:
6846 break;
6848 return false;
6851 /* Return true if CODE represents a ternary tree code for which the
6852 first two operands are commutative. Otherwise return false. */
6853 bool
6854 commutative_ternary_tree_code (enum tree_code code)
6856 switch (code)
6858 case WIDEN_MULT_PLUS_EXPR:
6859 case WIDEN_MULT_MINUS_EXPR:
6860 case DOT_PROD_EXPR:
6861 return true;
6863 default:
6864 break;
6866 return false;
6869 /* Returns true if CODE can overflow. */
6871 bool
6872 operation_can_overflow (enum tree_code code)
6874 switch (code)
6876 case PLUS_EXPR:
6877 case MINUS_EXPR:
6878 case MULT_EXPR:
6879 case LSHIFT_EXPR:
6880 /* Can overflow in various ways. */
6881 return true;
6882 case TRUNC_DIV_EXPR:
6883 case EXACT_DIV_EXPR:
6884 case FLOOR_DIV_EXPR:
6885 case CEIL_DIV_EXPR:
6886 /* For INT_MIN / -1. */
6887 return true;
6888 case NEGATE_EXPR:
6889 case ABS_EXPR:
6890 /* For -INT_MIN. */
6891 return true;
6892 default:
6893 /* These operators cannot overflow. */
6894 return false;
6898 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6899 -ftrapv doesn't generate trapping insns for CODE. */
6901 bool
6902 operation_no_trapping_overflow (tree type, enum tree_code code)
6904 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6906 /* We don't generate instructions that trap on overflow for complex or vector
6907 types. */
6908 if (!INTEGRAL_TYPE_P (type))
6909 return true;
6911 if (!TYPE_OVERFLOW_TRAPS (type))
6912 return true;
6914 switch (code)
6916 case PLUS_EXPR:
6917 case MINUS_EXPR:
6918 case MULT_EXPR:
6919 case NEGATE_EXPR:
6920 case ABS_EXPR:
6921 /* These operators can overflow, and -ftrapv generates trapping code for
6922 these. */
6923 return false;
6924 case TRUNC_DIV_EXPR:
6925 case EXACT_DIV_EXPR:
6926 case FLOOR_DIV_EXPR:
6927 case CEIL_DIV_EXPR:
6928 case LSHIFT_EXPR:
6929 /* These operators can overflow, but -ftrapv does not generate trapping
6930 code for these. */
6931 return true;
6932 default:
6933 /* These operators cannot overflow. */
6934 return true;
6938 /* Constructors for pointer, array and function types.
6939 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6940 constructed by language-dependent code, not here.) */
6942 /* Construct, lay out and return the type of pointers to TO_TYPE with
6943 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6944 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6945 indicate this type can reference all of memory. If such a type has
6946 already been constructed, reuse it. */
6948 tree
6949 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6950 bool can_alias_all)
6952 tree t;
6953 bool could_alias = can_alias_all;
6955 if (to_type == error_mark_node)
6956 return error_mark_node;
6958 if (mode == VOIDmode)
6960 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6961 mode = targetm.addr_space.pointer_mode (as);
6964 /* If the pointed-to type has the may_alias attribute set, force
6965 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6966 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6967 can_alias_all = true;
6969 /* In some cases, languages will have things that aren't a POINTER_TYPE
6970 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6971 In that case, return that type without regard to the rest of our
6972 operands.
6974 ??? This is a kludge, but consistent with the way this function has
6975 always operated and there doesn't seem to be a good way to avoid this
6976 at the moment. */
6977 if (TYPE_POINTER_TO (to_type) != 0
6978 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6979 return TYPE_POINTER_TO (to_type);
6981 /* First, if we already have a type for pointers to TO_TYPE and it's
6982 the proper mode, use it. */
6983 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6984 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6985 return t;
6987 t = make_node (POINTER_TYPE);
6989 TREE_TYPE (t) = to_type;
6990 SET_TYPE_MODE (t, mode);
6991 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6992 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6993 TYPE_POINTER_TO (to_type) = t;
6995 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6996 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6997 SET_TYPE_STRUCTURAL_EQUALITY (t);
6998 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6999 TYPE_CANONICAL (t)
7000 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7001 mode, false);
7003 /* Lay out the type. This function has many callers that are concerned
7004 with expression-construction, and this simplifies them all. */
7005 layout_type (t);
7007 return t;
7010 /* By default build pointers in ptr_mode. */
7012 tree
7013 build_pointer_type (tree to_type)
7015 return build_pointer_type_for_mode (to_type, VOIDmode, false);
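/* Usage sketch (illustrative):

     tree p1 = build_pointer_type (char_type_node);
     tree p2 = build_pointer_type (char_type_node);

   Both calls return the same POINTER_TYPE node: the second one finds
   the first on the TYPE_POINTER_TO / TYPE_NEXT_PTR_TO chain of
   char_type_node, since the mode and TYPE_REF_CAN_ALIAS_ALL match.  */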
7018 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7020 tree
7021 build_reference_type_for_mode (tree to_type, machine_mode mode,
7022 bool can_alias_all)
7024 tree t;
7025 bool could_alias = can_alias_all;
7027 if (to_type == error_mark_node)
7028 return error_mark_node;
7030 if (mode == VOIDmode)
7032 addr_space_t as = TYPE_ADDR_SPACE (to_type);
7033 mode = targetm.addr_space.pointer_mode (as);
7036 /* If the pointed-to type has the may_alias attribute set, force
7037 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7038 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7039 can_alias_all = true;
7041 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7042 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7043 In that case, return that type without regard to the rest of our
7044 operands.
7046 ??? This is a kludge, but consistent with the way this function has
7047 always operated and there doesn't seem to be a good way to avoid this
7048 at the moment. */
7049 if (TYPE_REFERENCE_TO (to_type) != 0
7050 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7051 return TYPE_REFERENCE_TO (to_type);
7053 /* First, if we already have a type for pointers to TO_TYPE and it's
7054 the proper mode, use it. */
7055 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7056 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7057 return t;
7059 t = make_node (REFERENCE_TYPE);
7061 TREE_TYPE (t) = to_type;
7062 SET_TYPE_MODE (t, mode);
7063 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7064 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7065 TYPE_REFERENCE_TO (to_type) = t;
7067 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7068 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7069 SET_TYPE_STRUCTURAL_EQUALITY (t);
7070 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7071 TYPE_CANONICAL (t)
7072 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7073 mode, false);
7075 layout_type (t);
7077 return t;
7081 /* Build the node for the type of references-to-TO_TYPE by default
7082 in ptr_mode. */
7084 tree
7085 build_reference_type (tree to_type)
7087 return build_reference_type_for_mode (to_type, VOIDmode, false);
7090 #define MAX_INT_CACHED_PREC \
7091 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7092 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7094 static void
7095 clear_nonstandard_integer_type_cache (void)
7097 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7099 nonstandard_integer_type_cache[i] = NULL;
7103 /* Builds a signed or unsigned integer type of precision PRECISION.
7104 Used for C bitfields whose precision does not match that of
7105 built-in target types. */
7106 tree
7107 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7108 int unsignedp)
7110 tree itype, ret;
7112 if (unsignedp)
7113 unsignedp = MAX_INT_CACHED_PREC + 1;
7115 if (precision <= MAX_INT_CACHED_PREC)
7117 itype = nonstandard_integer_type_cache[precision + unsignedp];
7118 if (itype)
7119 return itype;
7122 itype = make_node (INTEGER_TYPE);
7123 TYPE_PRECISION (itype) = precision;
7125 if (unsignedp)
7126 fixup_unsigned_type (itype);
7127 else
7128 fixup_signed_type (itype);
7130 inchash::hash hstate;
7131 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7132 ret = type_hash_canon (hstate.end (), itype);
7133 if (precision <= MAX_INT_CACHED_PREC)
7134 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7136 return ret;
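/* Usage sketch (illustrative): a front end handling a 24-bit unsigned
   bit-field such as "unsigned x : 24" would typically use

     tree t = build_nonstandard_integer_type (24, 1);

   Repeated requests with the same precision and signedness are served
   from nonstandard_integer_type_cache (for precisions up to
   MAX_INT_CACHED_PREC) or from the type hash table.  */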
7139 #define MAX_BOOL_CACHED_PREC \
7140 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7141 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7143 /* Builds a boolean type of precision PRECISION.
7144 Used for boolean vectors to choose proper vector element size. */
7145 tree
7146 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7148 tree type;
7150 if (precision <= MAX_BOOL_CACHED_PREC)
7152 type = nonstandard_boolean_type_cache[precision];
7153 if (type)
7154 return type;
7157 type = make_node (BOOLEAN_TYPE);
7158 TYPE_PRECISION (type) = precision;
7159 fixup_signed_type (type);
7161 if (precision <= MAX_INT_CACHED_PREC)
7162 nonstandard_boolean_type_cache[precision] = type;
7164 return type;
7167 static GTY(()) vec<tree, va_gc> *bitint_type_cache;
7169 /* Builds a signed or unsigned _BitInt(PRECISION) type. */
7170 tree
7171 build_bitint_type (unsigned HOST_WIDE_INT precision, int unsignedp)
7173 tree itype, ret;
7175 gcc_checking_assert (precision >= 1 + !unsignedp);
7177 if (unsignedp)
7178 unsignedp = MAX_INT_CACHED_PREC + 1;
7180 if (bitint_type_cache == NULL)
7181 vec_safe_grow_cleared (bitint_type_cache, 2 * MAX_INT_CACHED_PREC + 2);
7183 if (precision <= MAX_INT_CACHED_PREC)
7185 itype = (*bitint_type_cache)[precision + unsignedp];
7186 if (itype)
7187 return itype;
7190 itype = make_node (BITINT_TYPE);
7191 TYPE_PRECISION (itype) = precision;
7193 if (unsignedp)
7194 fixup_unsigned_type (itype);
7195 else
7196 fixup_signed_type (itype);
7198 inchash::hash hstate;
7199 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7200 ret = type_hash_canon (hstate.end (), itype);
7201 if (precision <= MAX_INT_CACHED_PREC)
7202 (*bitint_type_cache)[precision + unsignedp] = ret;
7204 return ret;
7207 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7208 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7209 is true, reuse such a type that has already been constructed. */
7211 static tree
7212 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7214 tree itype = make_node (INTEGER_TYPE);
7216 TREE_TYPE (itype) = type;
7218 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7219 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7221 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7222 SET_TYPE_MODE (itype, TYPE_MODE (type));
7223 TYPE_SIZE (itype) = TYPE_SIZE (type);
7224 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7225 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7226 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7227 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7229 if (!shared)
7230 return itype;
7232 if ((TYPE_MIN_VALUE (itype)
7233 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7234 || (TYPE_MAX_VALUE (itype)
7235 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7237 /* Since we cannot reliably merge this type, we need to compare it using
7238 structural equality checks. */
7239 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7240 return itype;
7243 hashval_t hash = type_hash_canon_hash (itype);
7244 itype = type_hash_canon (hash, itype);
7246 return itype;
7249 /* Wrapper around build_range_type_1 with SHARED set to true. */
7251 tree
7252 build_range_type (tree type, tree lowval, tree highval)
7254 return build_range_type_1 (type, lowval, highval, true);
7257 /* Wrapper around build_range_type_1 with SHARED set to false. */
7259 tree
7260 build_nonshared_range_type (tree type, tree lowval, tree highval)
7262 return build_range_type_1 (type, lowval, highval, false);
7265 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7266 MAXVAL should be the maximum value in the domain
7267 (one less than the length of the array).
7269 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7270 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7271 The limit exists because the result is a signed type and we don't handle
7272 sizes that use more than one HOST_WIDE_INT. */
7274 tree
7275 build_index_type (tree maxval)
7277 return build_range_type (sizetype, size_zero_node, maxval);
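/* Usage sketch (illustrative): the TYPE_DOMAIN for "int a[10]" is

     tree domain = build_index_type (size_int (9));

   i.e. a sizetype-based range [0, 9]; MAXVAL is the last valid index,
   one less than the number of elements.  */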
7280 /* Return true if the debug information for TYPE, a subtype, should be emitted
7281 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7282 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7283 debug info and doesn't reflect the source code. */
7285 bool
7286 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7288 tree base_type = TREE_TYPE (type), low, high;
7290 /* Subrange types have a base type which is an integral type. */
7291 if (!INTEGRAL_TYPE_P (base_type))
7292 return false;
7294 /* Get the real bounds of the subtype. */
7295 if (lang_hooks.types.get_subrange_bounds)
7296 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7297 else
7299 low = TYPE_MIN_VALUE (type);
7300 high = TYPE_MAX_VALUE (type);
7303 /* If the type and its base type have the same representation and the same
7304 name, then the type is not a subrange but a copy of the base type. */
7305 if ((TREE_CODE (base_type) == INTEGER_TYPE
7306 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7307 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7308 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7309 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7310 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7311 return false;
7313 if (lowval)
7314 *lowval = low;
7315 if (highval)
7316 *highval = high;
7317 return true;
7320 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7321 and number of elements specified by the range of values of INDEX_TYPE.
7322 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7323 If SHARED is true, reuse such a type that has already been constructed.
7324 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7326 tree
7327 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7328 bool shared, bool set_canonical)
7330 tree t;
7332 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7334 error ("arrays of functions are not meaningful");
7335 elt_type = integer_type_node;
7338 t = make_node (ARRAY_TYPE);
7339 TREE_TYPE (t) = elt_type;
7340 TYPE_DOMAIN (t) = index_type;
7341 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7342 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7343 layout_type (t);
7345 if (shared)
7347 hashval_t hash = type_hash_canon_hash (t);
7348 t = type_hash_canon (hash, t);
7351 if (TYPE_CANONICAL (t) == t && set_canonical)
7353 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7354 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7355 || in_lto_p)
7356 SET_TYPE_STRUCTURAL_EQUALITY (t);
7357 else if (TYPE_CANONICAL (elt_type) != elt_type
7358 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7359 TYPE_CANONICAL (t)
7360 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7361 index_type
7362 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7363 typeless_storage, shared, set_canonical);
7366 return t;
7369 /* Wrapper around build_array_type_1 with SHARED set to true. */
7371 tree
7372 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7374 return
7375 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7378 /* Wrapper around build_array_type_1 with SHARED set to false. */
7380 tree
7381 build_nonshared_array_type (tree elt_type, tree index_type)
7383 return build_array_type_1 (elt_type, index_type, false, false, true);
7386 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7387 sizetype. */
7389 tree
7390 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7392 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
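/* Usage sketch (illustrative): for "char buf[16]" either of

     tree a1 = build_array_type (char_type_node,
                                 build_index_type (size_int (15)), false);
     tree a2 = build_array_type_nelts (char_type_node, 16);

   yields the same shared ARRAY_TYPE node, since both go through the
   type hash table with identical element type and domain.  */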
7395 /* Computes the canonical argument types from the argument type list
7396 ARGTYPES.
7398 Upon return, *ANY_STRUCTURAL_P will be true iff it was true on
7399 entry to this function or any of the ARGTYPES are
7400 structural.
7402 Upon return, *ANY_NONCANONICAL_P will be true iff it was true on
7403 entry to this function or any of the ARGTYPES are
7404 non-canonical.
7406 Returns a canonical argument list, which may be ARGTYPES when the
7407 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7408 true) or would not differ from ARGTYPES. */
7410 static tree
7411 maybe_canonicalize_argtypes (tree argtypes,
7412 bool *any_structural_p,
7413 bool *any_noncanonical_p)
7415 tree arg;
7416 bool any_noncanonical_argtypes_p = false;
7418 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7420 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7421 /* Fail gracefully by stating that the type is structural. */
7422 *any_structural_p = true;
7423 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7424 *any_structural_p = true;
7425 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7426 || TREE_PURPOSE (arg))
7427 /* If the argument has a default argument, we consider it
7428 non-canonical even though the type itself is canonical.
7429 That way, different variants of function and method types
7430 with default arguments will all point to the variant with
7431 no defaults as their canonical type. */
7432 any_noncanonical_argtypes_p = true;
7435 if (*any_structural_p)
7436 return argtypes;
7438 if (any_noncanonical_argtypes_p)
7440 /* Build the canonical list of argument types. */
7441 tree canon_argtypes = NULL_TREE;
7442 bool is_void = false;
7444 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7446 if (arg == void_list_node)
7447 is_void = true;
7448 else
7449 canon_argtypes = tree_cons (NULL_TREE,
7450 TYPE_CANONICAL (TREE_VALUE (arg)),
7451 canon_argtypes);
7454 canon_argtypes = nreverse (canon_argtypes);
7455 if (is_void)
7456 canon_argtypes = chainon (canon_argtypes, void_list_node);
7458 /* There is a non-canonical type. */
7459 *any_noncanonical_p = true;
7460 return canon_argtypes;
7463 /* The canonical argument types are the same as ARGTYPES. */
7464 return argtypes;
7467 /* Construct, lay out and return
7468 the type of functions returning type VALUE_TYPE
7469 given arguments of types ARG_TYPES.
7470 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7471 are data type nodes for the arguments of the function.
7472 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7473 variable-arguments function with (...) prototype (no named arguments).
7474 If such a type has already been constructed, reuse it. */
7476 tree
7477 build_function_type (tree value_type, tree arg_types,
7478 bool no_named_args_stdarg_p)
7480 tree t;
7481 inchash::hash hstate;
7482 bool any_structural_p, any_noncanonical_p;
7483 tree canon_argtypes;
7485 gcc_assert (arg_types != error_mark_node);
7487 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7489 error ("function return type cannot be function");
7490 value_type = integer_type_node;
7493 /* Make a node of the sort we want. */
7494 t = make_node (FUNCTION_TYPE);
7495 TREE_TYPE (t) = value_type;
7496 TYPE_ARG_TYPES (t) = arg_types;
7497 if (no_named_args_stdarg_p)
7499 gcc_assert (arg_types == NULL_TREE);
7500 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7503 /* If we already have such a type, use the old one. */
7504 hashval_t hash = type_hash_canon_hash (t);
7505 t = type_hash_canon (hash, t);
7507 /* Set up the canonical type. */
7508 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7509 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7510 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7511 &any_structural_p,
7512 &any_noncanonical_p);
7513 if (any_structural_p)
7514 SET_TYPE_STRUCTURAL_EQUALITY (t);
7515 else if (any_noncanonical_p)
7516 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7517 canon_argtypes);
7519 if (!COMPLETE_TYPE_P (t))
7520 layout_type (t);
7521 return t;
7524 /* Build a function type. The RETURN_TYPE is the type returned by the
7525 function. If VAARGS is set, no void_type_node is appended to the
7526 list. ARGP must always be terminated by a NULL_TREE. */
7528 static tree
7529 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7531 tree t, args, last;
7533 t = va_arg (argp, tree);
7534 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7535 args = tree_cons (NULL_TREE, t, args);
7537 if (vaargs)
7539 last = args;
7540 if (args != NULL_TREE)
7541 args = nreverse (args);
7542 gcc_assert (last != void_list_node);
7544 else if (args == NULL_TREE)
7545 args = void_list_node;
7546 else
7548 last = args;
7549 args = nreverse (args);
7550 TREE_CHAIN (last) = void_list_node;
7552 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7554 return args;
7557 /* Build a function type. The RETURN_TYPE is the type returned by the
7558 function. If additional arguments are provided, they are
7559 additional argument types. The list of argument types must always
7560 be terminated by NULL_TREE. */
7562 tree
7563 build_function_type_list (tree return_type, ...)
7565 tree args;
7566 va_list p;
7568 va_start (p, return_type);
7569 args = build_function_type_list_1 (false, return_type, p);
7570 va_end (p);
7571 return args;
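/* Usage sketch (illustrative): the type of "int f (void *, size_t)"
   can be built as

     tree fntype = build_function_type_list (integer_type_node,
                                             ptr_type_node, size_type_node,
                                             NULL_TREE);

   The trailing NULL_TREE terminates the variadic argument list to this
   builder; the void terminator of the TYPE_ARG_TYPES chain is added
   internally.  */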
7574 /* Build a variable argument function type. The RETURN_TYPE is the
7575 type returned by the function. If additional arguments are provided,
7576 they are additional argument types. The list of argument types must
7577 always be terminated by NULL_TREE. */
7579 tree
7580 build_varargs_function_type_list (tree return_type, ...)
7582 tree args;
7583 va_list p;
7585 va_start (p, return_type);
7586 args = build_function_type_list_1 (true, return_type, p);
7587 va_end (p);
7589 return args;
7592 /* Build a function type. RETURN_TYPE is the type returned by the
7593 function; VAARGS indicates whether the function takes varargs. The
7594 function takes N named arguments, the types of which are provided in
7595 ARG_TYPES. */
7597 static tree
7598 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7599 tree *arg_types)
7601 int i;
7602 tree t = vaargs ? NULL_TREE : void_list_node;
7604 for (i = n - 1; i >= 0; i--)
7605 t = tree_cons (NULL_TREE, arg_types[i], t);
7607 return build_function_type (return_type, t, vaargs && n == 0);
7610 /* Build a function type. RETURN_TYPE is the type returned by the
7611 function. The function takes N named arguments, the types of which
7612 are provided in ARG_TYPES. */
7614 tree
7615 build_function_type_array (tree return_type, int n, tree *arg_types)
7617 return build_function_type_array_1 (false, return_type, n, arg_types);
7620 /* Build a variable argument function type. RETURN_TYPE is the type
7621 returned by the function. The function takes N named arguments, the
7622 types of which are provided in ARG_TYPES. */
7624 tree
7625 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7627 return build_function_type_array_1 (true, return_type, n, arg_types);
7630 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7631 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7632 for the method. An implicit additional parameter (of type
7633 pointer-to-BASETYPE) is added to the ARGTYPES. */
7635 tree
7636 build_method_type_directly (tree basetype,
7637 tree rettype,
7638 tree argtypes)
7640 tree t;
7641 tree ptype;
7642 bool any_structural_p, any_noncanonical_p;
7643 tree canon_argtypes;
7645 /* Make a node of the sort we want. */
7646 t = make_node (METHOD_TYPE);
7648 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7649 TREE_TYPE (t) = rettype;
7650 ptype = build_pointer_type (basetype);
7652 /* The actual arglist for this function includes a "hidden" argument
7653 which is "this". Put it into the list of argument types. */
7654 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7655 TYPE_ARG_TYPES (t) = argtypes;
7657 /* If we already have such a type, use the old one. */
7658 hashval_t hash = type_hash_canon_hash (t);
7659 t = type_hash_canon (hash, t);
7661 /* Set up the canonical type. */
7662 any_structural_p
7663 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7664 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7665 any_noncanonical_p
7666 = (TYPE_CANONICAL (basetype) != basetype
7667 || TYPE_CANONICAL (rettype) != rettype);
7668 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7669 &any_structural_p,
7670 &any_noncanonical_p);
7671 if (any_structural_p)
7672 SET_TYPE_STRUCTURAL_EQUALITY (t);
7673 else if (any_noncanonical_p)
7674 TYPE_CANONICAL (t)
7675 = build_method_type_directly (TYPE_CANONICAL (basetype),
7676 TYPE_CANONICAL (rettype),
7677 canon_argtypes);
7678 if (!COMPLETE_TYPE_P (t))
7679 layout_type (t);
7681 return t;
7684 /* Construct, lay out and return the type of methods belonging to class
7685 BASETYPE and whose arguments and values are described by TYPE.
7686 If that type exists already, reuse it.
7687 TYPE must be a FUNCTION_TYPE node. */
7689 tree
7690 build_method_type (tree basetype, tree type)
7692 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7694 return build_method_type_directly (basetype,
7695 TREE_TYPE (type),
7696 TYPE_ARG_TYPES (type));
7699 /* Construct, lay out and return the type of offsets to a value
7700 of type TYPE, within an object of type BASETYPE.
7701 If a suitable offset type exists already, reuse it. */
7703 tree
7704 build_offset_type (tree basetype, tree type)
7706 tree t;
7708 /* Make a node of the sort we want. */
7709 t = make_node (OFFSET_TYPE);
7711 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7712 TREE_TYPE (t) = type;
7714 /* If we already have such a type, use the old one. */
7715 hashval_t hash = type_hash_canon_hash (t);
7716 t = type_hash_canon (hash, t);
7718 if (!COMPLETE_TYPE_P (t))
7719 layout_type (t);
7721 if (TYPE_CANONICAL (t) == t)
7723 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7724 || TYPE_STRUCTURAL_EQUALITY_P (type))
7725 SET_TYPE_STRUCTURAL_EQUALITY (t);
7726 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7727 || TYPE_CANONICAL (type) != type)
7728 TYPE_CANONICAL (t)
7729 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7730 TYPE_CANONICAL (type));
7733 return t;
7736 /* Create a complex type whose components are COMPONENT_TYPE.
7738 If NAMED is true, the type is given a TYPE_NAME. We do not always
7739 do so because this creates a DECL node and thus makes the DECL_UIDs
7740 dependent on the type canonicalization hashtable, which is GC-ed,
7741 so the DECL_UIDs would not be stable wrt garbage collection. */
7743 tree
7744 build_complex_type (tree component_type, bool named)
7746 gcc_assert (INTEGRAL_TYPE_P (component_type)
7747 || SCALAR_FLOAT_TYPE_P (component_type)
7748 || FIXED_POINT_TYPE_P (component_type));
7750 /* Make a node of the sort we want. */
7751 tree probe = make_node (COMPLEX_TYPE);
7753 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7755 /* If we already have such a type, use the old one. */
7756 hashval_t hash = type_hash_canon_hash (probe);
7757 tree t = type_hash_canon (hash, probe);
7759 if (t == probe)
7761 /* We created a new type. The hash insertion will have laid
7762 out the type. We need to check the canonicalization and
7763 maybe set the name. */
7764 gcc_checking_assert (COMPLETE_TYPE_P (t)
7765 && !TYPE_NAME (t)
7766 && TYPE_CANONICAL (t) == t);
7768 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7769 SET_TYPE_STRUCTURAL_EQUALITY (t);
7770 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7771 TYPE_CANONICAL (t)
7772 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7774 /* We need to create a name, since complex is a fundamental type. */
7775 if (named)
7777 const char *name = NULL;
7779 if (TREE_TYPE (t) == char_type_node)
7780 name = "complex char";
7781 else if (TREE_TYPE (t) == signed_char_type_node)
7782 name = "complex signed char";
7783 else if (TREE_TYPE (t) == unsigned_char_type_node)
7784 name = "complex unsigned char";
7785 else if (TREE_TYPE (t) == short_integer_type_node)
7786 name = "complex short int";
7787 else if (TREE_TYPE (t) == short_unsigned_type_node)
7788 name = "complex short unsigned int";
7789 else if (TREE_TYPE (t) == integer_type_node)
7790 name = "complex int";
7791 else if (TREE_TYPE (t) == unsigned_type_node)
7792 name = "complex unsigned int";
7793 else if (TREE_TYPE (t) == long_integer_type_node)
7794 name = "complex long int";
7795 else if (TREE_TYPE (t) == long_unsigned_type_node)
7796 name = "complex long unsigned int";
7797 else if (TREE_TYPE (t) == long_long_integer_type_node)
7798 name = "complex long long int";
7799 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7800 name = "complex long long unsigned int";
7802 if (name != NULL)
7803 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7804 get_identifier (name), t);
7808 return build_qualified_type (t, TYPE_QUALS (component_type));
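/* Usage sketch (illustrative):

     tree c = build_complex_type (integer_type_node, true);

   yields the type of the C "_Complex int"; with NAMED true and a
   recognized component type the node also gets a TYPE_NAME, here
   "complex int", per the table above.  */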
7811 /* If TYPE is a real or complex floating-point type and the target
7812 does not directly support arithmetic on TYPE then return the wider
7813 type to be used for arithmetic on TYPE. Otherwise, return
7814 NULL_TREE. */
7816 tree
7817 excess_precision_type (tree type)
7819 /* The target can give two different responses to the question of
7820 which excess precision mode it would like depending on whether we
7821 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7823 enum excess_precision_type requested_type
7824 = (flag_excess_precision == EXCESS_PRECISION_FAST
7825 ? EXCESS_PRECISION_TYPE_FAST
7826 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7827 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7829 enum flt_eval_method target_flt_eval_method
7830 = targetm.c.excess_precision (requested_type);
7832 /* The target should not ask for unpredictable float evaluation (though
7833 it might implicitly advertise that the evaluation is unpredictable,
7834 but we don't care about that here; it will have been reported
7835 elsewhere). If it does ask for unpredictable evaluation, we have
7836 nothing to do here. */
7837 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7839 /* Nothing to do. The target has asked for all types we know about
7840 to be computed with their native precision and range. */
7841 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7842 return NULL_TREE;
7844 /* The target will promote this type in a target-dependent way, so excess
7845 precision ought to leave it alone. */
7846 if (targetm.promoted_type (type) != NULL_TREE)
7847 return NULL_TREE;
7849 machine_mode float16_type_mode = (float16_type_node
7850 ? TYPE_MODE (float16_type_node)
7851 : VOIDmode);
7852 machine_mode bfloat16_type_mode = (bfloat16_type_node
7853 ? TYPE_MODE (bfloat16_type_node)
7854 : VOIDmode);
7855 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7856 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7858 switch (TREE_CODE (type))
7860 case REAL_TYPE:
7862 machine_mode type_mode = TYPE_MODE (type);
7863 switch (target_flt_eval_method)
7865 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7866 if (type_mode == float16_type_mode
7867 || type_mode == bfloat16_type_mode)
7868 return float_type_node;
7869 break;
7870 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7871 if (type_mode == float16_type_mode
7872 || type_mode == bfloat16_type_mode
7873 || type_mode == float_type_mode)
7874 return double_type_node;
7875 break;
7876 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7877 if (type_mode == float16_type_mode
7878 || type_mode == bfloat16_type_mode
7879 || type_mode == float_type_mode
7880 || type_mode == double_type_mode)
7881 return long_double_type_node;
7882 break;
7883 default:
7884 gcc_unreachable ();
7886 break;
7888 case COMPLEX_TYPE:
7890 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7891 return NULL_TREE;
7892 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7893 switch (target_flt_eval_method)
7895 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7896 if (type_mode == float16_type_mode
7897 || type_mode == bfloat16_type_mode)
7898 return complex_float_type_node;
7899 break;
7900 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7901 if (type_mode == float16_type_mode
7902 || type_mode == bfloat16_type_mode
7903 || type_mode == float_type_mode)
7904 return complex_double_type_node;
7905 break;
7906 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7907 if (type_mode == float16_type_mode
7908 || type_mode == bfloat16_type_mode
7909 || type_mode == float_type_mode
7910 || type_mode == double_type_mode)
7911 return complex_long_double_type_node;
7912 break;
7913 default:
7914 gcc_unreachable ();
7916 break;
7918 default:
7919 break;
7922 return NULL_TREE;
7925 /* Return OP, stripped of any conversions to wider types as much as is safe.
7926 Converting the value back to OP's type makes a value equivalent to OP.
7928 If FOR_TYPE is nonzero, we return a value which, if converted to
7929 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7931 OP must have integer, real or enumeral type. Pointers are not allowed!
7933 There are some cases where the obvious value we could return
7934 would regenerate to OP if converted to OP's type,
7935 but would not extend like OP to wider types.
7936 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7937 For example, if OP is (unsigned short)(signed char)-1,
7938 we avoid returning (signed char)-1 if FOR_TYPE is int,
7939 even though extending that to an unsigned short would regenerate OP,
7940 since the result of extending (signed char)-1 to (int)
7941 is different from (int) OP. */
7943 tree
7944 get_unwidened (tree op, tree for_type)
7946 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7947 tree type = TREE_TYPE (op);
7948 unsigned final_prec
7949 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7950 int uns
7951 = (for_type != 0 && for_type != type
7952 && final_prec > TYPE_PRECISION (type)
7953 && TYPE_UNSIGNED (type));
7954 tree win = op;
7956 while (CONVERT_EXPR_P (op))
7958 int bitschange;
7960 /* TYPE_PRECISION on vector types has different meaning
7961 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7962 so avoid them here. */
7963 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7964 break;
7966 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7967 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7969 /* Truncations are many-to-one and so cannot be removed,
7970 unless we are later going to truncate down even further. */
7971 if (bitschange < 0
7972 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7973 break;
7975 /* See what's inside this conversion. If we decide to strip it,
7976 we will set WIN. */
7977 op = TREE_OPERAND (op, 0);
7979 /* If we have not stripped any zero-extensions (uns is 0),
7980 we can strip any kind of extension.
7981 If we have previously stripped a zero-extension,
7982 only zero-extensions can safely be stripped.
7983 Any extension can be stripped if the bits it would produce
7984 are all going to be discarded later by truncating to FOR_TYPE. */
7986 if (bitschange > 0)
7988 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7989 win = op;
7990 /* TYPE_UNSIGNED says whether this is a zero-extension.
7991 Let's avoid computing it if it does not affect WIN
7992 and if UNS will not be needed again. */
7993 if ((uns
7994 || CONVERT_EXPR_P (op))
7995 && TYPE_UNSIGNED (TREE_TYPE (op)))
7997 uns = 1;
7998 win = op;
8003 /* If we finally reach a constant, see if it fits in something smaller
8004 and in that case convert it. */
8005 if (TREE_CODE (win) == INTEGER_CST)
8007 tree wtype = TREE_TYPE (win);
8008 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8009 if (for_type)
8010 prec = MAX (prec, final_prec);
8011 if (prec < TYPE_PRECISION (wtype))
8013 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8014 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8015 win = fold_convert (t, win);
8019 return win;
8022 /* Return OP or a simpler expression for a narrower value
8023 which can be sign-extended or zero-extended to give back OP.
8024 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8025 or 0 if the value should be sign-extended. */
8027 tree
8028 get_narrower (tree op, int *unsignedp_ptr)
8030 int uns = 0;
8031 bool first = true;
8032 tree win = op;
8033 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8035 if (TREE_CODE (op) == COMPOUND_EXPR)
8038 op = TREE_OPERAND (op, 1);
8039 while (TREE_CODE (op) == COMPOUND_EXPR);
8040 tree ret = get_narrower (op, unsignedp_ptr);
8041 if (ret == op)
8042 return win;
8043 auto_vec <tree, 16> v;
8044 unsigned int i;
8045 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8046 op = TREE_OPERAND (op, 1))
8047 v.safe_push (op);
8048 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8049 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8050 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8051 ret);
8052 return ret;
8054 while (TREE_CODE (op) == NOP_EXPR)
8056 int bitschange
8057 = (TYPE_PRECISION (TREE_TYPE (op))
8058 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8060 /* Truncations are many-to-one and so cannot be removed. */
8061 if (bitschange < 0)
8062 break;
8064 /* See what's inside this conversion. If we decide to strip it,
8065 we will set WIN. */
8067 if (bitschange > 0)
8069 op = TREE_OPERAND (op, 0);
8070 /* An extension: the outermost one can be stripped,
8071 but remember whether it is zero or sign extension. */
8072 if (first)
8073 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8074 /* Otherwise, if a sign extension has been stripped,
8075 only sign extensions can now be stripped;
8076 if a zero extension has been stripped, only zero-extensions. */
8077 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8078 break;
8079 first = false;
8081 else /* bitschange == 0 */
8083 /* A change in nominal type can always be stripped, but we must
8084 preserve the unsignedness. */
8085 if (first)
8086 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8087 first = false;
8088 op = TREE_OPERAND (op, 0);
8089 /* Keep trying to narrow, but don't assign op to win if it
8090 would turn an integral type into something else. */
8091 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8092 continue;
8095 win = op;
8098 if (TREE_CODE (op) == COMPONENT_REF
8099 /* Since type_for_size always gives an integer type. */
8100 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8101 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8102 /* Ensure field is laid out already. */
8103 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8104 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8106 unsigned HOST_WIDE_INT innerprec
8107 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8108 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8109 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8110 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8112 /* We can get this structure field in a narrower type that fits it,
8113 but the resulting extension to its nominal type (a fullword type)
8114 must satisfy the same conditions as for other extensions.
8116 Do this only for fields that are aligned (not bit-fields),
8117 because there is no advantage in doing this when bit-field
8118 instructions will be used anyway. */
8120 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8121 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8122 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8123 && type != 0)
8125 if (first)
8126 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8127 win = fold_convert (type, op);
8131 *unsignedp_ptr = uns;
8132 return win;
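/* Editorial sketch (not in the original source): a typical use peels a
   widening cast from an operand before a truncating operation, e.g.

     int uns;
     tree narrow = get_narrower (op, &uns);
     if (narrow != op)
       ;  /* NARROW holds the same value in a narrower type; extending it
	     (zero-extend if UNS is 1, sign-extend if 0) re-creates OP.  */

   Only get_narrower itself is existing API here; the surrounding lines are
   a hypothetical illustration.  */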
8135 /* Return true if integer constant C has a value that is permissible
8136 for TYPE, an integral type. */
8138 bool
8139 int_fits_type_p (const_tree c, const_tree type)
8141 tree type_low_bound, type_high_bound;
8142 bool ok_for_low_bound, ok_for_high_bound;
8143 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8145 /* Non-standard boolean types can have arbitrary precision but various
8146 transformations assume that they can only take values 0 and +/-1. */
8147 if (TREE_CODE (type) == BOOLEAN_TYPE)
8148 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8150 retry:
8151 type_low_bound = TYPE_MIN_VALUE (type);
8152 type_high_bound = TYPE_MAX_VALUE (type);
8154 /* If at least one bound of the type is a constant integer, we can check
8155 ourselves and maybe make a decision. If no such decision is possible, but
8156 this type is a subtype, try checking against that. Otherwise, use
8157 fits_to_tree_p, which checks against the precision.
8159 Compute the status for each possibly constant bound, and return if we see
8160 one does not match. Use ok_for_xxx_bound for this purpose: it is set to
8161 true when the corresponding bound is a constant and C satisfies it, and to
8162 false when that bound is not a constant. */
8164 /* Check if c >= type_low_bound. */
8165 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8167 if (tree_int_cst_lt (c, type_low_bound))
8168 return false;
8169 ok_for_low_bound = true;
8171 else
8172 ok_for_low_bound = false;
8174 /* Check if c <= type_high_bound. */
8175 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8177 if (tree_int_cst_lt (type_high_bound, c))
8178 return false;
8179 ok_for_high_bound = true;
8181 else
8182 ok_for_high_bound = false;
8184 /* If the constant fits both bounds, the result is known. */
8185 if (ok_for_low_bound && ok_for_high_bound)
8186 return true;
8188 /* Perform some generic filtering which may allow making a decision
8189 even if the bounds are not constant. First, negative integers
8190 never fit in unsigned types. */
8191 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8192 return false;
8194 /* Second, narrower types always fit in wider ones. */
8195 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8196 return true;
8198 /* Third, unsigned integers with top bit set never fit signed types. */
8199 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8201 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8202 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8204 /* When a tree_cst is converted to a wide-int, the precision
8205 is taken from the type. However, if the precision of the
8206 mode underneath the type is smaller than that, it is
8207 possible that the value will not fit. The test below
8208 fails if any bit is set between the sign bit of the
8209 underlying mode and the top bit of the type. */
8210 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8211 return false;
8213 else if (wi::neg_p (wi::to_wide (c)))
8214 return false;
8217 /* If we haven't been able to decide at this point, there is nothing more we
8218 can check ourselves here. Look at the base type if we have one and it
8219 has the same precision. */
8220 if (TREE_CODE (type) == INTEGER_TYPE
8221 && TREE_TYPE (type) != 0
8222 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8224 type = TREE_TYPE (type);
8225 goto retry;
8228 /* Or to fits_to_tree_p, if nothing else. */
8229 return wi::fits_to_tree_p (wi::to_wide (c), type);
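/* Editorial sketch (not in the original source): int_fits_type_p is
   typically used to decide whether an INTEGER_CST can be re-expressed in a
   narrower type without changing its value, e.g.

     tree c = build_int_cst (integer_type_node, 200);
     if (int_fits_type_p (c, unsigned_char_type_node))
       c = fold_convert (unsigned_char_type_node, c);  /* 200 fits in [0,255].  */
     if (!int_fits_type_p (c, signed_char_type_node))
       ;  /* 200 does not fit in [-128,127]; keep the wider type.  */

   build_int_cst, fold_convert and the type nodes are real interfaces; the
   surrounding logic is only a usage sketch.  */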
8232 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8233 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8234 represented (assuming two's-complement arithmetic) within the bit
8235 precision of the type are returned instead. */
8237 void
8238 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8240 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8241 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8242 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8243 else
8245 if (TYPE_UNSIGNED (type))
8246 mpz_set_ui (min, 0);
8247 else
8249 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8250 wi::to_mpz (mn, min, SIGNED);
8254 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8255 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8256 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8257 else
8259 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8260 wi::to_mpz (mn, max, TYPE_SIGN (type));
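/* Usage sketch (editorial addition): the mpz_t arguments must be
   initialized by the caller and cleared afterwards, e.g.

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (type, lo, hi);
     /* ... compare candidate values against LO/HI with mpz_cmp ...  */
     mpz_clear (lo);
     mpz_clear (hi);

   TYPE here stands for whatever integral or pointer type the caller is
   interested in; the snippet is illustration only.  */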
8264 /* Return true if VAR is an automatic variable. */
8266 bool
8267 auto_var_p (const_tree var)
8269 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8270 || TREE_CODE (var) == PARM_DECL)
8271 && ! TREE_STATIC (var))
8272 || TREE_CODE (var) == RESULT_DECL);
8275 /* Return true if VAR is an automatic variable defined in function FN. */
8277 bool
8278 auto_var_in_fn_p (const_tree var, const_tree fn)
8280 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8281 && (auto_var_p (var)
8282 || TREE_CODE (var) == LABEL_DECL));
8285 /* Subroutine of the following function; called by walk_tree.
8287 Return *TP if it is an automatic variable or parameter of the
8288 function passed in as DATA. */
8290 static tree
8291 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8293 tree fn = (tree) data;
8295 if (TYPE_P (*tp))
8296 *walk_subtrees = 0;
8298 else if (DECL_P (*tp)
8299 && auto_var_in_fn_p (*tp, fn))
8300 return *tp;
8302 return NULL_TREE;
8305 /* Returns true if T is, contains, or refers to a type with variable
8306 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8307 arguments, but not the return type. If FN is nonzero, only return
8308 true if a modifier of the type or position of FN is a variable or
8309 parameter inside FN.
8311 This concept is more general than that of C99 'variably modified types':
8312 in C99, a struct type is never variably modified because a VLA may not
8313 appear as a structure member. However, in GNU C, code like:
8315 struct S { int i[f()]; };
8317 is valid, and other languages may define similar constructs. */
8319 bool
8320 variably_modified_type_p (tree type, tree fn)
8322 tree t;
8324 /* Test if T is either variable (if FN is zero) or an expression containing
8325 a variable in FN. If TYPE isn't gimplified, return true also if
8326 gimplify_one_sizepos would gimplify the expression into a local
8327 variable. */
8328 #define RETURN_TRUE_IF_VAR(T) \
8329 do { tree _t = (T); \
8330 if (_t != NULL_TREE \
8331 && _t != error_mark_node \
8332 && !CONSTANT_CLASS_P (_t) \
8333 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8334 && (!fn \
8335 || (!TYPE_SIZES_GIMPLIFIED (type) \
8336 && (TREE_CODE (_t) != VAR_DECL \
8337 && !CONTAINS_PLACEHOLDER_P (_t))) \
8338 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8339 return true; } while (0)
8341 if (type == error_mark_node)
8342 return false;
8344 /* If TYPE itself has variable size, it is variably modified. */
8345 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8346 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8348 switch (TREE_CODE (type))
8350 case POINTER_TYPE:
8351 case REFERENCE_TYPE:
8352 case VECTOR_TYPE:
8353 /* Ada can have pointer types referring to themselves indirectly. */
8354 if (TREE_VISITED (type))
8355 return false;
8356 TREE_VISITED (type) = true;
8357 if (variably_modified_type_p (TREE_TYPE (type), fn))
8359 TREE_VISITED (type) = false;
8360 return true;
8362 TREE_VISITED (type) = false;
8363 break;
8365 case FUNCTION_TYPE:
8366 case METHOD_TYPE:
8367 /* If TYPE is a function type, it is variably modified if the
8368 return type is variably modified. */
8369 if (variably_modified_type_p (TREE_TYPE (type), fn))
8370 return true;
8371 break;
8373 case INTEGER_TYPE:
8374 case REAL_TYPE:
8375 case FIXED_POINT_TYPE:
8376 case ENUMERAL_TYPE:
8377 case BOOLEAN_TYPE:
8378 /* Scalar types are variably modified if their end points
8379 aren't constant. */
8380 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8381 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8382 break;
8384 case RECORD_TYPE:
8385 case UNION_TYPE:
8386 case QUAL_UNION_TYPE:
8387 /* We can't see if any of the fields are variably-modified by the
8388 definition we normally use, since that would produce infinite
8389 recursion via pointers. */
8390 /* This is variably modified if some field's type is. */
8391 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8392 if (TREE_CODE (t) == FIELD_DECL)
8394 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8395 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8396 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8398 /* If the type is a qualified union, then the DECL_QUALIFIER
8399 of fields can also be an expression containing a variable. */
8400 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8401 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8403 /* If the field is a qualified union, then it's only a container
8404 for what's inside so we look into it. That's necessary in LTO
8405 mode because the sizes of the field tested above have been set
8406 to PLACEHOLDER_EXPRs by free_lang_data. */
8407 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8408 && variably_modified_type_p (TREE_TYPE (t), fn))
8409 return true;
8411 break;
8413 case ARRAY_TYPE:
8414 /* Do not call ourselves to avoid infinite recursion. This is
8415 variably modified if the element type is. */
8416 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8417 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8418 break;
8420 default:
8421 break;
8424 /* The current language may have other cases to check, but in general,
8425 all other types are not variably modified. */
8426 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8428 #undef RETURN_TRUE_IF_VAR
8431 /* Given a DECL or TYPE, return the scope in which it was declared, or
8432 NULL_TREE if there is no containing scope. */
8434 tree
8435 get_containing_scope (const_tree t)
8437 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8440 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8442 const_tree
8443 get_ultimate_context (const_tree decl)
8445 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8447 if (TREE_CODE (decl) == BLOCK)
8448 decl = BLOCK_SUPERCONTEXT (decl);
8449 else
8450 decl = get_containing_scope (decl);
8452 return decl;
8455 /* Return the innermost context enclosing DECL that is
8456 a FUNCTION_DECL, or zero if none. */
8458 tree
8459 decl_function_context (const_tree decl)
8461 tree context;
8463 if (TREE_CODE (decl) == ERROR_MARK)
8464 return 0;
8466 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8467 where we look up the function at runtime. Such functions always take
8468 a first argument of type 'pointer to real context'.
8470 C++ should really be fixed to use DECL_CONTEXT for the real context,
8471 and use something else for the "virtual context". */
8472 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8473 context
8474 = TYPE_MAIN_VARIANT
8475 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8476 else
8477 context = DECL_CONTEXT (decl);
8479 while (context && TREE_CODE (context) != FUNCTION_DECL)
8481 if (TREE_CODE (context) == BLOCK)
8482 context = BLOCK_SUPERCONTEXT (context);
8483 else
8484 context = get_containing_scope (context);
8487 return context;
8490 /* Return the innermost context enclosing DECL that is
8491 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8492 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8494 tree
8495 decl_type_context (const_tree decl)
8497 tree context = DECL_CONTEXT (decl);
8499 while (context)
8500 switch (TREE_CODE (context))
8502 case NAMESPACE_DECL:
8503 case TRANSLATION_UNIT_DECL:
8504 return NULL_TREE;
8506 case RECORD_TYPE:
8507 case UNION_TYPE:
8508 case QUAL_UNION_TYPE:
8509 return context;
8511 case TYPE_DECL:
8512 case FUNCTION_DECL:
8513 context = DECL_CONTEXT (context);
8514 break;
8516 case BLOCK:
8517 context = BLOCK_SUPERCONTEXT (context);
8518 break;
8520 default:
8521 gcc_unreachable ();
8524 return NULL_TREE;
8527 /* CALL is a CALL_EXPR. Return the declaration for the function
8528 called, or NULL_TREE if the called function cannot be
8529 determined. */
8531 tree
8532 get_callee_fndecl (const_tree call)
8534 tree addr;
8536 if (call == error_mark_node)
8537 return error_mark_node;
8539 /* It's invalid to call this function with anything but a
8540 CALL_EXPR. */
8541 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8543 /* The first operand to the CALL is the address of the function
8544 called. */
8545 addr = CALL_EXPR_FN (call);
8547 /* If there is no function, return early. */
8548 if (addr == NULL_TREE)
8549 return NULL_TREE;
8551 STRIP_NOPS (addr);
8553 /* If this is a readonly function pointer, extract its initial value. */
8554 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8555 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8556 && DECL_INITIAL (addr))
8557 addr = DECL_INITIAL (addr);
8559 /* If the address is just `&f' for some function `f', then we know
8560 that `f' is being called. */
8561 if (TREE_CODE (addr) == ADDR_EXPR
8562 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8563 return TREE_OPERAND (addr, 0);
8565 /* We couldn't figure out what was being called. */
8566 return NULL_TREE;
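/* Usage sketch (editorial addition): a pass that wants to special-case
   direct calls typically guards on the result being non-NULL, e.g.

     if (tree fndecl = get_callee_fndecl (call))
       {
	 if (fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY))
	   ;  /* Direct call to memcpy.  */
       }
     else
       ;  /* Indirect call through a pointer; callee unknown.  */

   fndecl_built_in_p and BUILT_IN_MEMCPY are existing GCC interfaces; the
   branch bodies are placeholders.  */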
8569 /* Return true when CALL's arguments and return value match those of FNDECL,
8570 a decl of a builtin function. */
8572 static bool
8573 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8575 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8577 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8578 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8579 fndecl = decl;
8581 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8582 if (gimple_form
8583 ? !useless_type_conversion_p (TREE_TYPE (call),
8584 TREE_TYPE (TREE_TYPE (fndecl)))
8585 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8586 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8587 return false;
8589 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8590 unsigned nargs = call_expr_nargs (call);
8591 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8593 /* Variadic args follow. */
8594 if (!targs)
8595 return true;
8596 tree arg = CALL_EXPR_ARG (call, i);
8597 tree type = TREE_VALUE (targs);
8598 if (gimple_form
8599 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8600 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8602 /* For pointer arguments be more forgiving, e.g. due to
8603 FILE * vs. fileptr_type_node, or say char * vs. const char *
8604 differences etc. */
8605 if (!gimple_form
8606 && POINTER_TYPE_P (type)
8607 && POINTER_TYPE_P (TREE_TYPE (arg))
8608 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8609 continue;
8610 /* char/short integral arguments are promoted to int
8611 by several frontends if targetm.calls.promote_prototypes
8612 is true. Allow such promotion too. */
8613 if (INTEGRAL_TYPE_P (type)
8614 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8615 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8616 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8617 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8618 && (gimple_form
8619 ? useless_type_conversion_p (integer_type_node,
8620 TREE_TYPE (arg))
8621 : tree_nop_conversion_p (integer_type_node,
8622 TREE_TYPE (arg))))
8623 continue;
8624 return false;
8627 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8628 return false;
8629 return true;
8632 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8633 return the associated function code, otherwise return CFN_LAST. */
8635 combined_fn
8636 get_call_combined_fn (const_tree call)
8638 /* It's invalid to call this function with anything but a CALL_EXPR. */
8639 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8641 if (!CALL_EXPR_FN (call))
8642 return as_combined_fn (CALL_EXPR_IFN (call));
8644 tree fndecl = get_callee_fndecl (call);
8645 if (fndecl
8646 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8647 && tree_builtin_call_types_compatible_p (call, fndecl))
8648 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8650 return CFN_LAST;
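/* Usage sketch (editorial addition): the combined_fn space lets callers
   treat BUILT_IN_* and IFN_* calls uniformly, e.g.

     combined_fn cfn = get_call_combined_fn (call);
     if (cfn == CFN_LAST)
       ;  /* Neither a recognized built-in nor an internal function call.  */

   Only get_call_combined_fn and CFN_LAST are real names here; the branch
   body is a placeholder.  */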
8653 /* Comparator of indices based on tree_node_counts. */
8655 static int
8656 tree_nodes_cmp (const void *p1, const void *p2)
8658 const unsigned *n1 = (const unsigned *)p1;
8659 const unsigned *n2 = (const unsigned *)p2;
8661 return tree_node_counts[*n1] - tree_node_counts[*n2];
8664 /* Comparator of indices based on tree_code_counts. */
8666 static int
8667 tree_codes_cmp (const void *p1, const void *p2)
8669 const unsigned *n1 = (const unsigned *)p1;
8670 const unsigned *n2 = (const unsigned *)p2;
8672 return tree_code_counts[*n1] - tree_code_counts[*n2];
8675 #define TREE_MEM_USAGE_SPACES 40
8677 /* Print debugging information about tree nodes generated during the compile,
8678 and any language-specific information. */
8680 void
8681 dump_tree_statistics (void)
8683 if (GATHER_STATISTICS)
8685 uint64_t total_nodes, total_bytes;
8686 fprintf (stderr, "\nKind Nodes Bytes\n");
8687 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8688 total_nodes = total_bytes = 0;
8691 auto_vec<unsigned> indices (all_kinds);
8692 for (unsigned i = 0; i < all_kinds; i++)
8693 indices.quick_push (i);
8694 indices.qsort (tree_nodes_cmp);
8696 for (unsigned i = 0; i < (int) all_kinds; i++)
8698 unsigned j = indices[i];
8699 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8700 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8701 SIZE_AMOUNT (tree_node_sizes[j]));
8702 total_nodes += tree_node_counts[j];
8703 total_bytes += tree_node_sizes[j];
8705 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8706 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8707 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8708 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8712 fprintf (stderr, "Code Nodes\n");
8713 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8715 auto_vec<unsigned> indices (MAX_TREE_CODES);
8716 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8717 indices.quick_push (i);
8718 indices.qsort (tree_codes_cmp);
8720 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8722 unsigned j = indices[i];
8723 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8724 get_tree_code_name ((enum tree_code) j),
8725 SIZE_AMOUNT (tree_code_counts[j]));
8727 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8728 fprintf (stderr, "\n");
8729 ssanames_print_statistics ();
8730 fprintf (stderr, "\n");
8731 phinodes_print_statistics ();
8732 fprintf (stderr, "\n");
8735 else
8736 fprintf (stderr, "(No per-node statistics)\n");
8738 print_type_hash_statistics ();
8739 print_debug_expr_statistics ();
8740 print_value_expr_statistics ();
8741 lang_hooks.print_statistics ();
8744 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8746 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8748 unsigned
8749 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8751 /* This relies on the raw feedback's top 4 bits being zero. */
8752 #define FEEDBACK(X) ((X) * 0x04c11db7)
8753 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8754 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8755 static const unsigned syndromes[16] =
8757 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8758 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8759 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8760 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8762 #undef FEEDBACK
8763 #undef SYNDROME
8765 value <<= (32 - bytes * 8);
8766 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8768 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8770 chksum = (chksum << 4) ^ feedback;
8773 return chksum;
8776 /* Generate a crc32 of a string. */
8778 unsigned
8779 crc32_string (unsigned chksum, const char *string)
8782 chksum = crc32_byte (chksum, *string);
8783 while (*string++);
8784 return chksum;
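/* Editorial note (sketch, not part of the original source): the routines
   above compute a big-endian (non-reflected) CRC-32 over the polynomial
   0x04c11db7, four bits per step, using whatever initial value the caller
   passes in CHKSUM and applying no final XOR.  A bit-at-a-time reference
   for one byte, which should agree with crc32_byte, is:

     static unsigned
     crc32_byte_reference (unsigned chksum, unsigned char byte)
     {
       unsigned value = (unsigned) byte << 24;
       for (int i = 0; i < 8; i++)
	 {
	   unsigned top = ((chksum ^ value) >> 31) & 1;
	   chksum <<= 1;
	   value <<= 1;
	   if (top)
	     chksum ^= 0x04c11db7;
	 }
       return chksum;
     }

   crc32_byte_reference is a hypothetical name used only for this sketch.  */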
8787 /* P is a string that will be used in a symbol. Mask out any characters
8788 that are not valid in that context. */
8790 void
8791 clean_symbol_name (char *p)
8793 for (; *p; p++)
8794 if (! (ISALNUM (*p)
8795 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8796 || *p == '$'
8797 #endif
8798 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8799 || *p == '.'
8800 #endif
8802 *p = '_';
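/* Example (editorial sketch): '-' is never a valid symbol character, so

     char buf[] = "foo-bar.cc";
     clean_symbol_name (buf);

   leaves BUF as "foo_bar.cc" on targets that allow '.' in labels, and as
   "foo_bar_cc" where NO_DOT_IN_LABEL is defined.  */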
8805 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8807 /* Create a unique anonymous identifier. The identifier is still a
8808 valid assembly label. */
8810 tree
8811 make_anon_name ()
8813 const char *fmt =
8814 #if !defined (NO_DOT_IN_LABEL)
8816 #elif !defined (NO_DOLLAR_IN_LABEL)
8818 #else
8820 #endif
8821 "_anon_%d";
8823 char buf[24];
8824 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8825 gcc_checking_assert (len < int (sizeof (buf)));
8827 tree id = get_identifier_with_length (buf, len);
8828 IDENTIFIER_ANON_P (id) = true;
8830 return id;
8833 /* Generate a name for a special-purpose function.
8834 The generated name may need to be unique across the whole link.
8835 Changes to this function may also require corresponding changes to
8836 xstrdup_mask_random.
8837 TYPE is some string to identify the purpose of this function to the
8838 linker or collect2; it must start with an uppercase letter,
8839 one of:
8840 I - for constructors
8841 D - for destructors
8842 N - for C++ anonymous namespaces
8843 F - for DWARF unwind frame information. */
8845 tree
8846 get_file_function_name (const char *type)
8848 char *buf;
8849 const char *p;
8850 char *q;
8852 /* If we already have a name we know to be unique, just use that. */
8853 if (first_global_object_name)
8854 p = q = ASTRDUP (first_global_object_name);
8855 /* If the target is handling the constructors/destructors, they
8856 will be local to this file and the name is only necessary for
8857 debugging purposes.
8858 We also assign sub_I and sub_D suffixes to constructors called from
8859 the global static constructors. These are always local. */
8860 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8861 || (startswith (type, "sub_")
8862 && (type[4] == 'I' || type[4] == 'D')))
8864 const char *file = main_input_filename;
8865 if (! file)
8866 file = LOCATION_FILE (input_location);
8867 /* Just use the file's basename, because the full pathname
8868 might be quite long. */
8869 p = q = ASTRDUP (lbasename (file));
8871 else
8873 /* Otherwise, the name must be unique across the entire link.
8874 We don't have anything that we know to be unique to this translation
8875 unit, so use what we do have and throw in some randomness. */
8876 unsigned len;
8877 const char *name = weak_global_object_name;
8878 const char *file = main_input_filename;
8880 if (! name)
8881 name = "";
8882 if (! file)
8883 file = LOCATION_FILE (input_location);
8885 len = strlen (file);
8886 q = (char *) alloca (9 + 19 + len + 1);
8887 memcpy (q, file, len + 1);
8889 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8890 crc32_string (0, name), get_random_seed (false));
8892 p = q;
8895 clean_symbol_name (q);
8896 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8897 + strlen (type));
8899 /* Set up the name of the file-level functions we may need.
8900 Use a global object (which is already required to be unique over
8901 the program) rather than the file name (which imposes extra
8902 constraints). */
8903 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8905 return get_identifier (buf);
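/* Example (editorial sketch): with FILE_FUNCTION_FORMAT above, a request
   such as

     tree id = get_file_function_name ("I");

   yields an identifier like "_GLOBAL__I_foo" when first_global_object_name
   is "foo".  When there is no unique global object, the constructor and
   destructor cases fall back to the (cleaned) basename of the input file,
   e.g. something like "_GLOBAL__sub_I_bar_cc" for bar.cc on targets
   without '.' in labels.  */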
8908 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8910 /* Complain that the tree code of NODE does not match the expected 0
8911 terminated list of trailing codes. The trailing code list can be
8912 empty, for a more vague error message. FILE, LINE, and FUNCTION
8913 are of the caller. */
8915 void
8916 tree_check_failed (const_tree node, const char *file,
8917 int line, const char *function, ...)
8919 va_list args;
8920 const char *buffer;
8921 unsigned length = 0;
8922 enum tree_code code;
8924 va_start (args, function);
8925 while ((code = (enum tree_code) va_arg (args, int)))
8926 length += 4 + strlen (get_tree_code_name (code));
8927 va_end (args);
8928 if (length)
8930 char *tmp;
8931 va_start (args, function);
8932 length += strlen ("expected ");
8933 buffer = tmp = (char *) alloca (length);
8934 length = 0;
8935 while ((code = (enum tree_code) va_arg (args, int)))
8937 const char *prefix = length ? " or " : "expected ";
8939 strcpy (tmp + length, prefix);
8940 length += strlen (prefix);
8941 strcpy (tmp + length, get_tree_code_name (code));
8942 length += strlen (get_tree_code_name (code));
8944 va_end (args);
8946 else
8947 buffer = "unexpected node";
8949 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8950 buffer, get_tree_code_name (TREE_CODE (node)),
8951 function, trim_filename (file), line);
8954 /* Complain that the tree code of NODE does match the expected 0
8955 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8956 the caller. */
8958 void
8959 tree_not_check_failed (const_tree node, const char *file,
8960 int line, const char *function, ...)
8962 va_list args;
8963 char *buffer;
8964 unsigned length = 0;
8965 enum tree_code code;
8967 va_start (args, function);
8968 while ((code = (enum tree_code) va_arg (args, int)))
8969 length += 4 + strlen (get_tree_code_name (code));
8970 va_end (args);
8971 va_start (args, function);
8972 buffer = (char *) alloca (length);
8973 length = 0;
8974 while ((code = (enum tree_code) va_arg (args, int)))
8976 if (length)
8978 strcpy (buffer + length, " or ");
8979 length += 4;
8981 strcpy (buffer + length, get_tree_code_name (code));
8982 length += strlen (get_tree_code_name (code));
8984 va_end (args);
8986 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8987 buffer, get_tree_code_name (TREE_CODE (node)),
8988 function, trim_filename (file), line);
8991 /* Similar to tree_check_failed, except that we check for a class of tree
8992 code, given in CL. */
8994 void
8995 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8996 const char *file, int line, const char *function)
8998 internal_error
8999 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9000 TREE_CODE_CLASS_STRING (cl),
9001 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9002 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9005 /* Similar to tree_check_failed, except that instead of specifying a
9006 dozen codes, use the knowledge that they're all sequential. */
9008 void
9009 tree_range_check_failed (const_tree node, const char *file, int line,
9010 const char *function, enum tree_code c1,
9011 enum tree_code c2)
9013 char *buffer;
9014 unsigned length = 0;
9015 unsigned int c;
9017 for (c = c1; c <= c2; ++c)
9018 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9020 length += strlen ("expected ");
9021 buffer = (char *) alloca (length);
9022 length = 0;
9024 for (c = c1; c <= c2; ++c)
9026 const char *prefix = length ? " or " : "expected ";
9028 strcpy (buffer + length, prefix);
9029 length += strlen (prefix);
9030 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9031 length += strlen (get_tree_code_name ((enum tree_code) c));
9034 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9035 buffer, get_tree_code_name (TREE_CODE (node)),
9036 function, trim_filename (file), line);
9040 /* Similar to tree_class_check_failed, except that we check that a tree does
9041 not belong to the class specified in CL. */
9043 void
9044 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9045 const char *file, int line, const char *function)
9047 internal_error
9048 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9049 TREE_CODE_CLASS_STRING (cl),
9050 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9051 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9055 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9057 void
9058 omp_clause_check_failed (const_tree node, const char *file, int line,
9059 const char *function, enum omp_clause_code code)
9061 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9062 "in %s, at %s:%d",
9063 omp_clause_code_name[code],
9064 get_tree_code_name (TREE_CODE (node)),
9065 function, trim_filename (file), line);
9069 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9071 void
9072 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9073 const char *function, enum omp_clause_code c1,
9074 enum omp_clause_code c2)
9076 char *buffer;
9077 unsigned length = 0;
9078 unsigned int c;
9080 for (c = c1; c <= c2; ++c)
9081 length += 4 + strlen (omp_clause_code_name[c]);
9083 length += strlen ("expected ");
9084 buffer = (char *) alloca (length);
9085 length = 0;
9087 for (c = c1; c <= c2; ++c)
9089 const char *prefix = length ? " or " : "expected ";
9091 strcpy (buffer + length, prefix);
9092 length += strlen (prefix);
9093 strcpy (buffer + length, omp_clause_code_name[c]);
9094 length += strlen (omp_clause_code_name[c]);
9097 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9098 buffer, omp_clause_code_name[TREE_CODE (node)],
9099 function, trim_filename (file), line);
9103 #undef DEFTREESTRUCT
9104 #define DEFTREESTRUCT(VAL, NAME) NAME,
9106 static const char *ts_enum_names[] = {
9107 #include "treestruct.def"
9109 #undef DEFTREESTRUCT
9111 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9113 /* Similar to tree_class_check_failed, except that we check for
9114 whether CODE contains the tree structure identified by EN. */
9116 void
9117 tree_contains_struct_check_failed (const_tree node,
9118 const enum tree_node_structure_enum en,
9119 const char *file, int line,
9120 const char *function)
9122 internal_error
9123 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9124 TS_ENUM_NAME (en),
9125 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9129 /* Similar to above, except that the check is for the bounds of a
9130 TREE_INT_CST's (dynamically sized) array of value elements. */
9132 void
9133 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9134 const char *function)
9136 internal_error
9137 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9138 "at %s:%d",
9139 idx + 1, len, function, trim_filename (file), line);
9142 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9143 (dynamically sized) vector. */
9145 void
9146 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9147 const char *function)
9149 internal_error
9150 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9151 idx + 1, len, function, trim_filename (file), line);
9154 /* Similar to above, except that the check is for the bounds of the operand
9155 vector of an expression node EXP. */
9157 void
9158 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9159 int line, const char *function)
9161 enum tree_code code = TREE_CODE (exp);
9162 internal_error
9163 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9164 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9165 function, trim_filename (file), line);
9168 /* Similar to above, except that the check is for the number of
9169 operands of an OMP_CLAUSE node. */
9171 void
9172 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9173 int line, const char *function)
9175 internal_error
9176 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9177 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9178 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9179 trim_filename (file), line);
9181 #endif /* ENABLE_TREE_CHECKING */
9183 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9184 and mapped to the machine mode MODE. Initialize its fields and build
9185 the information necessary for debugging output. */
9187 static tree
9188 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9190 tree t;
9191 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9193 t = make_node (VECTOR_TYPE);
9194 TREE_TYPE (t) = mv_innertype;
9195 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9196 SET_TYPE_MODE (t, mode);
9198 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9199 SET_TYPE_STRUCTURAL_EQUALITY (t);
9200 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9201 || mode != VOIDmode)
9202 && !VECTOR_BOOLEAN_TYPE_P (t))
9203 TYPE_CANONICAL (t)
9204 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9206 layout_type (t);
9208 hashval_t hash = type_hash_canon_hash (t);
9209 t = type_hash_canon (hash, t);
9211 /* We have built a main variant, based on the main variant of the
9212 inner type. Use it to build the variant we return. */
9213 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9214 && TREE_TYPE (t) != innertype)
9215 return build_type_attribute_qual_variant (t,
9216 TYPE_ATTRIBUTES (innertype),
9217 TYPE_QUALS (innertype));
9219 return t;
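/* Usage sketch (editorial addition): make_vector_type is static, so other
   code reaches it through wrappers such as build_vector_type elsewhere in
   this file, which (as this sketch assumes) forwards here with VOIDmode so
   that layout_type picks the mode, e.g.

     tree v4si = build_vector_type (integer_type_node, 4);

   build_vector_type is a real tree.h entry point; the exact forwarding
   described here is an assumption for illustration.  */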
9222 static tree
9223 make_or_reuse_type (unsigned size, int unsignedp)
9225 int i;
9227 if (size == INT_TYPE_SIZE)
9228 return unsignedp ? unsigned_type_node : integer_type_node;
9229 if (size == CHAR_TYPE_SIZE)
9230 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9231 if (size == SHORT_TYPE_SIZE)
9232 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9233 if (size == LONG_TYPE_SIZE)
9234 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9235 if (size == LONG_LONG_TYPE_SIZE)
9236 return (unsignedp ? long_long_unsigned_type_node
9237 : long_long_integer_type_node);
9239 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9240 if (size == int_n_data[i].bitsize
9241 && int_n_enabled_p[i])
9242 return (unsignedp ? int_n_trees[i].unsigned_type
9243 : int_n_trees[i].signed_type);
9245 if (unsignedp)
9246 return make_unsigned_type (size);
9247 else
9248 return make_signed_type (size);
9251 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9253 static tree
9254 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9256 if (satp)
9258 if (size == SHORT_FRACT_TYPE_SIZE)
9259 return unsignedp ? sat_unsigned_short_fract_type_node
9260 : sat_short_fract_type_node;
9261 if (size == FRACT_TYPE_SIZE)
9262 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9263 if (size == LONG_FRACT_TYPE_SIZE)
9264 return unsignedp ? sat_unsigned_long_fract_type_node
9265 : sat_long_fract_type_node;
9266 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9267 return unsignedp ? sat_unsigned_long_long_fract_type_node
9268 : sat_long_long_fract_type_node;
9270 else
9272 if (size == SHORT_FRACT_TYPE_SIZE)
9273 return unsignedp ? unsigned_short_fract_type_node
9274 : short_fract_type_node;
9275 if (size == FRACT_TYPE_SIZE)
9276 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9277 if (size == LONG_FRACT_TYPE_SIZE)
9278 return unsignedp ? unsigned_long_fract_type_node
9279 : long_fract_type_node;
9280 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9281 return unsignedp ? unsigned_long_long_fract_type_node
9282 : long_long_fract_type_node;
9285 return make_fract_type (size, unsignedp, satp);
9288 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9290 static tree
9291 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9293 if (satp)
9295 if (size == SHORT_ACCUM_TYPE_SIZE)
9296 return unsignedp ? sat_unsigned_short_accum_type_node
9297 : sat_short_accum_type_node;
9298 if (size == ACCUM_TYPE_SIZE)
9299 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9300 if (size == LONG_ACCUM_TYPE_SIZE)
9301 return unsignedp ? sat_unsigned_long_accum_type_node
9302 : sat_long_accum_type_node;
9303 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9304 return unsignedp ? sat_unsigned_long_long_accum_type_node
9305 : sat_long_long_accum_type_node;
9307 else
9309 if (size == SHORT_ACCUM_TYPE_SIZE)
9310 return unsignedp ? unsigned_short_accum_type_node
9311 : short_accum_type_node;
9312 if (size == ACCUM_TYPE_SIZE)
9313 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9314 if (size == LONG_ACCUM_TYPE_SIZE)
9315 return unsignedp ? unsigned_long_accum_type_node
9316 : long_accum_type_node;
9317 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9318 return unsignedp ? unsigned_long_long_accum_type_node
9319 : long_long_accum_type_node;
9322 return make_accum_type (size, unsignedp, satp);
9326 /* Create an atomic variant node for TYPE. This routine is called
9327 during initialization of data types to create the 5 basic atomic
9328 types. The generic build_variant_type function requires these to
9329 already be set up in order to function properly, so cannot be
9330 called from there. If ALIGN is non-zero, then ensure alignment is
9331 overridden to this value. */
9333 static tree
9334 build_atomic_base (tree type, unsigned int align)
9336 tree t;
9338 /* Make sure it's not already registered. */
9339 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9340 return t;
9342 t = build_variant_type_copy (type);
9343 set_type_quals (t, TYPE_QUAL_ATOMIC);
9345 if (align)
9346 SET_TYPE_ALIGN (t, align);
9348 return t;
9351 /* Information about the _FloatN and _FloatNx types. This must be in
9352 the same order as the corresponding TI_* enum values. */
9353 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9355 { 16, false },
9356 { 32, false },
9357 { 64, false },
9358 { 128, false },
9359 { 32, true },
9360 { 64, true },
9361 { 128, true },
9365 /* Create nodes for all integer types (and error_mark_node) using the sizes
9366 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9368 void
9369 build_common_tree_nodes (bool signed_char)
9371 int i;
9373 error_mark_node = make_node (ERROR_MARK);
9374 TREE_TYPE (error_mark_node) = error_mark_node;
9376 initialize_sizetypes ();
9378 /* Define both `signed char' and `unsigned char'. */
9379 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9380 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9381 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9382 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9384 /* Define `char', which is like either `signed char' or `unsigned char'
9385 but not the same as either. */
9386 char_type_node
9387 = (signed_char
9388 ? make_signed_type (CHAR_TYPE_SIZE)
9389 : make_unsigned_type (CHAR_TYPE_SIZE));
9390 TYPE_STRING_FLAG (char_type_node) = 1;
9392 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9393 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9394 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9395 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9396 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9397 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9398 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9399 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9401 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9403 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9404 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9406 if (int_n_enabled_p[i])
9408 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9409 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9413 /* Define a boolean type. This type only represents boolean values but
9414 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9415 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9416 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9417 TYPE_PRECISION (boolean_type_node) = 1;
9418 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9420 /* Define what type to use for size_t. */
9421 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9422 size_type_node = unsigned_type_node;
9423 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9424 size_type_node = long_unsigned_type_node;
9425 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9426 size_type_node = long_long_unsigned_type_node;
9427 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9428 size_type_node = short_unsigned_type_node;
9429 else
9431 int i;
9433 size_type_node = NULL_TREE;
9434 for (i = 0; i < NUM_INT_N_ENTS; i++)
9435 if (int_n_enabled_p[i])
9437 char name[50], altname[50];
9438 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9439 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9441 if (strcmp (name, SIZE_TYPE) == 0
9442 || strcmp (altname, SIZE_TYPE) == 0)
9444 size_type_node = int_n_trees[i].unsigned_type;
9447 if (size_type_node == NULL_TREE)
9448 gcc_unreachable ();
9451 /* Define what type to use for ptrdiff_t. */
9452 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9453 ptrdiff_type_node = integer_type_node;
9454 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9455 ptrdiff_type_node = long_integer_type_node;
9456 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9457 ptrdiff_type_node = long_long_integer_type_node;
9458 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9459 ptrdiff_type_node = short_integer_type_node;
9460 else
9462 ptrdiff_type_node = NULL_TREE;
9463 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9464 if (int_n_enabled_p[i])
9466 char name[50], altname[50];
9467 sprintf (name, "__int%d", int_n_data[i].bitsize);
9468 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9470 if (strcmp (name, PTRDIFF_TYPE) == 0
9471 || strcmp (altname, PTRDIFF_TYPE) == 0)
9472 ptrdiff_type_node = int_n_trees[i].signed_type;
9474 if (ptrdiff_type_node == NULL_TREE)
9475 gcc_unreachable ();
9478 /* Fill in the rest of the sized types. Reuse existing type nodes
9479 when possible. */
9480 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9481 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9482 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9483 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9484 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9486 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9487 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9488 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9489 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9490 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9492 /* Don't call build_qualified_type for atomics. That routine does
9493 special processing for atomics, and until they are initialized
9494 it's better not to make that call.
9496 Check to see if there is a target override for atomic types. */
9498 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9499 targetm.atomic_align_for_mode (QImode));
9500 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9501 targetm.atomic_align_for_mode (HImode));
9502 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9503 targetm.atomic_align_for_mode (SImode));
9504 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9505 targetm.atomic_align_for_mode (DImode));
9506 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9507 targetm.atomic_align_for_mode (TImode));
9509 access_public_node = get_identifier ("public");
9510 access_protected_node = get_identifier ("protected");
9511 access_private_node = get_identifier ("private");
9513 /* Define these next since types below may use them. */
9514 integer_zero_node = build_int_cst (integer_type_node, 0);
9515 integer_one_node = build_int_cst (integer_type_node, 1);
9516 integer_three_node = build_int_cst (integer_type_node, 3);
9517 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9519 size_zero_node = size_int (0);
9520 size_one_node = size_int (1);
9521 bitsize_zero_node = bitsize_int (0);
9522 bitsize_one_node = bitsize_int (1);
9523 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9525 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9526 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9528 void_type_node = make_node (VOID_TYPE);
9529 layout_type (void_type_node);
9531 /* We are not going to have real types in C with less than byte alignment,
9532 so we might as well not have any types that claim to have it. */
9533 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9534 TYPE_USER_ALIGN (void_type_node) = 0;
9536 void_node = make_node (VOID_CST);
9537 TREE_TYPE (void_node) = void_type_node;
9539 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9541 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9542 layout_type (TREE_TYPE (null_pointer_node));
9544 ptr_type_node = build_pointer_type (void_type_node);
9545 const_ptr_type_node
9546 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9547 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9548 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9550 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9552 float_type_node = make_node (REAL_TYPE);
9553 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9554 layout_type (float_type_node);
9556 double_type_node = make_node (REAL_TYPE);
9557 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9558 layout_type (double_type_node);
9560 long_double_type_node = make_node (REAL_TYPE);
9561 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9562 layout_type (long_double_type_node);
9564 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9566 int n = floatn_nx_types[i].n;
9567 bool extended = floatn_nx_types[i].extended;
9568 scalar_float_mode mode;
9569 if (!targetm.floatn_mode (n, extended).exists (&mode))
9570 continue;
9571 int precision = GET_MODE_PRECISION (mode);
9572 /* Work around the rs6000 KFmode having precision 113 not
9573 128. */
9574 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9575 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9576 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9577 if (!extended)
9578 gcc_assert (min_precision == n);
9579 if (precision < min_precision)
9580 precision = min_precision;
9581 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9582 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9583 layout_type (FLOATN_NX_TYPE_NODE (i));
9584 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9586 float128t_type_node = float128_type_node;
9587 #ifdef HAVE_BFmode
9588 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9589 && targetm.scalar_mode_supported_p (BFmode)
9590 && targetm.libgcc_floating_mode_supported_p (BFmode))
9592 bfloat16_type_node = make_node (REAL_TYPE);
9593 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9594 layout_type (bfloat16_type_node);
9595 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9597 #endif
9599 float_ptr_type_node = build_pointer_type (float_type_node);
9600 double_ptr_type_node = build_pointer_type (double_type_node);
9601 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9602 integer_ptr_type_node = build_pointer_type (integer_type_node);
9604 /* Fixed size integer types. */
9605 uint16_type_node = make_or_reuse_type (16, 1);
9606 uint32_type_node = make_or_reuse_type (32, 1);
9607 uint64_type_node = make_or_reuse_type (64, 1);
9608 if (targetm.scalar_mode_supported_p (TImode))
9609 uint128_type_node = make_or_reuse_type (128, 1);
9611 /* Decimal float types. */
9612 if (targetm.decimal_float_supported_p ())
9614 dfloat32_type_node = make_node (REAL_TYPE);
9615 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9616 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9617 layout_type (dfloat32_type_node);
9619 dfloat64_type_node = make_node (REAL_TYPE);
9620 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9621 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9622 layout_type (dfloat64_type_node);
9624 dfloat128_type_node = make_node (REAL_TYPE);
9625 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9626 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9627 layout_type (dfloat128_type_node);
9630 complex_integer_type_node = build_complex_type (integer_type_node, true);
9631 complex_float_type_node = build_complex_type (float_type_node, true);
9632 complex_double_type_node = build_complex_type (double_type_node, true);
9633 complex_long_double_type_node = build_complex_type (long_double_type_node,
9634 true);
9636 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9638 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9639 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9640 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9643 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9644 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9645 sat_ ## KIND ## _type_node = \
9646 make_sat_signed_ ## KIND ## _type (SIZE); \
9647 sat_unsigned_ ## KIND ## _type_node = \
9648 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9649 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9650 unsigned_ ## KIND ## _type_node = \
9651 make_unsigned_ ## KIND ## _type (SIZE);
9653 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9654 sat_ ## WIDTH ## KIND ## _type_node = \
9655 make_sat_signed_ ## KIND ## _type (SIZE); \
9656 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9657 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9658 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9659 unsigned_ ## WIDTH ## KIND ## _type_node = \
9660 make_unsigned_ ## KIND ## _type (SIZE);
9662 /* Make fixed-point type nodes based on four different widths. */
9663 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9664 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9665 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9666 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9667 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9669 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9670 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9671 NAME ## _type_node = \
9672 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9673 u ## NAME ## _type_node = \
9674 make_or_reuse_unsigned_ ## KIND ## _type \
9675 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9676 sat_ ## NAME ## _type_node = \
9677 make_or_reuse_sat_signed_ ## KIND ## _type \
9678 (GET_MODE_BITSIZE (MODE ## mode)); \
9679 sat_u ## NAME ## _type_node = \
9680 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9681 (GET_MODE_BITSIZE (U ## MODE ## mode));
9683 /* Fixed-point type and mode nodes. */
9684 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9685 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9686 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9687 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9688 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9689 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9690 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9691 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9692 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9693 MAKE_FIXED_MODE_NODE (accum, da, DA)
9694 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9697 tree t = targetm.build_builtin_va_list ();
9699 /* Many back-ends define record types without setting TYPE_NAME.
9700 If we copied the record type here, we'd keep the original
9701 record type without a name. This breaks name mangling. So,
9702 don't copy record types and let c_common_nodes_and_builtins()
9703 declare the type to be __builtin_va_list. */
9704 if (TREE_CODE (t) != RECORD_TYPE)
9705 t = build_variant_type_copy (t);
9707 va_list_type_node = t;
9710 /* SCEV analyzer global shared trees. */
9711 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9712 TREE_TYPE (chrec_dont_know) = void_type_node;
9713 chrec_known = make_node (SCEV_KNOWN);
9714 TREE_TYPE (chrec_known) = void_type_node;
9717 /* Modify DECL for given flags.
9718 TM_PURE attribute is set only on types, so the function will modify
9719 DECL's type when ECF_TM_PURE is used. */
9721 void
9722 set_call_expr_flags (tree decl, int flags)
9724 if (flags & ECF_NOTHROW)
9725 TREE_NOTHROW (decl) = 1;
9726 if (flags & ECF_CONST)
9727 TREE_READONLY (decl) = 1;
9728 if (flags & ECF_PURE)
9729 DECL_PURE_P (decl) = 1;
9730 if (flags & ECF_LOOPING_CONST_OR_PURE)
9731 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9732 if (flags & ECF_NOVOPS)
9733 DECL_IS_NOVOPS (decl) = 1;
9734 if (flags & ECF_NORETURN)
9735 TREE_THIS_VOLATILE (decl) = 1;
9736 if (flags & ECF_MALLOC)
9737 DECL_IS_MALLOC (decl) = 1;
9738 if (flags & ECF_RETURNS_TWICE)
9739 DECL_IS_RETURNS_TWICE (decl) = 1;
9740 if (flags & ECF_LEAF)
9741 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9742 NULL, DECL_ATTRIBUTES (decl));
9743 if (flags & ECF_COLD)
9744 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9745 NULL, DECL_ATTRIBUTES (decl));
9746 if (flags & ECF_RET1)
9747 DECL_ATTRIBUTES (decl)
9748 = tree_cons (get_identifier ("fn spec"),
9749 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9750 DECL_ATTRIBUTES (decl));
9751 if ((flags & ECF_TM_PURE) && flag_tm)
9752 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9753 if ((flags & ECF_XTHROW))
9754 DECL_ATTRIBUTES (decl)
9755 = tree_cons (get_identifier ("expected_throw"),
9756 NULL, DECL_ATTRIBUTES (decl));
9757 /* Looping const or pure is implied by noreturn.
9758 There is currently no way to declare looping const or looping pure alone. */
9759 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9760 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
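/* Usage sketch (editorial addition): local_define_builtin below passes a
   mask of these ECF_* flags for each middle-end builtin; for instance

     set_call_expr_flags (decl, ECF_NORETURN | ECF_NOTHROW | ECF_LEAF);

   marks DECL as TREE_THIS_VOLATILE (noreturn), TREE_NOTHROW, and adds the
   "leaf" attribute, matching the cases handled above.  */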
9764 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9766 static void
9767 local_define_builtin (const char *name, tree type, enum built_in_function code,
9768 const char *library_name, int ecf_flags)
9770 tree decl;
9772 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9773 library_name, NULL_TREE);
9774 set_call_expr_flags (decl, ecf_flags);
9776 set_builtin_decl (code, decl, true);
9779 /* Call this function after instantiating all builtins that the language
9780 front end cares about. This will build the rest of the builtins
9781 and internal functions that are relied upon by the tree optimizers and
9782 the middle-end. */
9784 void
9785 build_common_builtin_nodes (void)
9787 tree tmp, ftype;
9788 int ecf_flags;
9790 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9792 ftype = build_function_type_list (void_type_node,
9793 ptr_type_node,
9794 ptr_type_node,
9795 integer_type_node,
9796 NULL_TREE);
9797 local_define_builtin ("__builtin_clear_padding", ftype,
9798 BUILT_IN_CLEAR_PADDING,
9799 "__builtin_clear_padding",
9800 ECF_LEAF | ECF_NOTHROW);
9803 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9804 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9805 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9806 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9808 ftype = build_function_type (void_type_node, void_list_node);
9809 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9810 local_define_builtin ("__builtin_unreachable", ftype,
9811 BUILT_IN_UNREACHABLE,
9812 "__builtin_unreachable",
9813 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9814 | ECF_CONST | ECF_COLD);
9815 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9816 local_define_builtin ("__builtin_unreachable trap", ftype,
9817 BUILT_IN_UNREACHABLE_TRAP,
9818 "__builtin_unreachable trap",
9819 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9820 | ECF_CONST | ECF_COLD);
9821 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9822 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9823 "abort",
9824 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9825 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9826 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9827 "__builtin_trap",
9828 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9831 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9832 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9834 ftype = build_function_type_list (ptr_type_node,
9835 ptr_type_node, const_ptr_type_node,
9836 size_type_node, NULL_TREE);
9838 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9839 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9840 "memcpy", ECF_NOTHROW | ECF_LEAF);
9841 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9842 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9843 "memmove", ECF_NOTHROW | ECF_LEAF);
9846 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9848 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9849 const_ptr_type_node, size_type_node,
9850 NULL_TREE);
9851 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9852 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9855 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9857 ftype = build_function_type_list (ptr_type_node,
9858 ptr_type_node, integer_type_node,
9859 size_type_node, NULL_TREE);
9860 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9861 "memset", ECF_NOTHROW | ECF_LEAF);
9864 /* If we're checking the stack, `alloca' can throw. */
9865 const int alloca_flags
9866 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9868 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9870 ftype = build_function_type_list (ptr_type_node,
9871 size_type_node, NULL_TREE);
9872 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9873 "alloca", alloca_flags);
9876 ftype = build_function_type_list (ptr_type_node, size_type_node,
9877 size_type_node, NULL_TREE);
9878 local_define_builtin ("__builtin_alloca_with_align", ftype,
9879 BUILT_IN_ALLOCA_WITH_ALIGN,
9880 "__builtin_alloca_with_align",
9881 alloca_flags);
9883 ftype = build_function_type_list (ptr_type_node, size_type_node,
9884 size_type_node, size_type_node, NULL_TREE);
9885 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9886 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9887 "__builtin_alloca_with_align_and_max",
9888 alloca_flags);
9890 ftype = build_function_type_list (void_type_node,
9891 ptr_type_node, ptr_type_node,
9892 ptr_type_node, NULL_TREE);
9893 local_define_builtin ("__builtin_init_trampoline", ftype,
9894 BUILT_IN_INIT_TRAMPOLINE,
9895 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9896 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9897 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9898 "__builtin_init_heap_trampoline",
9899 ECF_NOTHROW | ECF_LEAF);
9900 local_define_builtin ("__builtin_init_descriptor", ftype,
9901 BUILT_IN_INIT_DESCRIPTOR,
9902 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9904 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9905 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9906 BUILT_IN_ADJUST_TRAMPOLINE,
9907 "__builtin_adjust_trampoline",
9908 ECF_CONST | ECF_NOTHROW);
9909 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9910 BUILT_IN_ADJUST_DESCRIPTOR,
9911 "__builtin_adjust_descriptor",
9912 ECF_CONST | ECF_NOTHROW);
9914 ftype = build_function_type_list (void_type_node,
9915 ptr_type_node, ptr_type_node, NULL_TREE);
9916 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9917 local_define_builtin ("__builtin___clear_cache", ftype,
9918 BUILT_IN_CLEAR_CACHE,
9919 "__clear_cache",
9920 ECF_NOTHROW);
9922 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9923 BUILT_IN_NONLOCAL_GOTO,
9924 "__builtin_nonlocal_goto",
9925 ECF_NORETURN | ECF_NOTHROW);
9927 tree ptr_ptr_type_node = build_pointer_type (ptr_type_node);
9929 ftype = build_function_type_list (void_type_node,
9930 ptr_type_node, // void *chain
9931 ptr_type_node, // void *func
9932 ptr_ptr_type_node, // void **dst
9933 NULL_TREE);
9934 local_define_builtin ("__builtin_nested_func_ptr_created", ftype,
9935 BUILT_IN_NESTED_PTR_CREATED,
9936 "__builtin_nested_func_ptr_created", ECF_NOTHROW);
9938 ftype = build_function_type_list (void_type_node,
9939 NULL_TREE);
9940 local_define_builtin ("__builtin_nested_func_ptr_deleted", ftype,
9941 BUILT_IN_NESTED_PTR_DELETED,
9942 "__builtin_nested_func_ptr_deleted", ECF_NOTHROW);
9944 ftype = build_function_type_list (void_type_node,
9945 ptr_type_node, ptr_type_node, NULL_TREE);
9946 local_define_builtin ("__builtin_setjmp_setup", ftype,
9947 BUILT_IN_SETJMP_SETUP,
9948 "__builtin_setjmp_setup", ECF_NOTHROW);
9950 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9951 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9952 BUILT_IN_SETJMP_RECEIVER,
9953 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9955 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9956 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9957 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9959 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9960 local_define_builtin ("__builtin_stack_restore", ftype,
9961 BUILT_IN_STACK_RESTORE,
9962 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9964 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9965 const_ptr_type_node, size_type_node,
9966 NULL_TREE);
9967 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9968 "__builtin_memcmp_eq",
9969 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9971 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9972 "__builtin_strncmp_eq",
9973 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9975 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9976 "__builtin_strcmp_eq",
9977 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9979 /* If there's a possibility that we might use the ARM EABI, build the
9980 alternate __cxa_end_cleanup node used to resume from C++. */
9981 if (targetm.arm_eabi_unwinder)
9983 ftype = build_function_type_list (void_type_node, NULL_TREE);
9984 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9985 BUILT_IN_CXA_END_CLEANUP,
9986 "__cxa_end_cleanup",
9987 ECF_NORETURN | ECF_XTHROW | ECF_LEAF);
9990 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9991 local_define_builtin ("__builtin_unwind_resume", ftype,
9992 BUILT_IN_UNWIND_RESUME,
9993 ((targetm_common.except_unwind_info (&global_options)
9994 == UI_SJLJ)
9995 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9996 ECF_NORETURN | ECF_XTHROW);
9998 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10000 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10001 NULL_TREE);
10002 local_define_builtin ("__builtin_return_address", ftype,
10003 BUILT_IN_RETURN_ADDRESS,
10004 "__builtin_return_address",
10005 ECF_NOTHROW);
10008 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10009 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10011 ftype = build_function_type_list (void_type_node, ptr_type_node,
10012 ptr_type_node, NULL_TREE);
10013 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10014 local_define_builtin ("__cyg_profile_func_enter", ftype,
10015 BUILT_IN_PROFILE_FUNC_ENTER,
10016 "__cyg_profile_func_enter", 0);
10017 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10018 local_define_builtin ("__cyg_profile_func_exit", ftype,
10019 BUILT_IN_PROFILE_FUNC_EXIT,
10020 "__cyg_profile_func_exit", 0);
10023 /* The exception object and filter values from the runtime. The argument
10024 must be zero before exception lowering, i.e. from the front end. After
10025 exception lowering, it will be the region number for the exception
10026 landing pad. These functions are PURE instead of CONST to prevent
10027 them from being hoisted past the exception edge that will initialize
10028 its value in the landing pad. */
10029 ftype = build_function_type_list (ptr_type_node,
10030 integer_type_node, NULL_TREE);
10031 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10032 /* Only use TM_PURE if we have TM language support. */
10033 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10034 ecf_flags |= ECF_TM_PURE;
10035 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10036 "__builtin_eh_pointer", ecf_flags);
10038 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10039 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10040 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10041 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10043 ftype = build_function_type_list (void_type_node,
10044 integer_type_node, integer_type_node,
10045 NULL_TREE);
10046 local_define_builtin ("__builtin_eh_copy_values", ftype,
10047 BUILT_IN_EH_COPY_VALUES,
10048 "__builtin_eh_copy_values", ECF_NOTHROW);
10050 /* Complex multiplication and division. These are handled as builtins
10051 rather than optabs because emit_library_call_value doesn't support
10052 complex. Further, we can do slightly better with folding these
10053 beasties if the real and imaginary parts of the arguments are separate. */
10055 int mode;
10057 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10059 char mode_name_buf[4], *q;
10060 const char *p;
10061 enum built_in_function mcode, dcode;
10062 tree type, inner_type;
10063 const char *prefix = "__";
10065 if (targetm.libfunc_gnu_prefix)
10066 prefix = "__gnu_";
10068 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10069 if (type == NULL)
10070 continue;
10071 inner_type = TREE_TYPE (type);
10073 ftype = build_function_type_list (type, inner_type, inner_type,
10074 inner_type, inner_type, NULL_TREE);
10076 mcode = ((enum built_in_function)
10077 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10078 dcode = ((enum built_in_function)
10079 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10081 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10082 *q = TOLOWER (*p);
10083 *q = '\0';
10085 /* For -ftrapping-math these should throw from a former
10086 -fnon-call-exception stmt. */
10087 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10088 NULL);
10089 local_define_builtin (built_in_names[mcode], ftype, mcode,
10090 built_in_names[mcode],
10091 ECF_CONST | ECF_LEAF);
10093 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10094 NULL);
10095 local_define_builtin (built_in_names[dcode], ftype, dcode,
10096 built_in_names[dcode],
10097 ECF_CONST | ECF_LEAF);
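/* As a concrete illustration (assuming the default "__" prefix rather than
   "__gnu_"): for SCmode the lowercased mode name is "sc", so the loop above
   registers the libgcc routines "__mulsc3" and "__divsc3" for complex float
   multiplication and division.  */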
10101 init_internal_fns ();
10104 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10105 better way.
10107 If we requested a pointer to a vector, build up the pointers that
10108 we stripped off while looking for the inner type. Similarly for
10109 return values from functions.
10111 The argument TYPE is the top of the chain, and BOTTOM is the
10112 new type which we will point to. */
10114 tree
10115 reconstruct_complex_type (tree type, tree bottom)
10117 tree inner, outer;
10119 if (TREE_CODE (type) == POINTER_TYPE)
10121 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10122 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10123 TYPE_REF_CAN_ALIAS_ALL (type));
10125 else if (TREE_CODE (type) == REFERENCE_TYPE)
10127 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10128 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10129 TYPE_REF_CAN_ALIAS_ALL (type));
10131 else if (TREE_CODE (type) == ARRAY_TYPE)
10133 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10134 outer = build_array_type (inner, TYPE_DOMAIN (type));
10136 else if (TREE_CODE (type) == FUNCTION_TYPE)
10138 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10139 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10140 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10142 else if (TREE_CODE (type) == METHOD_TYPE)
10144 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10145 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10146 so we must compensate by getting rid of it. */
10147 outer
10148 = build_method_type_directly
10149 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10150 inner,
10151 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10153 else if (TREE_CODE (type) == OFFSET_TYPE)
10155 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10156 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10158 else
10159 return bottom;
10161 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10162 TYPE_QUALS (type));
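/* Example (illustrative): if TYPE is a pointer to float and BOTTOM is a
   vector type such as V4SF, the result is a pointer to V4SF -- the
   POINTER_TYPE wrapper stripped while searching for the inner type is
   rebuilt around BOTTOM, preserving the original mode and qualifiers.  */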
10165 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10166 the inner type. */
10167 tree
10168 build_vector_type_for_mode (tree innertype, machine_mode mode)
10170 poly_int64 nunits;
10171 unsigned int bitsize;
10173 switch (GET_MODE_CLASS (mode))
10175 case MODE_VECTOR_BOOL:
10176 case MODE_VECTOR_INT:
10177 case MODE_VECTOR_FLOAT:
10178 case MODE_VECTOR_FRACT:
10179 case MODE_VECTOR_UFRACT:
10180 case MODE_VECTOR_ACCUM:
10181 case MODE_VECTOR_UACCUM:
10182 nunits = GET_MODE_NUNITS (mode);
10183 break;
10185 case MODE_INT:
10186 /* Check that there are no leftover bits. */
10187 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10188 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10189 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10190 break;
10192 default:
10193 gcc_unreachable ();
10196 return make_vector_type (innertype, nunits, mode);
10199 /* Similarly, but takes the inner type and number of units, which must be
10200 a power of two. */
10202 tree
10203 build_vector_type (tree innertype, poly_int64 nunits)
10205 return make_vector_type (innertype, nunits, VOIDmode);
10208 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10210 tree
10211 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10213 gcc_assert (mask_mode != BLKmode);
10215 unsigned HOST_WIDE_INT esize;
10216 if (VECTOR_MODE_P (mask_mode))
10218 poly_uint64 vsize = GET_MODE_PRECISION (mask_mode);
10219 esize = vector_element_size (vsize, nunits);
10221 else
10222 esize = 1;
10224 tree bool_type = build_nonstandard_boolean_type (esize);
10226 return make_vector_type (bool_type, nunits, mask_mode);
10229 /* Build a vector type that holds one boolean result for each element of
10230 vector type VECTYPE. The public interface for this operation is
10231 truth_type_for. */
10233 static tree
10234 build_truth_vector_type_for (tree vectype)
10236 machine_mode vector_mode = TYPE_MODE (vectype);
10237 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10239 machine_mode mask_mode;
10240 if (VECTOR_MODE_P (vector_mode)
10241 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10242 return build_truth_vector_type_for_mode (nunits, mask_mode);
10244 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10245 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10246 tree bool_type = build_nonstandard_boolean_type (esize);
10248 return make_vector_type (bool_type, nunits, VOIDmode);
10251 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10252 set. */
10254 tree
10255 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10257 tree t = make_vector_type (innertype, nunits, VOIDmode);
10258 tree cand;
10259 /* We always build the non-opaque variant before the opaque one,
10260 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10261 cand = TYPE_NEXT_VARIANT (t);
10262 if (cand
10263 && TYPE_VECTOR_OPAQUE (cand)
10264 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10265 return cand;
10266 /* Otherwise build a variant type and make sure to queue it after
10267 the non-opaque type. */
10268 cand = build_distinct_type_copy (t);
10269 TYPE_VECTOR_OPAQUE (cand) = true;
10270 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10271 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10272 TYPE_NEXT_VARIANT (t) = cand;
10273 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10274 return cand;
10277 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10279 static poly_wide_int
10280 vector_cst_int_elt (const_tree t, unsigned int i)
10282 /* First handle elements that are directly encoded. */
10283 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10284 if (i < encoded_nelts)
10285 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10287 /* Identify the pattern that contains element I and work out the index of
10288 the last encoded element for that pattern. */
10289 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10290 unsigned int pattern = i % npatterns;
10291 unsigned int count = i / npatterns;
10292 unsigned int final_i = encoded_nelts - npatterns + pattern;
10294 /* If there are no steps, the final encoded value is the right one. */
10295 if (!VECTOR_CST_STEPPED_P (t))
10296 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10298 /* Otherwise work out the value from the last two encoded elements. */
10299 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10300 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10301 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10302 return wi::to_poly_wide (v2) + (count - 2) * diff;
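/* Worked example for the stepped case (illustrative): a VECTOR_CST encoding
   {1, 2, 3, ...} with one pattern and three encoded elements {1, 2, 3} has
   v1 = 2 and v2 = 3.  For i = 5, count = 5 and diff = 1, so the result is
   3 + (5 - 2) * 1 = 6, i.e. the sixth element of {1, 2, 3, 4, 5, 6, ...}.  */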
10305 /* Return the value of element I of VECTOR_CST T. */
10307 tree
10308 vector_cst_elt (const_tree t, unsigned int i)
10310 /* First handle elements that are directly encoded. */
10311 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10312 if (i < encoded_nelts)
10313 return VECTOR_CST_ENCODED_ELT (t, i);
10315 /* If there are no steps, the final encoded value is the right one. */
10316 if (!VECTOR_CST_STEPPED_P (t))
10318 /* Identify the pattern that contains element I and work out the index of
10319 the last encoded element for that pattern. */
10320 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10321 unsigned int pattern = i % npatterns;
10322 unsigned int final_i = encoded_nelts - npatterns + pattern;
10323 return VECTOR_CST_ENCODED_ELT (t, final_i);
10326 /* Otherwise work out the value from the last two encoded elements. */
10327 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10328 vector_cst_int_elt (t, i));
10331 /* Given an initializer INIT, return TRUE if INIT is zero or some
10332 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10333 null, set *NONZERO if and only if INIT is known not to be all
10334 zeros. A return value of false combined with *NONZERO set to
10335 false implies that INIT may, but need not, be all zeros. Other
10336 combinations indicate definitive answers. */
10338 bool
10339 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10341 bool dummy;
10342 if (!nonzero)
10343 nonzero = &dummy;
10345 /* Conservatively clear NONZERO and set it only if INIT is definitely
10346 not all zero. */
10347 *nonzero = false;
10349 STRIP_NOPS (init);
10351 unsigned HOST_WIDE_INT off = 0;
10353 switch (TREE_CODE (init))
10355 case INTEGER_CST:
10356 if (integer_zerop (init))
10357 return true;
10359 *nonzero = true;
10360 return false;
10362 case REAL_CST:
10363 /* ??? Note that this is not correct for C4X float formats. There,
10364 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10365 negative exponent. */
10366 if (real_zerop (init)
10367 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10368 return true;
10370 *nonzero = true;
10371 return false;
10373 case FIXED_CST:
10374 if (fixed_zerop (init))
10375 return true;
10377 *nonzero = true;
10378 return false;
10380 case COMPLEX_CST:
10381 if (integer_zerop (init)
10382 || (real_zerop (init)
10383 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10384 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10385 return true;
10387 *nonzero = true;
10388 return false;
10390 case VECTOR_CST:
10391 if (VECTOR_CST_NPATTERNS (init) == 1
10392 && VECTOR_CST_DUPLICATE_P (init)
10393 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10394 return true;
10396 *nonzero = true;
10397 return false;
10399 case CONSTRUCTOR:
10401 if (TREE_CLOBBER_P (init))
10402 return false;
10404 unsigned HOST_WIDE_INT idx;
10405 tree elt;
10407 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10408 if (!initializer_zerop (elt, nonzero))
10409 return false;
10411 return true;
10414 case MEM_REF:
10416 tree arg = TREE_OPERAND (init, 0);
10417 if (TREE_CODE (arg) != ADDR_EXPR)
10418 return false;
10419 tree offset = TREE_OPERAND (init, 1);
10420 if (TREE_CODE (offset) != INTEGER_CST
10421 || !tree_fits_uhwi_p (offset))
10422 return false;
10423 off = tree_to_uhwi (offset);
10424 if (INT_MAX < off)
10425 return false;
10426 arg = TREE_OPERAND (arg, 0);
10427 if (TREE_CODE (arg) != STRING_CST)
10428 return false;
10429 init = arg;
10431 /* Fall through. */
10433 case STRING_CST:
10435 gcc_assert (off <= INT_MAX);
10437 int i = off;
10438 int n = TREE_STRING_LENGTH (init);
10439 if (n <= i)
10440 return false;
10442 /* We need to loop through all elements to handle cases like
10443 "\0" and "\0foobar". */
10444 for (i = 0; i < n; ++i)
10445 if (TREE_STRING_POINTER (init)[i] != '\0')
10447 *nonzero = true;
10448 return false;
10451 return true;
10454 default:
10455 return false;
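/* Examples (illustrative): a zero INTEGER_CST, the string "\0\0" and a
   CONSTRUCTOR whose elements are all zeros each return true; a REAL_CST of
   -0.0 returns false and sets *NONZERO, since its bit pattern is not all
   zeros.  */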
10459 /* Return true if EXPR is an initializer expression in which every element
10460 is a constant that is numerically equal to 0 or 1. The elements do not
10461 need to be equal to each other. */
10463 bool
10464 initializer_each_zero_or_onep (const_tree expr)
10466 STRIP_ANY_LOCATION_WRAPPER (expr);
10468 switch (TREE_CODE (expr))
10470 case INTEGER_CST:
10471 return integer_zerop (expr) || integer_onep (expr);
10473 case REAL_CST:
10474 return real_zerop (expr) || real_onep (expr);
10476 case VECTOR_CST:
10478 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10479 if (VECTOR_CST_STEPPED_P (expr)
10480 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10481 return false;
10483 for (unsigned int i = 0; i < nelts; ++i)
10485 tree elt = vector_cst_elt (expr, i);
10486 if (!initializer_each_zero_or_onep (elt))
10487 return false;
10490 return true;
10493 default:
10494 return false;
10498 /* Check whether vector VEC consists entirely of equal elements and
10499 whether the number of elements corresponds to the type of VEC.
10500 The function returns the first element of the vector,
10501 or NULL_TREE if the vector is not uniform. */
10502 tree
10503 uniform_vector_p (const_tree vec)
10505 tree first, t;
10506 unsigned HOST_WIDE_INT i, nelts;
10508 if (vec == NULL_TREE)
10509 return NULL_TREE;
10511 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10513 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10514 return TREE_OPERAND (vec, 0);
10516 else if (TREE_CODE (vec) == VECTOR_CST)
10518 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10519 return VECTOR_CST_ENCODED_ELT (vec, 0);
10520 return NULL_TREE;
10523 else if (TREE_CODE (vec) == CONSTRUCTOR
10524 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10526 first = error_mark_node;
10528 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10530 if (i == 0)
10532 first = t;
10533 continue;
10535 if (!operand_equal_p (first, t, 0))
10536 return NULL_TREE;
10538 if (i != nelts)
10539 return NULL_TREE;
10541 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10542 return uniform_vector_p (first);
10543 return first;
10546 return NULL_TREE;
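/* For example (illustrative), the constant vector {4, 4, 4, 4} yields 4,
   while {4, 4, 4, 5} yields NULL_TREE.  */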
10549 /* If the argument is an INTEGER_CST, return it. If the argument is a vector
10550 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10551 return NULL_TREE.
10552 Look through location wrappers. */
10554 tree
10555 uniform_integer_cst_p (tree t)
10557 STRIP_ANY_LOCATION_WRAPPER (t);
10559 if (TREE_CODE (t) == INTEGER_CST)
10560 return t;
10562 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10564 t = uniform_vector_p (t);
10565 if (t && TREE_CODE (t) == INTEGER_CST)
10566 return t;
10569 return NULL_TREE;
10572 /* Check whether T is an integer constant or a constant integer vector in which every
10573 element E satisfies ~E + 1 == pow2; if so, return ~E in the corresponding unsigned type, otherwise NULL_TREE. */
10575 tree
10576 bitmask_inv_cst_vector_p (tree t)
10579 tree_code code = TREE_CODE (t);
10580 tree type = TREE_TYPE (t);
10582 if (!INTEGRAL_TYPE_P (type)
10583 && !VECTOR_INTEGER_TYPE_P (type))
10584 return NULL_TREE;
10586 unsigned HOST_WIDE_INT nelts = 1;
10587 tree cst;
10588 unsigned int idx = 0;
10589 bool uniform = uniform_integer_cst_p (t);
10590 tree newtype = unsigned_type_for (type);
10591 tree_vector_builder builder;
10592 if (code == INTEGER_CST)
10593 cst = t;
10594 else
10596 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10597 return NULL_TREE;
10599 cst = vector_cst_elt (t, 0);
10600 builder.new_vector (newtype, nelts, 1);
10603 tree ty = unsigned_type_for (TREE_TYPE (cst));
10607 if (idx > 0)
10608 cst = vector_cst_elt (t, idx);
10609 wide_int icst = wi::to_wide (cst);
10610 wide_int inv = wi::bit_not (icst);
10611 icst = wi::add (1, inv);
10612 if (wi::popcount (icst) != 1)
10613 return NULL_TREE;
10615 tree newcst = wide_int_to_tree (ty, inv);
10617 if (uniform)
10618 return build_uniform_cst (newtype, newcst);
10620 builder.quick_push (newcst);
10622 while (++idx < nelts);
10624 return builder.build ();
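/* Example (illustrative): for the 32-bit constant E = 0xfffffff0, ~E = 0xf
   and ~E + 1 = 0x10 is a power of two (popcount 1), so the function returns
   0xf in the corresponding unsigned type; for a vector constant the same
   test is applied to every element.  */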
10627 /* If VECTOR_CST T has a single nonzero element, return the index of that
10628 element, otherwise return -1. */
10630 int
10631 single_nonzero_element (const_tree t)
10633 unsigned HOST_WIDE_INT nelts;
10634 unsigned int repeat_nelts;
10635 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10636 repeat_nelts = nelts;
10637 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10639 nelts = vector_cst_encoded_nelts (t);
10640 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10642 else
10643 return -1;
10645 int res = -1;
10646 for (unsigned int i = 0; i < nelts; ++i)
10648 tree elt = vector_cst_elt (t, i);
10649 if (!integer_zerop (elt) && !real_zerop (elt))
10651 if (res >= 0 || i >= repeat_nelts)
10652 return -1;
10653 res = i;
10656 return res;
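/* For example (illustrative), the constant vector {0, 0, 5, 0} yields 2,
   while {0, 3, 5, 0} and {0, 0, 0, 0} both yield -1.  */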
10659 /* Build an empty statement at location LOC. */
10661 tree
10662 build_empty_stmt (location_t loc)
10664 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10665 SET_EXPR_LOCATION (t, loc);
10666 return t;
10670 /* Build an OMP clause with code CODE. LOC is the location of the
10671 clause. */
10673 tree
10674 build_omp_clause (location_t loc, enum omp_clause_code code)
10676 tree t;
10677 int size, length;
10679 length = omp_clause_num_ops[code];
10680 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10682 record_node_allocation_statistics (OMP_CLAUSE, size);
10684 t = (tree) ggc_internal_alloc (size);
10685 memset (t, 0, size);
10686 TREE_SET_CODE (t, OMP_CLAUSE);
10687 OMP_CLAUSE_SET_CODE (t, code);
10688 OMP_CLAUSE_LOCATION (t) = loc;
10690 return t;
10693 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10694 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10695 Except for the CODE and operand count field, other storage for the
10696 object is initialized to zeros. */
10698 tree
10699 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10701 tree t;
10702 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10704 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10705 gcc_assert (len >= 1);
10707 record_node_allocation_statistics (code, length);
10709 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10711 TREE_SET_CODE (t, code);
10713 /* Can't use TREE_OPERAND to store the length because if checking is
10714 enabled, it will try to check the length before we store it. :-P */
10715 t->exp.operands[0] = build_int_cst (sizetype, len);
10717 return t;
10720 /* Helper function for build_call_* functions; build a CALL_EXPR with
10721 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10722 the argument slots. */
10724 static tree
10725 build_call_1 (tree return_type, tree fn, int nargs)
10727 tree t;
10729 t = build_vl_exp (CALL_EXPR, nargs + 3);
10730 TREE_TYPE (t) = return_type;
10731 CALL_EXPR_FN (t) = fn;
10732 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10734 return t;
10737 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10738 FN and a null static chain slot. NARGS is the number of call arguments
10739 which are specified as "..." arguments. */
10741 tree
10742 build_call_nary (tree return_type, tree fn, int nargs, ...)
10744 tree ret;
10745 va_list args;
10746 va_start (args, nargs);
10747 ret = build_call_valist (return_type, fn, nargs, args);
10748 va_end (args);
10749 return ret;
10752 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10753 FN and a null static chain slot. NARGS is the number of call arguments
10754 which are specified as a va_list ARGS. */
10756 tree
10757 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10759 tree t;
10760 int i;
10762 t = build_call_1 (return_type, fn, nargs);
10763 for (i = 0; i < nargs; i++)
10764 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10765 process_call_operands (t);
10766 return t;
10769 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10770 FN and a null static chain slot. NARGS is the number of call arguments
10771 which are specified as a tree array ARGS. */
10773 tree
10774 build_call_array_loc (location_t loc, tree return_type, tree fn,
10775 int nargs, const tree *args)
10777 tree t;
10778 int i;
10780 t = build_call_1 (return_type, fn, nargs);
10781 for (i = 0; i < nargs; i++)
10782 CALL_EXPR_ARG (t, i) = args[i];
10783 process_call_operands (t);
10784 SET_EXPR_LOCATION (t, loc);
10785 return t;
10788 /* Like build_call_array, but takes a vec. */
10790 tree
10791 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10793 tree ret, t;
10794 unsigned int ix;
10796 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10797 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10798 CALL_EXPR_ARG (ret, ix) = t;
10799 process_call_operands (ret);
10800 return ret;
10803 /* Conveniently construct a function call expression. FNDECL names the
10804 function to be called and N arguments are passed in the array
10805 ARGARRAY. */
10807 tree
10808 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10810 tree fntype = TREE_TYPE (fndecl);
10811 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10813 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10816 /* Conveniently construct a function call expression. FNDECL names the
10817 function to be called and the arguments are passed in the vector
10818 VEC. */
10820 tree
10821 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10823 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10824 vec_safe_address (vec));
10828 /* Conveniently construct a function call expression. FNDECL names the
10829 function to be called, N is the number of arguments, and the "..."
10830 parameters are the argument expressions. */
10832 tree
10833 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10835 va_list ap;
10836 tree *argarray = XALLOCAVEC (tree, n);
10837 int i;
10839 va_start (ap, n);
10840 for (i = 0; i < n; i++)
10841 argarray[i] = va_arg (ap, tree);
10842 va_end (ap);
10843 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10846 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10847 varargs macros aren't supported by all bootstrap compilers. */
10849 tree
10850 build_call_expr (tree fndecl, int n, ...)
10852 va_list ap;
10853 tree *argarray = XALLOCAVEC (tree, n);
10854 int i;
10856 va_start (ap, n);
10857 for (i = 0; i < n; i++)
10858 argarray[i] = va_arg (ap, tree);
10859 va_end (ap);
10860 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10863 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10864 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10865 It will get gimplified later into an ordinary internal function. */
10867 tree
10868 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10869 tree type, int n, const tree *args)
10871 tree t = build_call_1 (type, NULL_TREE, n);
10872 for (int i = 0; i < n; ++i)
10873 CALL_EXPR_ARG (t, i) = args[i];
10874 SET_EXPR_LOCATION (t, loc);
10875 CALL_EXPR_IFN (t) = ifn;
10876 process_call_operands (t);
10877 return t;
10880 /* Build an internal call expression. This is just like CALL_EXPR, except
10881 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10882 internal function. */
10884 tree
10885 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10886 tree type, int n, ...)
10888 va_list ap;
10889 tree *argarray = XALLOCAVEC (tree, n);
10890 int i;
10892 va_start (ap, n);
10893 for (i = 0; i < n; i++)
10894 argarray[i] = va_arg (ap, tree);
10895 va_end (ap);
10896 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10899 /* Return a function call to FN, if the target is guaranteed to support it,
10900 or null otherwise.
10902 N is the number of arguments, passed in the "...", and TYPE is the
10903 type of the return value. */
10905 tree
10906 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10907 int n, ...)
10909 va_list ap;
10910 tree *argarray = XALLOCAVEC (tree, n);
10911 int i;
10913 va_start (ap, n);
10914 for (i = 0; i < n; i++)
10915 argarray[i] = va_arg (ap, tree);
10916 va_end (ap);
10917 if (internal_fn_p (fn))
10919 internal_fn ifn = as_internal_fn (fn);
10920 if (direct_internal_fn_p (ifn))
10922 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10923 if (!direct_internal_fn_supported_p (ifn, types,
10924 OPTIMIZE_FOR_BOTH))
10925 return NULL_TREE;
10927 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10929 else
10931 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10932 if (!fndecl)
10933 return NULL_TREE;
10934 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10938 /* Return a function call to the appropriate builtin alloca variant.
10940 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10941 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10942 bound for SIZE in case it is not a fixed value. */
10944 tree
10945 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10947 if (max_size >= 0)
10949 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10950 return
10951 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10953 else if (align > 0)
10955 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10956 return build_call_expr (t, 2, size, size_int (align));
10958 else
10960 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10961 return build_call_expr (t, 1, size);
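/* For instance (illustrative): a caller passing ALIGN > 0 and a negative
   MAX_SIZE obtains a call to __builtin_alloca_with_align (SIZE, ALIGN),
   while ALIGN == 0 with a negative MAX_SIZE falls back to plain
   __builtin_alloca (SIZE).  */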
10965 /* The built-in decl to use to mark code points believed to be unreachable.
10966 Typically __builtin_unreachable, but __builtin_trap if
10967 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10968 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10969 appropriate ubsan function. When building a call directly, use
10970 {gimple_,}build_builtin_unreachable instead. */
10972 tree
10973 builtin_decl_unreachable ()
10975 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10977 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10978 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10979 : flag_unreachable_traps)
10980 fncode = BUILT_IN_UNREACHABLE_TRAP;
10981 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10982 in the sanopt pass. */
10984 return builtin_decl_explicit (fncode);
10987 /* Build a call to __builtin_unreachable, possibly rewritten by
10988 -fsanitize=unreachable. Use this rather than the above when practical. */
10990 tree
10991 build_builtin_unreachable (location_t loc)
10993 tree data = NULL_TREE;
10994 tree fn = sanitize_unreachable_fn (&data, loc);
10995 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10998 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10999 if SIZE == -1) and return a tree node representing char* pointer to
11000 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
11001 the STRING_CST value is the LEN bytes at STR (the representation
11002 of the string, which may be wide). Otherwise it's all zeros. */
11004 tree
11005 build_string_literal (unsigned len, const char *str /* = NULL */,
11006 tree eltype /* = char_type_node */,
11007 unsigned HOST_WIDE_INT size /* = -1 */)
11009 tree t = build_string (len, str);
11010 /* Set the maximum valid index based on the string length or SIZE. */
11011 unsigned HOST_WIDE_INT maxidx
11012 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11014 tree index = build_index_type (size_int (maxidx));
11015 eltype = build_type_variant (eltype, 1, 0);
11016 tree type = build_array_type (eltype, index);
11017 TREE_TYPE (t) = type;
11018 TREE_CONSTANT (t) = 1;
11019 TREE_READONLY (t) = 1;
11020 TREE_STATIC (t) = 1;
11022 type = build_pointer_type (eltype);
11023 t = build1 (ADDR_EXPR, type,
11024 build4 (ARRAY_REF, eltype,
11025 t, integer_zero_node, NULL_TREE, NULL_TREE));
11026 return t;
11031 /* Return true if T (assumed to be a DECL) must be assigned a memory
11032 location. */
11034 bool
11035 needs_to_live_in_memory (const_tree t)
11037 return (TREE_ADDRESSABLE (t)
11038 || is_global_var (t)
11039 || (TREE_CODE (t) == RESULT_DECL
11040 && !DECL_BY_REFERENCE (t)
11041 && aggregate_value_p (t, current_function_decl)));
11044 /* Return the value of a constant X, sign-extended. */
11046 HOST_WIDE_INT
11047 int_cst_value (const_tree x)
11049 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11050 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11052 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11053 gcc_assert (cst_and_fits_in_hwi (x));
11055 if (bits < HOST_BITS_PER_WIDE_INT)
11057 bool negative = ((val >> (bits - 1)) & 1) != 0;
11058 if (negative)
11059 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11060 else
11061 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11064 return val;
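/* Sign-extension example (illustrative): for an 8-bit INTEGER_CST whose low
   bits are 0xff, the sign bit is set, so the returned HOST_WIDE_INT is -1
   rather than 255.  */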
11067 /* If TYPE is an integral or pointer type, return an integer type with
11068 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11069 if TYPE is already an integer type of signedness UNSIGNEDP.
11070 If TYPE is a floating-point type, return an integer type with the same
11071 bitsize and with the signedness given by UNSIGNEDP; this is useful
11072 when doing bit-level operations on a floating-point value. */
11074 tree
11075 signed_or_unsigned_type_for (int unsignedp, tree type)
11077 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11078 return type;
11080 if (TREE_CODE (type) == VECTOR_TYPE)
11082 tree inner = TREE_TYPE (type);
11083 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11084 if (!inner2)
11085 return NULL_TREE;
11086 if (inner == inner2)
11087 return type;
11088 machine_mode new_mode;
11089 if (VECTOR_MODE_P (TYPE_MODE (type))
11090 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11091 return build_vector_type_for_mode (inner2, new_mode);
11092 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11095 if (TREE_CODE (type) == COMPLEX_TYPE)
11097 tree inner = TREE_TYPE (type);
11098 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11099 if (!inner2)
11100 return NULL_TREE;
11101 if (inner == inner2)
11102 return type;
11103 return build_complex_type (inner2);
11106 unsigned int bits;
11107 if (INTEGRAL_TYPE_P (type)
11108 || POINTER_TYPE_P (type)
11109 || TREE_CODE (type) == OFFSET_TYPE)
11110 bits = TYPE_PRECISION (type);
11111 else if (TREE_CODE (type) == REAL_TYPE)
11112 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11113 else
11114 return NULL_TREE;
11116 if (TREE_CODE (type) == BITINT_TYPE && (unsignedp || bits > 1))
11117 return build_bitint_type (bits, unsignedp);
11118 return build_nonstandard_integer_type (bits, unsignedp);
11121 /* If TYPE is an integral or pointer type, return an integer type with
11122 the same precision which is unsigned, or itself if TYPE is already an
11123 unsigned integer type. If TYPE is a floating-point type, return an
11124 unsigned integer type with the same bitsize as TYPE. */
11126 tree
11127 unsigned_type_for (tree type)
11129 return signed_or_unsigned_type_for (1, type);
11132 /* If TYPE is an integral or pointer type, return an integer type with
11133 the same precision which is signed, or itself if TYPE is already a
11134 signed integer type. If TYPE is a floating-point type, return a
11135 signed integer type with the same bitsize as TYPE. */
11137 tree
11138 signed_type_for (tree type)
11140 return signed_or_unsigned_type_for (0, type);
11143 /* - For VECTOR_TYPEs:
11144 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11145 - The number of elements must match (known_eq).
11146 - targetm.vectorize.get_mask_mode must exist and return exactly
11147 the same mode as the truth type.
11148 - Otherwise, the truth type must be a BOOLEAN_TYPE
11149 or useless_type_conversion_p to BOOLEAN_TYPE. */
11150 bool
11151 is_truth_type_for (tree type, tree truth_type)
11153 machine_mode mask_mode = TYPE_MODE (truth_type);
11154 machine_mode vmode = TYPE_MODE (type);
11155 machine_mode tmask_mode;
11157 if (TREE_CODE (type) == VECTOR_TYPE)
11159 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11160 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11161 TYPE_VECTOR_SUBPARTS (truth_type))
11162 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11163 && tmask_mode == mask_mode)
11164 return true;
11166 return false;
11169 return useless_type_conversion_p (boolean_type_node, truth_type);
11172 /* If TYPE is a vector type, return the corresponding boolean vector type,
11173 with one boolean per element of TYPE. Otherwise return boolean_type_node. */
11175 tree
11176 truth_type_for (tree type)
11178 if (TREE_CODE (type) == VECTOR_TYPE)
11180 if (VECTOR_BOOLEAN_TYPE_P (type))
11181 return type;
11182 return build_truth_vector_type_for (type);
11184 else
11185 return boolean_type_node;
11188 /* Returns the largest value obtainable by casting something in INNER type to
11189 OUTER type. */
11191 tree
11192 upper_bound_in_type (tree outer, tree inner)
11194 unsigned int det = 0;
11195 unsigned oprec = TYPE_PRECISION (outer);
11196 unsigned iprec = TYPE_PRECISION (inner);
11197 unsigned prec;
11199 /* Compute a unique number for every combination. */
11200 det |= (oprec > iprec) ? 4 : 0;
11201 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11202 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11204 /* Determine the exponent to use. */
11205 switch (det)
11207 case 0:
11208 case 1:
11209 /* oprec <= iprec, outer: signed, inner: don't care. */
11210 prec = oprec - 1;
11211 break;
11212 case 2:
11213 case 3:
11214 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11215 prec = oprec;
11216 break;
11217 case 4:
11218 /* oprec > iprec, outer: signed, inner: signed. */
11219 prec = iprec - 1;
11220 break;
11221 case 5:
11222 /* oprec > iprec, outer: signed, inner: unsigned. */
11223 prec = iprec;
11224 break;
11225 case 6:
11226 /* oprec > iprec, outer: unsigned, inner: signed. */
11227 prec = oprec;
11228 break;
11229 case 7:
11230 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11231 prec = iprec;
11232 break;
11233 default:
11234 gcc_unreachable ();
11237 return wide_int_to_tree (outer,
11238 wi::mask (prec, false, TYPE_PRECISION (outer)));
11241 /* Returns the smallest value obtainable by casting something in INNER type to
11242 OUTER type. */
11244 tree
11245 lower_bound_in_type (tree outer, tree inner)
11247 unsigned oprec = TYPE_PRECISION (outer);
11248 unsigned iprec = TYPE_PRECISION (inner);
11250 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11251 and obtain 0. */
11252 if (TYPE_UNSIGNED (outer)
11253 /* If we are widening something of an unsigned type, OUTER type
11254 contains all values of INNER type. In particular, both INNER
11255 and OUTER types have zero in common. */
11256 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11257 return build_int_cst (outer, 0);
11258 else
11260 /* If we are widening a signed type to another signed type, we
11261 want to obtain -2^^(iprec-1). If we are keeping the
11262 precision or narrowing to a signed type, we want to obtain
11263 -2^(oprec-1). */
11264 unsigned prec = oprec > iprec ? iprec : oprec;
11265 return wide_int_to_tree (outer,
11266 wi::mask (prec - 1, true,
11267 TYPE_PRECISION (outer)));
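/* Example for the two bound routines above (illustrative): casting a signed
   8-bit value to a signed 32-bit type gives upper_bound_in_type = 2^7 - 1
   = 127 and lower_bound_in_type = -2^7 = -128, since the narrower source
   precision limits the range in both directions.  */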
11271 /* Return true if two operands that are suitable for PHI nodes are
11272 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11273 SSA_NAME or invariant. Note that this is strictly an optimization.
11274 That is, callers of this function can directly call operand_equal_p
11275 and get the same result, only slower. */
11277 bool
11278 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11280 if (arg0 == arg1)
11281 return true;
11282 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11283 return false;
11284 return operand_equal_p (arg0, arg1, 0);
11287 /* Returns number of zeros at the end of binary representation of X. */
11289 tree
11290 num_ending_zeros (const_tree x)
11292 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
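/* E.g. (illustrative) for X = 8 (binary 1000) this returns 3; for any odd X
   it returns 0.  */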
11296 #define WALK_SUBTREE(NODE) \
11297 do \
11299 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11300 if (result) \
11301 return result; \
11303 while (0)
11305 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11306 be walked whenever a type is seen in the tree. The rest of the operands and return
11307 value are as for walk_tree. */
11309 static tree
11310 walk_type_fields (tree type, walk_tree_fn func, void *data,
11311 hash_set<tree> *pset, walk_tree_lh lh)
11313 tree result = NULL_TREE;
11315 switch (TREE_CODE (type))
11317 case POINTER_TYPE:
11318 case REFERENCE_TYPE:
11319 case VECTOR_TYPE:
11320 /* We have to worry about mutually recursive pointers. These can't
11321 be written in C. They can in Ada. It's pathological, but
11322 there's an ACATS test (c38102a) that checks it. Deal with this
11323 by checking if we're pointing to another pointer, that one
11324 points to another pointer, that one does too, and we have no htab.
11325 If so, get a hash table. We check three levels deep to avoid
11326 the cost of the hash table if we don't need one. */
11327 if (POINTER_TYPE_P (TREE_TYPE (type))
11328 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11329 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11330 && !pset)
11332 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11333 func, data);
11334 if (result)
11335 return result;
11337 break;
11340 /* fall through */
11342 case COMPLEX_TYPE:
11343 WALK_SUBTREE (TREE_TYPE (type));
11344 break;
11346 case METHOD_TYPE:
11347 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11349 /* Fall through. */
11351 case FUNCTION_TYPE:
11352 WALK_SUBTREE (TREE_TYPE (type));
11354 tree arg;
11356 /* We never want to walk into default arguments. */
11357 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11358 WALK_SUBTREE (TREE_VALUE (arg));
11360 break;
11362 case ARRAY_TYPE:
11363 /* Don't follow this node's type if it is a pointer, for fear that
11364 we'll have infinite recursion. If we have a PSET, then we
11365 need not fear. */
11366 if (pset
11367 || (!POINTER_TYPE_P (TREE_TYPE (type))
11368 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11369 WALK_SUBTREE (TREE_TYPE (type));
11370 WALK_SUBTREE (TYPE_DOMAIN (type));
11371 break;
11373 case OFFSET_TYPE:
11374 WALK_SUBTREE (TREE_TYPE (type));
11375 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11376 break;
11378 default:
11379 break;
11382 return NULL_TREE;
11385 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11386 called with the DATA and the address of each sub-tree. If FUNC returns a
11387 non-NULL value, the traversal is stopped, and the value returned by FUNC
11388 is returned. If PSET is non-NULL it is used to record the nodes visited,
11389 and to avoid visiting a node more than once. */
11391 tree
11392 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11393 hash_set<tree> *pset, walk_tree_lh lh)
11395 #define WALK_SUBTREE_TAIL(NODE) \
11396 do \
11398 tp = & (NODE); \
11399 goto tail_recurse; \
11401 while (0)
11403 tail_recurse:
11404 /* Skip empty subtrees. */
11405 if (!*tp)
11406 return NULL_TREE;
11408 /* Don't walk the same tree twice, if the user has requested
11409 that we avoid doing so. */
11410 if (pset && pset->add (*tp))
11411 return NULL_TREE;
11413 /* Call the function. */
11414 int walk_subtrees = 1;
11415 tree result = (*func) (tp, &walk_subtrees, data);
11417 /* If we found something, return it. */
11418 if (result)
11419 return result;
11421 tree t = *tp;
11422 tree_code code = TREE_CODE (t);
11424 /* Even if we didn't, FUNC may have decided that there was nothing
11425 interesting below this point in the tree. */
11426 if (!walk_subtrees)
11428 /* But we still need to check our siblings. */
11429 if (code == TREE_LIST)
11430 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11431 else if (code == OMP_CLAUSE)
11432 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11433 else
11434 return NULL_TREE;
11437 if (lh)
11439 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11440 if (result || !walk_subtrees)
11441 return result;
11444 switch (code)
11446 case ERROR_MARK:
11447 case IDENTIFIER_NODE:
11448 case INTEGER_CST:
11449 case REAL_CST:
11450 case FIXED_CST:
11451 case STRING_CST:
11452 case BLOCK:
11453 case PLACEHOLDER_EXPR:
11454 case SSA_NAME:
11455 case FIELD_DECL:
11456 case RESULT_DECL:
11457 /* None of these have subtrees other than those already walked
11458 above. */
11459 break;
11461 case TREE_LIST:
11462 WALK_SUBTREE (TREE_VALUE (t));
11463 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11465 case TREE_VEC:
11467 int len = TREE_VEC_LENGTH (t);
11469 if (len == 0)
11470 break;
11472 /* Walk all elements but the last. */
11473 for (int i = 0; i < len - 1; ++i)
11474 WALK_SUBTREE (TREE_VEC_ELT (t, i));
11476 /* Now walk the last one as a tail call. */
11477 WALK_SUBTREE_TAIL (TREE_VEC_ELT (t, len - 1));
11480 case VECTOR_CST:
11482 unsigned len = vector_cst_encoded_nelts (t);
11483 if (len == 0)
11484 break;
11485 /* Walk all elements but the last. */
11486 for (unsigned i = 0; i < len - 1; ++i)
11487 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (t, i));
11488 /* Now walk the last one as a tail call. */
11489 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (t, len - 1));
11492 case COMPLEX_CST:
11493 WALK_SUBTREE (TREE_REALPART (t));
11494 WALK_SUBTREE_TAIL (TREE_IMAGPART (t));
11496 case CONSTRUCTOR:
11498 unsigned HOST_WIDE_INT idx;
11499 constructor_elt *ce;
11501 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce);
11502 idx++)
11503 WALK_SUBTREE (ce->value);
11505 break;
11507 case SAVE_EXPR:
11508 WALK_SUBTREE_TAIL (TREE_OPERAND (t, 0));
11510 case BIND_EXPR:
11512 tree decl;
11513 for (decl = BIND_EXPR_VARS (t); decl; decl = DECL_CHAIN (decl))
11515 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11516 into declarations that are just mentioned, rather than
11517 declared; they don't really belong to this part of the tree.
11518 And, we can see cycles: the initializer for a declaration
11519 can refer to the declaration itself. */
11520 WALK_SUBTREE (DECL_INITIAL (decl));
11521 WALK_SUBTREE (DECL_SIZE (decl));
11522 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11524 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (t));
11527 case STATEMENT_LIST:
11529 tree_stmt_iterator i;
11530 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
11531 WALK_SUBTREE (*tsi_stmt_ptr (i));
11533 break;
11535 case OMP_CLAUSE:
11537 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
11538 for (int i = 0; i < len; i++)
11539 WALK_SUBTREE (OMP_CLAUSE_OPERAND (t, i));
11540 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11543 case TARGET_EXPR:
11545 int i, len;
11547 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11548 But, we only want to walk once. */
11549 len = (TREE_OPERAND (t, 3) == TREE_OPERAND (t, 1)) ? 2 : 3;
11550 for (i = 0; i < len; ++i)
11551 WALK_SUBTREE (TREE_OPERAND (t, i));
11552 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len));
11555 case DECL_EXPR:
11556 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11557 defining. We only want to walk into these fields of a type in this
11558 case and not in the general case of a mere reference to the type.
11560 The criterion is as follows: if the field can be an expression, it
11561 must be walked only here. This should be in keeping with the fields
11562 that are directly gimplified in gimplify_type_sizes in order for the
11563 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11564 variable-sized types.
11566 Note that DECLs get walked as part of processing the BIND_EXPR. */
11567 if (TREE_CODE (DECL_EXPR_DECL (t)) == TYPE_DECL)
11569 /* Call the function for the decl so e.g. copy_tree_body_r can
11570 replace it with the remapped one. */
11571 result = (*func) (&DECL_EXPR_DECL (t), &walk_subtrees, data);
11572 if (result || !walk_subtrees)
11573 return result;
11575 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (t));
11576 if (TREE_CODE (*type_p) == ERROR_MARK)
11577 return NULL_TREE;
11579 /* Call the function for the type. See if it returns anything or
11580 doesn't want us to continue. If we are to continue, walk both
11581 the normal fields and those for the declaration case. */
11582 result = (*func) (type_p, &walk_subtrees, data);
11583 if (result || !walk_subtrees)
11584 return result;
11586 tree type = *type_p;
11588 /* But do not walk a pointed-to type since it may itself need to
11589 be walked in the declaration case if it isn't anonymous. */
11590 if (!POINTER_TYPE_P (type))
11592 result = walk_type_fields (type, func, data, pset, lh);
11593 if (result)
11594 return result;
11597 /* If this is a record type, also walk the fields. */
11598 if (RECORD_OR_UNION_TYPE_P (type))
11600 tree field;
11602 for (field = TYPE_FIELDS (type); field;
11603 field = DECL_CHAIN (field))
11605 /* We'd like to look at the type of the field, but we can
11606 easily get infinite recursion. So assume it's pointed
11607 to elsewhere in the tree. Also, ignore things that
11608 aren't fields. */
11609 if (TREE_CODE (field) != FIELD_DECL)
11610 continue;
11612 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11613 WALK_SUBTREE (DECL_SIZE (field));
11614 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11615 if (TREE_CODE (type) == QUAL_UNION_TYPE)
11616 WALK_SUBTREE (DECL_QUALIFIER (field));
11620 /* Same for scalar types. */
11621 else if (TREE_CODE (type) == BOOLEAN_TYPE
11622 || TREE_CODE (type) == ENUMERAL_TYPE
11623 || TREE_CODE (type) == INTEGER_TYPE
11624 || TREE_CODE (type) == FIXED_POINT_TYPE
11625 || TREE_CODE (type) == REAL_TYPE)
11627 WALK_SUBTREE (TYPE_MIN_VALUE (type));
11628 WALK_SUBTREE (TYPE_MAX_VALUE (type));
11631 WALK_SUBTREE (TYPE_SIZE (type));
11632 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (type));
11634 /* FALLTHRU */
11636 default:
11637 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11639 int i, len;
11641 /* Walk over all the sub-trees of this operand. */
11642 len = TREE_OPERAND_LENGTH (t);
11644 /* Go through the subtrees. We need to do this in forward order so
11645 that the scope of a FOR_EXPR is handled properly. */
11646 if (len)
11648 for (i = 0; i < len - 1; ++i)
11649 WALK_SUBTREE (TREE_OPERAND (t, i));
11650 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len - 1));
11653 /* If this is a type, walk the needed fields in the type. */
11654 else if (TYPE_P (t))
11655 return walk_type_fields (t, func, data, pset, lh);
11656 break;
11659 /* We didn't find what we were looking for. */
11660 return NULL_TREE;
11662 #undef WALK_SUBTREE_TAIL
11664 #undef WALK_SUBTREE
11666 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11668 tree
11669 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11670 walk_tree_lh lh)
11672 tree result;
11674 hash_set<tree> pset;
11675 result = walk_tree_1 (tp, func, data, &pset, lh);
11676 return result;
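/* A minimal illustrative sketch of the callback protocol; the
   count_call_exprs_r, fnbody and ncalls names are hypothetical, while
   the walker, the WALK_SUBTREES flag and the NULL_TREE "keep going"
   return convention are the ones implemented above.

     static tree
     count_call_exprs_r (tree *tp, int *walk_subtrees, void *data)
     {
       if (TYPE_P (*tp))
         *walk_subtrees = 0;            // do not descend into types
       else if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(unsigned *) data;
       return NULL_TREE;                // a non-NULL return would stop the walk
     }

     unsigned ncalls = 0;
     walk_tree_without_duplicates (&fnbody, count_call_exprs_r, &ncalls);  */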
11680 tree
11681 tree_block (tree t)
11683 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11685 if (IS_EXPR_CODE_CLASS (c))
11686 return LOCATION_BLOCK (t->exp.locus);
11687 gcc_unreachable ();
11688 return NULL;
11691 void
11692 tree_set_block (tree t, tree b)
11694 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11696 if (IS_EXPR_CODE_CLASS (c))
11698 t->exp.locus = set_block (t->exp.locus, b);
11700 else
11701 gcc_unreachable ();
11704 /* Create a nameless artificial label and put it in the current
11705 function context. The label has a location of LOC. Returns the
11706 newly created label. */
11708 tree
11709 create_artificial_label (location_t loc)
11711 tree lab = build_decl (loc,
11712 LABEL_DECL, NULL_TREE, void_type_node);
11714 DECL_ARTIFICIAL (lab) = 1;
11715 DECL_IGNORED_P (lab) = 1;
11716 DECL_CONTEXT (lab) = current_function_decl;
11717 return lab;
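/* An illustrative sketch of typical use when lowering control flow;
   LOC is assumed to be a location_t already at hand, and build1 and
   void_type_node are the generic facilities from tree.h.

     tree lab = create_artificial_label (loc);
     tree lab_expr = build1 (LABEL_EXPR, void_type_node, lab);  */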
11720 /* Given a tree, try to return a useful variable name that we can use
11721 to prefix a temporary that is being assigned the value of the tree.
11722 I.e. given <temp> = &A, return A. */
11724 const char *
11725 get_name (tree t)
11727 tree stripped_decl;
11729 stripped_decl = t;
11730 STRIP_NOPS (stripped_decl);
11731 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11732 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11733 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11735 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11736 if (!name)
11737 return NULL;
11738 return IDENTIFIER_POINTER (name);
11740 else
11742 switch (TREE_CODE (stripped_decl))
11744 case ADDR_EXPR:
11745 return get_name (TREE_OPERAND (stripped_decl, 0));
11746 default:
11747 return NULL;
11752 /* Return true if TYPE has a variable argument list. */
11754 bool
11755 stdarg_p (const_tree fntype)
11757 function_args_iterator args_iter;
11758 tree n = NULL_TREE, t;
11760 if (!fntype)
11761 return false;
11763 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11764 return true;
11766 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11768 n = t;
11771 return n != NULL_TREE && n != void_type_node;
11774 /* Return true if TYPE has a prototype. */
11776 bool
11777 prototype_p (const_tree fntype)
11779 tree t;
11781 gcc_assert (fntype != NULL_TREE);
11783 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11784 return true;
11786 t = TYPE_ARG_TYPES (fntype);
11787 return (t != NULL_TREE);
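/* Worked examples in terms of the C declarations the front end builds
   FUNCTION_TYPEs for:

     int f (int, ...);   // stdarg_p: true    prototype_p: true
     int g (void);       // stdarg_p: false   prototype_p: true
     int h ();           // pre-C23 unprototyped:
                         // stdarg_p: false   prototype_p: false

   The TYPE_NO_NAMED_ARGS_STDARG_P check covers the C23-style
   "int k (...);", which takes no named arguments yet is a prototyped
   variadic function, so both predicates return true for it.  */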
11790 /* If BLOCK is inlined from an __attribute__((__artificial__))
11791 routine, return pointer to location from where it has been
11792 called. */
11793 location_t *
11794 block_nonartificial_location (tree block)
11796 location_t *ret = NULL;
11798 while (block && TREE_CODE (block) == BLOCK
11799 && BLOCK_ABSTRACT_ORIGIN (block))
11801 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11802 if (TREE_CODE (ao) == FUNCTION_DECL)
11804 /* If AO is an artificial inline, point RET to the
11805 call site locus at which it has been inlined and continue
11806 the loop, in case AO's caller is also an artificial
11807 inline. */
11808 if (DECL_DECLARED_INLINE_P (ao)
11809 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11810 ret = &BLOCK_SOURCE_LOCATION (block);
11811 else
11812 break;
11814 else if (TREE_CODE (ao) != BLOCK)
11815 break;
11817 block = BLOCK_SUPERCONTEXT (block);
11819 return ret;
11823 /* If EXP is inlined from an __attribute__((__artificial__))
11824 function, return the location of the original call expression. */
11826 location_t
11827 tree_nonartificial_location (tree exp)
11829 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11831 if (loc)
11832 return *loc;
11833 else
11834 return EXPR_LOCATION (exp);
11837 /* Return the location into which EXP has been inlined. Analogous
11838 to tree_nonartificial_location() above but not limited to artificial
11839 functions declared inline. If SYSTEM_HEADER is true, return
11840 the macro expansion point of the location if it's in a system header. */
11842 location_t
11843 tree_inlined_location (tree exp, bool system_header /* = true */)
11845 location_t loc = UNKNOWN_LOCATION;
11847 tree block = TREE_BLOCK (exp);
11849 while (block && TREE_CODE (block) == BLOCK
11850 && BLOCK_ABSTRACT_ORIGIN (block))
11852 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11853 if (TREE_CODE (ao) == FUNCTION_DECL)
11854 loc = BLOCK_SOURCE_LOCATION (block);
11855 else if (TREE_CODE (ao) != BLOCK)
11856 break;
11858 block = BLOCK_SUPERCONTEXT (block);
11861 if (loc == UNKNOWN_LOCATION)
11863 loc = EXPR_LOCATION (exp);
11864 if (system_header)
11865 /* Only consider macro expansion when the block traversal failed
11866 to find a location. Otherwise it's not relevant. */
11867 return expansion_point_location_if_in_system_header (loc);
11870 return loc;
11873 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11874 nodes. */
11876 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11878 hashval_t
11879 cl_option_hasher::hash (tree x)
11881 const_tree const t = x;
11883 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11884 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11885 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11886 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11887 else
11888 gcc_unreachable ();
11891 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11892 TARGET_OPTION tree node) is the same as that given by *Y, a node of
11893 the same kind. */
11895 bool
11896 cl_option_hasher::equal (tree x, tree y)
11898 const_tree const xt = x;
11899 const_tree const yt = y;
11901 if (TREE_CODE (xt) != TREE_CODE (yt))
11902 return false;
11904 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11905 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11906 TREE_OPTIMIZATION (yt));
11907 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11908 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11909 TREE_TARGET_OPTION (yt));
11910 else
11911 gcc_unreachable ();
11914 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11916 tree
11917 build_optimization_node (struct gcc_options *opts,
11918 struct gcc_options *opts_set)
11920 tree t;
11922 /* Use the cache of optimization nodes. */
11924 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11925 opts, opts_set);
11927 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11928 t = *slot;
11929 if (!t)
11931 /* Insert this one into the hash table. */
11932 t = cl_optimization_node;
11933 *slot = t;
11935 /* Make a new node for next time round. */
11936 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11939 return t;
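/* An illustrative sketch: because of the hash-table cache, two calls
   with identical option sets hand back pointer-identical nodes, so
   OPTIMIZATION_NODEs can be compared with ==.

     tree a = build_optimization_node (&global_options, &global_options_set);
     tree b = build_optimization_node (&global_options, &global_options_set);
     gcc_assert (a == b);               // same options, same cached node  */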
11942 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11944 tree
11945 build_target_option_node (struct gcc_options *opts,
11946 struct gcc_options *opts_set)
11948 tree t;
11950 /* Use the cache of optimization nodes. */
11952 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11953 opts, opts_set);
11955 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11956 t = *slot;
11957 if (!t)
11959 /* Insert this one into the hash table. */
11960 t = cl_target_option_node;
11961 *slot = t;
11963 /* Make a new node for next time round. */
11964 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11967 return t;
11970 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11971 so that they aren't saved during PCH writing. */
11973 void
11974 prepare_target_option_nodes_for_pch (void)
11976 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11977 for (; iter != cl_option_hash_table->end (); ++iter)
11978 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11979 TREE_TARGET_GLOBALS (*iter) = NULL;
11982 /* Determine the "ultimate origin" of a block. */
11984 tree
11985 block_ultimate_origin (const_tree block)
11987 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11989 if (origin == NULL_TREE)
11990 return NULL_TREE;
11991 else
11993 gcc_checking_assert ((DECL_P (origin)
11994 && DECL_ORIGIN (origin) == origin)
11995 || BLOCK_ORIGIN (origin) == origin);
11996 return origin;
12000 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12001 no instruction. */
12003 bool
12004 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12006 /* Do not strip casts into or out of differing address spaces. */
12007 if (POINTER_TYPE_P (outer_type)
12008 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12010 if (!POINTER_TYPE_P (inner_type)
12011 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12012 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12013 return false;
12015 else if (POINTER_TYPE_P (inner_type)
12016 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12018 /* We already know that outer_type is not a pointer with
12019 a non-generic address space. */
12020 return false;
12023 /* Use precision rather than machine mode when we can, which gives
12024 the correct answer even for submode (bit-field) types. */
12025 if ((INTEGRAL_TYPE_P (outer_type)
12026 || POINTER_TYPE_P (outer_type)
12027 || TREE_CODE (outer_type) == OFFSET_TYPE)
12028 && (INTEGRAL_TYPE_P (inner_type)
12029 || POINTER_TYPE_P (inner_type)
12030 || TREE_CODE (inner_type) == OFFSET_TYPE))
12031 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12033 /* Otherwise fall back on comparing machine modes (e.g. for
12034 aggregate types, floats). */
12035 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
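/* Worked examples, assuming an LP64 target (32-bit int, 64-bit long);
   the type nodes are the globals declared in tree.h:

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)
       -> true    // same precision; signedness is irrelevant here
     tree_nop_conversion_p (long_integer_type_node, integer_type_node)
       -> false   // 64-bit vs. 32-bit precision  */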
12038 /* Return true iff conversion in EXP generates no instruction. Mark
12039 it inline so that we fully inline into the stripping functions even
12040 though we have two uses of this function. */
12042 static inline bool
12043 tree_nop_conversion (const_tree exp)
12045 tree outer_type, inner_type;
12047 if (location_wrapper_p (exp))
12048 return true;
12049 if (!CONVERT_EXPR_P (exp)
12050 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12051 return false;
12053 outer_type = TREE_TYPE (exp);
12054 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12055 if (!inner_type || inner_type == error_mark_node)
12056 return false;
12058 return tree_nop_conversion_p (outer_type, inner_type);
12061 /* Return true iff conversion in EXP generates no instruction. Don't
12062 consider conversions changing the signedness. */
12064 static bool
12065 tree_sign_nop_conversion (const_tree exp)
12067 tree outer_type, inner_type;
12069 if (!tree_nop_conversion (exp))
12070 return false;
12072 outer_type = TREE_TYPE (exp);
12073 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12075 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12076 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12079 /* Strip conversions from EXP according to tree_nop_conversion and
12080 return the resulting expression. */
12082 tree
12083 tree_strip_nop_conversions (tree exp)
12085 while (tree_nop_conversion (exp))
12086 exp = TREE_OPERAND (exp, 0);
12087 return exp;
12090 /* Strip conversions from EXP according to tree_sign_nop_conversion
12091 and return the resulting expression. */
12093 tree
12094 tree_strip_sign_nop_conversions (tree exp)
12096 while (tree_sign_nop_conversion (exp))
12097 exp = TREE_OPERAND (exp, 0);
12098 return exp;
12101 /* Avoid any floating point extensions from EXP. */
12102 tree
12103 strip_float_extensions (tree exp)
12105 tree sub, expt, subt;
12107 /* For floating point constant look up the narrowest type that can hold
12108 it properly and handle it like (type)(narrowest_type)constant.
12109 This way we can optimize for instance a=a*2.0 where "a" is float
12110 but 2.0 is double constant. */
12111 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12113 REAL_VALUE_TYPE orig;
12114 tree type = NULL;
12116 orig = TREE_REAL_CST (exp);
12117 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12118 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12119 type = float_type_node;
12120 else if (TYPE_PRECISION (TREE_TYPE (exp))
12121 > TYPE_PRECISION (double_type_node)
12122 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12123 type = double_type_node;
12124 if (type)
12125 return build_real_truncate (type, orig);
12128 if (!CONVERT_EXPR_P (exp))
12129 return exp;
12131 sub = TREE_OPERAND (exp, 0);
12132 subt = TREE_TYPE (sub);
12133 expt = TREE_TYPE (exp);
12135 if (!FLOAT_TYPE_P (subt))
12136 return exp;
12138 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12139 return exp;
12141 if (element_precision (subt) > element_precision (expt))
12142 return exp;
12144 return strip_float_extensions (sub);
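/* A worked example: for the C expression "a * 2.0" with float A, the
   operands reach the middle end as "(double) a" and a double REAL_CST
   2.0.  strip_float_extensions returns A itself for the first operand
   and a float 2.0 constant for the second, which lets callers narrow
   the whole multiplication back to float.  */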
12147 /* Strip out all handled components that produce invariant
12148 offsets. */
12150 const_tree
12151 strip_invariant_refs (const_tree op)
12153 while (handled_component_p (op))
12155 switch (TREE_CODE (op))
12157 case ARRAY_REF:
12158 case ARRAY_RANGE_REF:
12159 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12160 || TREE_OPERAND (op, 2) != NULL_TREE
12161 || TREE_OPERAND (op, 3) != NULL_TREE)
12162 return NULL;
12163 break;
12165 case COMPONENT_REF:
12166 if (TREE_OPERAND (op, 2) != NULL_TREE)
12167 return NULL;
12168 break;
12170 default:;
12172 op = TREE_OPERAND (op, 0);
12175 return op;
12178 /* Strip handled components with zero offset from OP. */
12180 tree
12181 strip_zero_offset_components (tree op)
12183 while (TREE_CODE (op) == COMPONENT_REF
12184 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12185 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12186 op = TREE_OPERAND (op, 0);
12187 return op;
12190 static GTY(()) tree gcc_eh_personality_decl;
12192 /* Return the GCC personality function decl. */
12194 tree
12195 lhd_gcc_personality (void)
12197 if (!gcc_eh_personality_decl)
12198 gcc_eh_personality_decl = build_personality_function ("gcc");
12199 return gcc_eh_personality_decl;
12202 /* TARGET is a call target of GIMPLE call statement
12203 (obtained by gimple_call_fn). Return true if it is
12204 OBJ_TYPE_REF representing a virtual call to a C++ method.
12205 (As opposed to OBJ_TYPE_REF representing objc calls
12206 through a cast where middle-end devirtualization machinery
12207 can't apply.) FOR_DUMP_P is true when being called from
12208 the dump routines. */
12210 bool
12211 virtual_method_call_p (const_tree target, bool for_dump_p)
12213 if (TREE_CODE (target) != OBJ_TYPE_REF)
12214 return false;
12215 tree t = TREE_TYPE (target);
12216 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12217 t = TREE_TYPE (t);
12218 if (TREE_CODE (t) == FUNCTION_TYPE)
12219 return false;
12220 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12221 /* If we do not have BINFO associated, it means that type was built
12222 without devirtualization enabled. Do not consider this a virtual
12223 call. */
12224 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12225 return false;
12226 return true;
12229 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12231 static tree
12232 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12234 unsigned int i;
12235 tree base_binfo, b;
12237 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12238 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12239 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12240 return base_binfo;
12241 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12242 return b;
12243 return NULL;
12246 /* Try to find a base info of BINFO that would have its field decl at offset
12247 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12248 found, return it, otherwise return NULL_TREE. */
12250 tree
12251 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12253 tree type = BINFO_TYPE (binfo);
12255 while (true)
12257 HOST_WIDE_INT pos, size;
12258 tree fld;
12259 int i;
12261 if (types_same_for_odr (type, expected_type))
12262 return binfo;
12263 if (maybe_lt (offset, 0))
12264 return NULL_TREE;
12266 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12268 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12269 continue;
12271 pos = int_bit_position (fld);
12272 size = tree_to_uhwi (DECL_SIZE (fld));
12273 if (known_in_range_p (offset, pos, size))
12274 break;
12276 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12277 return NULL_TREE;
12279 /* Offset 0 indicates the primary base, whose vtable contents are
12280 represented in the binfo for the derived class. */
12281 else if (maybe_ne (offset, 0))
12283 tree found_binfo = NULL, base_binfo;
12284 /* Offsets in BINFO are in bytes relative to the whole structure
12285 while POS is in bits relative to the containing field. */
12286 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12287 / BITS_PER_UNIT);
12289 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12290 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12291 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12293 found_binfo = base_binfo;
12294 break;
12296 if (found_binfo)
12297 binfo = found_binfo;
12298 else
12299 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12300 binfo_offset);
12303 type = TREE_TYPE (fld);
12304 offset -= pos;
12308 /* PR 84195: Replace control characters in "unescaped" with their
12309 escaped equivalents. Allow newlines if -fmessage-length has
12310 been set to a non-zero value. This is done here, rather than
12311 where the attribute is recorded as the message length can
12312 change between these two locations. */
12314 void
12315 escaped_string::escape (const char *unescaped)
12317 char *escaped;
12318 size_t i, new_i, len;
12320 if (m_owned)
12321 free (m_str);
12323 m_str = const_cast<char *> (unescaped);
12324 m_owned = false;
12326 if (unescaped == NULL || *unescaped == 0)
12327 return;
12329 len = strlen (unescaped);
12330 escaped = NULL;
12331 new_i = 0;
12333 for (i = 0; i < len; i++)
12335 char c = unescaped[i];
12337 if (!ISCNTRL (c))
12339 if (escaped)
12340 escaped[new_i++] = c;
12341 continue;
12344 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12346 if (escaped == NULL)
12348 /* We only allocate space for a new string if we
12349 actually encounter a control character that
12350 needs replacing. */
12351 escaped = (char *) xmalloc (len * 2 + 1);
12352 strncpy (escaped, unescaped, i);
12353 new_i = i;
12356 escaped[new_i++] = '\\';
12358 switch (c)
12360 case '\a': escaped[new_i++] = 'a'; break;
12361 case '\b': escaped[new_i++] = 'b'; break;
12362 case '\f': escaped[new_i++] = 'f'; break;
12363 case '\n': escaped[new_i++] = 'n'; break;
12364 case '\r': escaped[new_i++] = 'r'; break;
12365 case '\t': escaped[new_i++] = 't'; break;
12366 case '\v': escaped[new_i++] = 'v'; break;
12367 default: escaped[new_i++] = '?'; break;
12370 else if (escaped)
12371 escaped[new_i++] = c;
12374 if (escaped)
12376 escaped[new_i] = 0;
12377 m_str = escaped;
12378 m_owned = true;
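/* An illustrative sketch, following the pattern of warn_deprecated_use
   below; MSG owns an allocated copy only if a control character
   actually forced one.

     escaped_string msg;
     msg.escape ("line1\nline2");
     // (const char *) msg yields "line1\\nline2" when -fmessage-length
     // is 0 (the default); with line wrapping enabled the newline is
     // kept as-is.  */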
12382 /* Warn about a use of an identifier which was marked deprecated. Returns
12383 whether a warning was given. */
12385 bool
12386 warn_deprecated_use (tree node, tree attr)
12388 escaped_string msg;
12390 if (node == 0 || !warn_deprecated_decl)
12391 return false;
12393 if (!attr)
12395 if (DECL_P (node))
12396 attr = DECL_ATTRIBUTES (node);
12397 else if (TYPE_P (node))
12399 tree decl = TYPE_STUB_DECL (node);
12400 if (decl)
12401 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12402 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12403 != NULL_TREE)
12405 node = TREE_TYPE (decl);
12406 attr = TYPE_ATTRIBUTES (node);
12411 if (attr)
12412 attr = lookup_attribute ("deprecated", attr);
12414 if (attr)
12415 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12417 bool w = false;
12418 if (DECL_P (node))
12420 auto_diagnostic_group d;
12421 if (msg)
12422 w = warning (OPT_Wdeprecated_declarations,
12423 "%qD is deprecated: %s", node, (const char *) msg);
12424 else
12425 w = warning (OPT_Wdeprecated_declarations,
12426 "%qD is deprecated", node);
12427 if (w)
12428 inform (DECL_SOURCE_LOCATION (node), "declared here");
12430 else if (TYPE_P (node))
12432 tree what = NULL_TREE;
12433 tree decl = TYPE_STUB_DECL (node);
12435 if (TYPE_NAME (node))
12437 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12438 what = TYPE_NAME (node);
12439 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12440 && DECL_NAME (TYPE_NAME (node)))
12441 what = DECL_NAME (TYPE_NAME (node));
12444 auto_diagnostic_group d;
12445 if (what)
12447 if (msg)
12448 w = warning (OPT_Wdeprecated_declarations,
12449 "%qE is deprecated: %s", what, (const char *) msg);
12450 else
12451 w = warning (OPT_Wdeprecated_declarations,
12452 "%qE is deprecated", what);
12454 else
12456 if (msg)
12457 w = warning (OPT_Wdeprecated_declarations,
12458 "type is deprecated: %s", (const char *) msg);
12459 else
12460 w = warning (OPT_Wdeprecated_declarations,
12461 "type is deprecated");
12464 if (w && decl)
12465 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12468 return w;
12471 /* Error out with an identifier which was marked 'unavailable'. */
12472 void
12473 error_unavailable_use (tree node, tree attr)
12475 escaped_string msg;
12477 if (node == 0)
12478 return;
12480 if (!attr)
12482 if (DECL_P (node))
12483 attr = DECL_ATTRIBUTES (node);
12484 else if (TYPE_P (node))
12486 tree decl = TYPE_STUB_DECL (node);
12487 if (decl)
12488 attr = lookup_attribute ("unavailable",
12489 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12493 if (attr)
12494 attr = lookup_attribute ("unavailable", attr);
12496 if (attr)
12497 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12499 if (DECL_P (node))
12501 auto_diagnostic_group d;
12502 if (msg)
12503 error ("%qD is unavailable: %s", node, (const char *) msg);
12504 else
12505 error ("%qD is unavailable", node);
12506 inform (DECL_SOURCE_LOCATION (node), "declared here");
12508 else if (TYPE_P (node))
12510 tree what = NULL_TREE;
12511 tree decl = TYPE_STUB_DECL (node);
12513 if (TYPE_NAME (node))
12515 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12516 what = TYPE_NAME (node);
12517 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12518 && DECL_NAME (TYPE_NAME (node)))
12519 what = DECL_NAME (TYPE_NAME (node));
12522 auto_diagnostic_group d;
12523 if (what)
12525 if (msg)
12526 error ("%qE is unavailable: %s", what, (const char *) msg);
12527 else
12528 error ("%qE is unavailable", what);
12530 else
12532 if (msg)
12533 error ("type is unavailable: %s", (const char *) msg);
12534 else
12535 error ("type is unavailable");
12538 if (decl)
12539 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12543 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12544 somewhere in it. */
12546 bool
12547 contains_bitfld_component_ref_p (const_tree ref)
12549 while (handled_component_p (ref))
12551 if (TREE_CODE (ref) == COMPONENT_REF
12552 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12553 return true;
12554 ref = TREE_OPERAND (ref, 0);
12557 return false;
12560 /* Try to determine whether a TRY_CATCH expression can fall through.
12561 This is a subroutine of block_may_fallthru. */
12563 static bool
12564 try_catch_may_fallthru (const_tree stmt)
12566 tree_stmt_iterator i;
12568 /* If the TRY block can fall through, the whole TRY_CATCH can
12569 fall through. */
12570 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12571 return true;
12573 i = tsi_start (TREE_OPERAND (stmt, 1));
12574 switch (TREE_CODE (tsi_stmt (i)))
12576 case CATCH_EXPR:
12577 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12578 catch expression and a body. The whole TRY_CATCH may fall
12579 through iff any of the catch bodies falls through. */
12580 for (; !tsi_end_p (i); tsi_next (&i))
12582 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12583 return true;
12585 return false;
12587 case EH_FILTER_EXPR:
12588 /* The exception filter expression only matters if there is an
12589 exception. If the exception does not match EH_FILTER_TYPES,
12590 we will execute EH_FILTER_FAILURE, and we will fall through
12591 if that falls through. If the exception does match
12592 EH_FILTER_TYPES, the stack unwinder will continue up the
12593 stack, so we will not fall through. We don't know whether we
12594 will throw an exception which matches EH_FILTER_TYPES or not,
12595 so we just ignore EH_FILTER_TYPES and assume that we might
12596 throw an exception which doesn't match. */
12597 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12599 default:
12600 /* This case represents statements to be executed when an
12601 exception occurs. Those statements are implicitly followed
12602 by a RESX statement to resume execution after the exception.
12603 So in this case the TRY_CATCH never falls through. */
12604 return false;
12608 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12609 need not be 100% accurate; simply be conservative and return true if we
12610 don't know. This is used only to avoid stupidly generating extra code.
12611 If we're wrong, we'll just delete the extra code later. */
12613 bool
12614 block_may_fallthru (const_tree block)
12616 /* This CONST_CAST is okay because expr_last returns its argument
12617 unmodified and we assign it to a const_tree. */
12618 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12620 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12622 case GOTO_EXPR:
12623 case RETURN_EXPR:
12624 /* Easy cases. If the last statement of the block implies
12625 control transfer, then we can't fall through. */
12626 return false;
12628 case SWITCH_EXPR:
12629 /* If there is a default: label or case labels cover all possible
12630 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12631 to some case label in all cases and all we care is whether the
12632 SWITCH_BODY falls through. */
12633 if (SWITCH_ALL_CASES_P (stmt))
12634 return block_may_fallthru (SWITCH_BODY (stmt));
12635 return true;
12637 case COND_EXPR:
12638 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12639 return true;
12640 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12642 case BIND_EXPR:
12643 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12645 case TRY_CATCH_EXPR:
12646 return try_catch_may_fallthru (stmt);
12648 case TRY_FINALLY_EXPR:
12649 /* The finally clause is always executed after the try clause,
12650 so if it does not fall through, then the try-finally will not
12651 fall through. Otherwise, if the try clause does not fall
12652 through, then when the finally clause falls through it will
12653 resume execution wherever the try clause was going. So the
12654 whole try-finally will only fall through if both the try
12655 clause and the finally clause fall through. */
12656 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12657 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12659 case EH_ELSE_EXPR:
12660 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12662 case MODIFY_EXPR:
12663 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12664 stmt = TREE_OPERAND (stmt, 1);
12665 else
12666 return true;
12667 /* FALLTHRU */
12669 case CALL_EXPR:
12670 /* Functions that do not return do not fall through. */
12671 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12673 case CLEANUP_POINT_EXPR:
12674 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12676 case TARGET_EXPR:
12677 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12679 case ERROR_MARK:
12680 return true;
12682 default:
12683 return lang_hooks.block_may_fallthru (stmt);
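/* Worked examples:

     { a = 1; return; }                -> false: the trailing RETURN_EXPR
                                          transfers control
     { if (p) a = 1; else b = 2; }     -> true: both arms fall through
     { foo (); }                       -> false if foo is noreturn
                                          (ECF_NORETURN), true otherwise  */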
12687 /* True if we are using EH to handle cleanups. */
12688 static bool using_eh_for_cleanups_flag = false;
12690 /* This routine is called from front ends to indicate eh should be used for
12691 cleanups. */
12692 void
12693 using_eh_for_cleanups (void)
12695 using_eh_for_cleanups_flag = true;
12698 /* Query whether EH is used for cleanups. */
12699 bool
12700 using_eh_for_cleanups_p (void)
12702 return using_eh_for_cleanups_flag;
12705 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12706 const char *
12707 get_tree_code_name (enum tree_code code)
12709 const char *invalid = "<invalid tree code>";
12711 /* The tree_code enum promotes to signed, but we could be getting
12712 invalid values, so force an unsigned comparison. */
12713 if (unsigned (code) >= MAX_TREE_CODES)
12715 if ((unsigned)code == 0xa5a5)
12716 return "ggc_freed";
12717 return invalid;
12720 return tree_code_name[code];
12723 /* Drops the TREE_OVERFLOW flag from T. */
12725 tree
12726 drop_tree_overflow (tree t)
12728 gcc_checking_assert (TREE_OVERFLOW (t));
12730 /* For tree codes with a sharing machinery re-build the result. */
12731 if (poly_int_tree_p (t))
12732 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12734 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12735 and canonicalize the result. */
12736 if (TREE_CODE (t) == VECTOR_CST)
12738 tree_vector_builder builder;
12739 builder.new_unary_operation (TREE_TYPE (t), t, true);
12740 unsigned int count = builder.encoded_nelts ();
12741 for (unsigned int i = 0; i < count; ++i)
12743 tree elt = VECTOR_CST_ELT (t, i);
12744 if (TREE_OVERFLOW (elt))
12745 elt = drop_tree_overflow (elt);
12746 builder.quick_push (elt);
12748 return builder.build ();
12751 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12752 and drop the flag. */
12753 t = copy_node (t);
12754 TREE_OVERFLOW (t) = 0;
12756 /* For constants that contain nested constants, drop the flag
12757 from those as well. */
12758 if (TREE_CODE (t) == COMPLEX_CST)
12760 if (TREE_OVERFLOW (TREE_REALPART (t)))
12761 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12762 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12763 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12766 return t;
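/* An illustrative sketch of the usual calling pattern (CST is a
   hypothetical INTEGER_CST, e.g. the result of folding INT_MAX + 1,
   which carries TREE_OVERFLOW):

     if (TREE_OVERFLOW (cst))
       cst = drop_tree_overflow (cst);

   For INTEGER_CSTs this hands back the shared canonical constant with
   the same value and the flag clear; the original node is untouched.  */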
12769 /* Given a memory reference expression T, return its base address.
12770 The base address of a memory reference expression is the main
12771 object being referenced. For instance, the base address for
12772 'array[i].fld[j]' is 'array'. You can think of this as stripping
12773 away the offset part from a memory address.
12775 This function calls handled_component_p to strip away all the inner
12776 parts of the memory reference until it reaches the base object. */
12778 tree
12779 get_base_address (tree t)
12781 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12782 t = TREE_OPERAND (t, 0);
12783 while (handled_component_p (t))
12784 t = TREE_OPERAND (t, 0);
12786 if ((TREE_CODE (t) == MEM_REF
12787 || TREE_CODE (t) == TARGET_MEM_REF)
12788 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12789 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12791 return t;
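/* Worked examples:

     array[i].fld[j]   -> array
     *(&buf + 16)      -> buf; a MEM_REF whose address operand is an
                          ADDR_EXPR is looked through
     *p_1              -> the MEM_REF itself; a base given by an SSA
                          pointer is not peeled further  */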
12794 /* Return a tree of sizetype representing the size, in bytes, of the element
12795 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12797 tree
12798 array_ref_element_size (tree exp)
12800 tree aligned_size = TREE_OPERAND (exp, 3);
12801 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12802 location_t loc = EXPR_LOCATION (exp);
12804 /* If a size was specified in the ARRAY_REF, it's the size measured
12805 in alignment units of the element type. So multiply by that value. */
12806 if (aligned_size)
12808 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12809 sizetype from another type of the same width and signedness. */
12810 if (TREE_TYPE (aligned_size) != sizetype)
12811 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12812 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12813 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12816 /* Otherwise, take the size from that of the element type. Substitute
12817 any PLACEHOLDER_EXPR that we have. */
12818 else
12819 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12822 /* Return a tree representing the lower bound of the array mentioned in
12823 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12825 tree
12826 array_ref_low_bound (tree exp)
12828 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12830 /* If a lower bound is specified in EXP, use it. */
12831 if (TREE_OPERAND (exp, 2))
12832 return TREE_OPERAND (exp, 2);
12834 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12835 substituting for a PLACEHOLDER_EXPR as needed. */
12836 if (domain_type && TYPE_MIN_VALUE (domain_type))
12837 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12839 /* Otherwise, return a zero of the appropriate type. */
12840 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12841 return (idxtype == error_mark_node
12842 ? integer_zero_node : build_int_cst (idxtype, 0));
12845 /* Return a tree representing the upper bound of the array mentioned in
12846 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12848 tree
12849 array_ref_up_bound (tree exp)
12851 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12853 /* If there is a domain type and it has an upper bound, use it, substituting
12854 for a PLACEHOLDER_EXPR as needed. */
12855 if (domain_type && TYPE_MAX_VALUE (domain_type))
12856 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12858 /* Otherwise fail. */
12859 return NULL_TREE;
12862 /* Returns true if REF is an array reference, a component reference,
12863 or a memory reference to an array whose actual size might be larger
12864 than its upper bound implies. There are multiple cases:
12865 A. a ref to a flexible array member at the end of a structure;
12866 B. a ref to an array with a different type against the original decl;
12867 for example:
12869 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12870 (*((char(*)[16])&a[0]))[i+8]
12872 C. a ref to an array that was passed as a parameter;
12873 for example:
12875 int test (uint8_t *p, uint32_t t[1][1], int n) {
12876 for (int i = 0; i < 4; i++, p++)
12877 t[i][0] = ...;
12879 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12882 bool
12883 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12885 /* The TYPE for this array reference. */
12886 tree atype = NULL_TREE;
12887 /* The FIELD_DECL for the array field in the containing structure. */
12888 tree afield_decl = NULL_TREE;
12889 /* Whether this array is the trailing array of a structure. */
12890 bool is_trailing_array_tmp = false;
12891 if (!is_trailing_array)
12892 is_trailing_array = &is_trailing_array_tmp;
12894 if (TREE_CODE (ref) == ARRAY_REF
12895 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12897 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12898 ref = TREE_OPERAND (ref, 0);
12900 else if (TREE_CODE (ref) == COMPONENT_REF
12901 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12903 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12904 afield_decl = TREE_OPERAND (ref, 1);
12906 else if (TREE_CODE (ref) == MEM_REF)
12908 tree arg = TREE_OPERAND (ref, 0);
12909 if (TREE_CODE (arg) == ADDR_EXPR)
12910 arg = TREE_OPERAND (arg, 0);
12911 tree argtype = TREE_TYPE (arg);
12912 if (TREE_CODE (argtype) == RECORD_TYPE)
12914 if (tree fld = last_field (argtype))
12916 atype = TREE_TYPE (fld);
12917 afield_decl = fld;
12918 if (TREE_CODE (atype) != ARRAY_TYPE)
12919 return false;
12920 if (VAR_P (arg) && DECL_SIZE (fld))
12921 return false;
12923 else
12924 return false;
12926 else
12927 return false;
12929 else
12930 return false;
12932 if (TREE_CODE (ref) == STRING_CST)
12933 return false;
12935 tree ref_to_array = ref;
12936 while (handled_component_p (ref))
12938 /* If the reference chain contains a component reference to a
12939 non-union type and there follows another field the reference
12940 is not at the end of a structure. */
12941 if (TREE_CODE (ref) == COMPONENT_REF)
12943 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12945 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12946 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12947 nextf = DECL_CHAIN (nextf);
12948 if (nextf)
12949 return false;
12952 /* If we have a multi-dimensional array, we do not consider
12953 a non-innermost dimension as a flex array if the whole
12954 multi-dimensional array is at struct end.
12955 Same for an array of aggregates with a trailing array
12956 member. */
12957 else if (TREE_CODE (ref) == ARRAY_REF)
12958 return false;
12959 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12961 /* If we view an underlying object as something else, then what we
12962 gathered up to now is what we have to rely on. */
12963 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12964 break;
12965 else
12966 gcc_unreachable ();
12968 ref = TREE_OPERAND (ref, 0);
12971 gcc_assert (!afield_decl
12972 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12974 /* The array is now at struct end. Treat a flexible array member as
12975 always subject to extension, even into just padding constrained by
12976 an underlying decl. */
12977 if (! TYPE_SIZE (atype)
12978 || ! TYPE_DOMAIN (atype)
12979 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12981 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12982 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12985 /* If the reference is based on a declared entity, the size of the array
12986 is constrained by its given domain. (Do not trust commons PR/69368). */
12987 ref = get_base_address (ref);
12988 if (ref
12989 && DECL_P (ref)
12990 && !(flag_unconstrained_commons
12991 && VAR_P (ref) && DECL_COMMON (ref))
12992 && DECL_SIZE_UNIT (ref)
12993 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12995 /* If the object itself is the array it is not at struct end. */
12996 if (DECL_P (ref_to_array))
12997 return false;
12999 /* Check whether the array domain covers all of the available
13000 padding. */
13001 poly_int64 offset;
13002 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13003 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13004 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13006 *is_trailing_array
13007 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13008 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13010 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13012 *is_trailing_array
13013 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13014 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13017 /* If at least one extra element fits it is a flexarray. */
13018 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13019 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13020 + 2)
13021 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13022 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13024 *is_trailing_array
13025 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13026 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13029 return false;
13032 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13033 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
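/* A worked example of case A from the comment above, with P assumed
   to point to the respective struct:

     struct S { int n; char data[]; };   // trailing flexible array member
     // a reference such as p->data[i] yields true

     struct T { char data[4]; int n; };
     // p->data[i] yields false, because another field follows DATA  */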
13037 /* Return a tree representing the offset, in bytes, of the field referenced
13038 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13040 tree
13041 component_ref_field_offset (tree exp)
13043 tree aligned_offset = TREE_OPERAND (exp, 2);
13044 tree field = TREE_OPERAND (exp, 1);
13045 location_t loc = EXPR_LOCATION (exp);
13047 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13048 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13049 value. */
13050 if (aligned_offset)
13052 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13053 sizetype from another type of the same width and signedness. */
13054 if (TREE_TYPE (aligned_offset) != sizetype)
13055 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13056 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13057 size_int (DECL_OFFSET_ALIGN (field)
13058 / BITS_PER_UNIT));
13061 /* Otherwise, take the offset from that of the field. Substitute
13062 any PLACEHOLDER_EXPR that we have. */
13063 else
13064 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13067 /* Given the initializer INIT, return the initializer for the field
13068 DECL if it exists, otherwise null. Used to obtain the initializer
13069 for a flexible array member and determine its size. */
13071 static tree
13072 get_initializer_for (tree init, tree decl)
13074 STRIP_NOPS (init);
13076 tree fld, fld_init;
13077 unsigned HOST_WIDE_INT i;
13078 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13080 if (decl == fld)
13081 return fld_init;
13083 if (TREE_CODE (fld) == CONSTRUCTOR)
13085 fld_init = get_initializer_for (fld_init, decl);
13086 if (fld_init)
13087 return fld_init;
13091 return NULL_TREE;
13094 /* Determines the special array member type for the array reference REF. */
13095 special_array_member
13096 component_ref_sam_type (tree ref)
13098 special_array_member sam_type = special_array_member::none;
13100 tree member = TREE_OPERAND (ref, 1);
13101 tree memsize = DECL_SIZE_UNIT (member);
13102 if (memsize)
13104 tree memtype = TREE_TYPE (member);
13105 if (TREE_CODE (memtype) != ARRAY_TYPE)
13106 return sam_type;
13108 bool trailing = false;
13109 (void) array_ref_flexible_size_p (ref, &trailing);
13110 bool zero_elts = integer_zerop (memsize);
13111 if (zero_elts && integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype))))
13113 /* If array element has zero size, verify if it is a flexible
13114 array member or zero length array. Clear zero_elts if
13115 it has one or more members or is a VLA member. */
13116 if (tree dom = TYPE_DOMAIN (memtype))
13117 if (tree min = TYPE_MIN_VALUE (dom))
13118 if (tree max = TYPE_MAX_VALUE (dom))
13119 if (TREE_CODE (min) != INTEGER_CST
13120 || TREE_CODE (max) != INTEGER_CST
13121 || !((integer_zerop (min) && integer_all_onesp (max))
13122 || tree_int_cst_lt (max, min)))
13123 zero_elts = false;
13125 if (!trailing && !zero_elts)
13126 /* MEMBER is an interior array with more than one element. */
13127 return special_array_member::int_n;
13129 if (zero_elts)
13131 if (trailing)
13132 return special_array_member::trail_0;
13133 else
13134 return special_array_member::int_0;
13137 if (!zero_elts)
13138 if (tree dom = TYPE_DOMAIN (memtype))
13139 if (tree min = TYPE_MIN_VALUE (dom))
13140 if (tree max = TYPE_MAX_VALUE (dom))
13141 if (TREE_CODE (min) == INTEGER_CST
13142 && TREE_CODE (max) == INTEGER_CST)
13144 offset_int minidx = wi::to_offset (min);
13145 offset_int maxidx = wi::to_offset (max);
13146 offset_int neltsm1 = maxidx - minidx;
13147 if (neltsm1 > 0)
13148 /* MEMBER is a trailing array with more than
13149 one element. */
13150 return special_array_member::trail_n;
13152 if (neltsm1 == 0)
13153 return special_array_member::trail_1;
13157 return sam_type;
13160 /* Determines the size of the member referenced by the COMPONENT_REF
13161 REF, using its initializer expression if necessary in order to
13162 determine the size of an initialized flexible array member.
13163 If non-null, set *SAM to the type of special array member.
13164 Returns the size as sizetype (which might be zero for an object
13165 with an uninitialized flexible array member) or null if the size
13166 cannot be determined. */
13168 tree
13169 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13171 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13173 special_array_member sambuf;
13174 if (!sam)
13175 sam = &sambuf;
13176 *sam = component_ref_sam_type (ref);
13178 /* The object/argument referenced by the COMPONENT_REF and its type. */
13179 tree arg = TREE_OPERAND (ref, 0);
13180 tree argtype = TREE_TYPE (arg);
13181 /* The referenced member. */
13182 tree member = TREE_OPERAND (ref, 1);
13184 tree memsize = DECL_SIZE_UNIT (member);
13185 if (memsize)
13187 tree memtype = TREE_TYPE (member);
13188 if (TREE_CODE (memtype) != ARRAY_TYPE)
13189 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13190 to the type of a class with a virtual base which doesn't
13191 reflect the size of the virtual's members (see pr97595).
13192 If that's the case fail for now and implement something
13193 more robust in the future. */
13194 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13195 ? memsize : NULL_TREE);
13197 /* Arrays of two or more elements are treated as normal arrays by default. */
13198 if (*sam == special_array_member::int_n
13199 || *sam == special_array_member::trail_n)
13200 return memsize;
13202 tree afield_decl = TREE_OPERAND (ref, 1);
13203 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13204 /* If the trailing array is a not a flexible array member, treat it as
13205 a normal array. */
13206 if (DECL_NOT_FLEXARRAY (afield_decl)
13207 && *sam != special_array_member::int_0)
13208 return memsize;
13210 if (*sam == special_array_member::int_0)
13211 memsize = NULL_TREE;
13213 /* For a reference to a flexible array member of a union
13214 use the size of the union instead of the size of the member. */
13215 if (TREE_CODE (argtype) == UNION_TYPE)
13216 memsize = TYPE_SIZE_UNIT (argtype);
13219 /* MEMBER is either a bona fide flexible array member, or a zero-element
13220 array member, or an array of length one treated as such. */
13222 /* If the reference is to a declared object and the member a true
13223 flexible array, try to determine its size from its initializer. */
13224 poly_int64 baseoff = 0;
13225 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13226 if (!base || !VAR_P (base))
13228 if (*sam != special_array_member::int_0)
13229 return NULL_TREE;
13231 if (TREE_CODE (arg) != COMPONENT_REF)
13232 return NULL_TREE;
13234 base = arg;
13235 while (TREE_CODE (base) == COMPONENT_REF)
13236 base = TREE_OPERAND (base, 0);
13237 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13240 /* BASE is the declared object of which MEMBER is either a member
13241 or that is cast to ARGTYPE (e.g., a char buffer used to store
13242 an ARGTYPE object). */
13243 tree basetype = TREE_TYPE (base);
13245 /* Determine the base type of the referenced object. If it's
13246 the same as ARGTYPE and MEMBER has a known size, return it. */
13247 tree bt = basetype;
13248 if (*sam != special_array_member::int_0)
13249 while (TREE_CODE (bt) == ARRAY_TYPE)
13250 bt = TREE_TYPE (bt);
13251 bool typematch = useless_type_conversion_p (argtype, bt);
13252 if (memsize && typematch)
13253 return memsize;
13255 memsize = NULL_TREE;
13257 if (typematch)
13258 /* MEMBER is a true flexible array member. Compute its size from
13259 the initializer of the BASE object if it has one. */
13260 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13261 if (init != error_mark_node)
13263 init = get_initializer_for (init, member);
13264 if (init)
13266 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13267 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13269 /* Use the larger of the initializer size and the tail
13270 padding in the enclosing struct. */
13271 poly_int64 rsz = tree_to_poly_int64 (refsize);
13272 rsz -= baseoff;
13273 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13274 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13277 baseoff = 0;
13281 if (!memsize)
13283 if (typematch)
13285 if (DECL_P (base)
13286 && DECL_EXTERNAL (base)
13287 && bt == basetype
13288 && *sam != special_array_member::int_0)
13289 /* The size of a flexible array member of an extern struct
13290 with no initializer cannot be determined (it's defined
13291 in another translation unit and can have an initializer
13292 with an arbitrary number of elements). */
13293 return NULL_TREE;
13295 /* Use the size of the base struct or, for interior zero-length
13296 arrays, the size of the enclosing type. */
13297 memsize = TYPE_SIZE_UNIT (bt);
13299 else if (DECL_P (base))
13300 /* Use the size of the BASE object (possibly an array of some
13301 other type such as char used to store the struct). */
13302 memsize = DECL_SIZE_UNIT (base);
13303 else
13304 return NULL_TREE;
13307 /* If the flexible array member has a known size use the greater
13308 of it and the tail padding in the enclosing struct.
13309 Otherwise, when the size of the flexible array member is unknown
13310 and the referenced object is not a struct, use the size of its
13311 type when known. This detects sizes of array buffers when cast
13312 to struct types with flexible array members. */
13313 if (memsize)
13315 if (!tree_fits_poly_int64_p (memsize))
13316 return NULL_TREE;
13317 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13318 if (known_lt (baseoff, memsz64))
13320 memsz64 -= baseoff;
13321 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13323 return size_zero_node;
13326 /* Return "don't know" for an external non-array object since its
13327 flexible array member can be initialized to have any number of
13328 elements. Otherwise, return zero because the flexible array
13329 member has no elements. */
13330 return (DECL_P (base)
13331 && DECL_EXTERNAL (base)
13332 && (!typematch
13333 || TREE_CODE (basetype) != ARRAY_TYPE)
13334 ? NULL_TREE : size_zero_node);
13337 /* Return the machine mode of T. For vectors, returns the mode of the
13338 inner type. The main use case is to feed the result to HONOR_NANS,
13339 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13341 machine_mode
13342 element_mode (const_tree t)
13344 if (!TYPE_P (t))
13345 t = TREE_TYPE (t);
13346 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13347 t = TREE_TYPE (t);
13348 return TYPE_MODE (t);
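/* Worked examples, on a typical target where float maps to SFmode and
   double to DFmode:

     element_mode (double_type_node)          -> DFmode
     element_mode (complex_float_type_node)   -> SFmode
     element_mode (a V4SF vector type)        -> SFmode

   which is what HONOR_NANS needs even when a direct TYPE_MODE of the
   vector type could be BLKmode.  */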
13351 /* Vector types need to re-check the target flags each time we report
13352 the machine mode. We need to do this because attribute target can
13353 change the result of vector_mode_supported_p and have_regs_of_mode
13354 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13355 change on a per-function basis. */
13356 /* ??? Possibly a better solution is to run through all the types
13357 referenced by a function and re-compute the TYPE_MODE once, rather
13358 than make the TYPE_MODE macro call a function. */
13360 machine_mode
13361 vector_type_mode (const_tree t)
13363 machine_mode mode;
13365 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13367 mode = t->type_common.mode;
13368 if (VECTOR_MODE_P (mode)
13369 && (!targetm.vector_mode_supported_p (mode)
13370 || !have_regs_of_mode[mode]))
13372 scalar_int_mode innermode;
13374 /* For integers, try mapping it to a same-sized scalar mode. */
13375 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13377 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13378 * GET_MODE_BITSIZE (innermode));
13379 scalar_int_mode mode;
13380 if (int_mode_for_size (size, 0).exists (&mode)
13381 && have_regs_of_mode[mode])
13382 return mode;
13385 return BLKmode;
13388 return mode;
13391 /* Return the size in bits of each element of vector type TYPE. */
13393 unsigned int
13394 vector_element_bits (const_tree type)
13396 gcc_checking_assert (VECTOR_TYPE_P (type));
13397 if (VECTOR_BOOLEAN_TYPE_P (type))
13398 return TYPE_PRECISION (TREE_TYPE (type));
13399 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
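/* Worked examples: for a vector of four 32-bit floats this returns 32;
   for a boolean (mask) vector it returns the element precision
   instead, which can be as small as 1 bit for mask-style vector
   ISAs.  */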
13402 /* Calculate the size in bits of each element of vector type TYPE
13403 and return the result as a tree of type bitsizetype. */
13405 tree
13406 vector_element_bits_tree (const_tree type)
13408 gcc_checking_assert (VECTOR_TYPE_P (type));
13409 if (VECTOR_BOOLEAN_TYPE_P (type))
13410 return bitsize_int (vector_element_bits (type));
13411 return TYPE_SIZE (TREE_TYPE (type));
13414 /* Verify that basic properties of T match TV and thus T can be a variant of
13415 TV. TV should be the more specified variant (i.e. the main variant). */
13417 static bool
13418 verify_type_variant (const_tree t, tree tv)
13420 /* Type variant can differ by:
13422 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13423 ENCODE_QUAL_ADDR_SPACE.
13424 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13425 in this case some values may not be set in the variant types
13426 (see TYPE_COMPLETE_P checks).
13427 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13428 - by TYPE_NAME and attributes (i.e. when the variant originates by typedef)
13429 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13430 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13431 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13432 this is necessary to make it possible to merge types from different TUs
13433 - arrays, pointers and references may have TREE_TYPE that is a variant
13434 of TREE_TYPE of their main variants.
13435 - aggregates may have new TYPE_FIELDS list that list variants of
13436 the main variant TYPE_FIELDS.
13437 - vector types may differ by TYPE_VECTOR_OPAQUE
13440 /* Convenience macro for matching individual fields. */
13441 #define verify_variant_match(flag) \
13442 do { \
13443 if (flag (tv) != flag (t)) \
13445 error ("type variant differs by %s", #flag); \
13446 debug_tree (tv); \
13447 return false; \
13449 } while (false)
13451 /* tree_base checks. */
13453 verify_variant_match (TREE_CODE);
13454 /* FIXME: Ada builds non-artificial variants of artificial types. */
13455 #if 0
13456 if (TYPE_ARTIFICIAL (tv))
13457 verify_variant_match (TYPE_ARTIFICIAL);
13458 #endif
13459 if (POINTER_TYPE_P (tv))
13460 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13461 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for the Ada build. */
13462 verify_variant_match (TYPE_UNSIGNED);
13463 verify_variant_match (TYPE_PACKED);
13464 if (TREE_CODE (t) == REFERENCE_TYPE)
13465 verify_variant_match (TYPE_REF_IS_RVALUE);
13466 if (AGGREGATE_TYPE_P (t))
13467 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13468 else
13469 verify_variant_match (TYPE_SATURATING);
13470 /* FIXME: This check triggers during the libstdc++ build. */
13471 #if 0
13472 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13473 verify_variant_match (TYPE_FINAL_P);
13474 #endif
13476 /* tree_type_common checks. */
13478 if (COMPLETE_TYPE_P (t))
13480 verify_variant_match (TYPE_MODE);
13481 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13482 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13483 verify_variant_match (TYPE_SIZE);
13484 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13485 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13486 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13488 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13489 TYPE_SIZE_UNIT (tv), 0));
13490 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13491 debug_tree (tv);
13492 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13493 debug_tree (TYPE_SIZE_UNIT (tv));
13494 error ("type%'s %<TYPE_SIZE_UNIT%>");
13495 debug_tree (TYPE_SIZE_UNIT (t));
13496 return false;
13498 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13500 verify_variant_match (TYPE_PRECISION_RAW);
13501 if (RECORD_OR_UNION_TYPE_P (t))
13502 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13503 else if (TREE_CODE (t) == ARRAY_TYPE)
13504 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13505 /* During LTO we merge variant lists from different translation units
13506 that may differ by TYPE_CONTEXT that in turn may point
13507 to TRANSLATION_UNIT_DECL.
13508 Ada also builds variants of types with different TYPE_CONTEXT. */
13509 #if 0
13510 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13511 verify_variant_match (TYPE_CONTEXT);
13512 #endif
13513 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13514 verify_variant_match (TYPE_STRING_FLAG);
13515 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13516 verify_variant_match (TYPE_CXX_ODR_P);
13517 if (TYPE_ALIAS_SET_KNOWN_P (t))
13519 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13520 debug_tree (tv);
13521 return false;
13524 /* tree_type_non_common checks. */
13526 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13527 and leaves the pointer dangling from time to time. */
13528 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13529 && (in_lto_p || !TYPE_VFIELD (tv)
13530 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13532 error ("type variant has different %<TYPE_VFIELD%>");
13533 debug_tree (tv);
13534 return false;
13536 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13537 || TREE_CODE (t) == INTEGER_TYPE
13538 || TREE_CODE (t) == BOOLEAN_TYPE
13539 || TREE_CODE (t) == BITINT_TYPE
13540 || SCALAR_FLOAT_TYPE_P (t)
13541 || FIXED_POINT_TYPE_P (t))
13543 verify_variant_match (TYPE_MAX_VALUE);
13544 verify_variant_match (TYPE_MIN_VALUE);
13546 if (TREE_CODE (t) == METHOD_TYPE)
13547 verify_variant_match (TYPE_METHOD_BASETYPE);
13548 if (TREE_CODE (t) == OFFSET_TYPE)
13549 verify_variant_match (TYPE_OFFSET_BASETYPE);
13550 if (TREE_CODE (t) == ARRAY_TYPE)
13551 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13552 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13553 or even in the type's main variant. This is needed to make bootstrap pass
13554 and the bug seems new in GCC 5.
13555 The C++ FE should be updated to make this consistent and we should check
13556 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13557 is a match with the main variant.
13559 Also disable the check for Java for now because of a parser hack that first
13560 builds a dummy BINFO and then sometimes replaces it with the real BINFO in
13561 some of the copies. */
13562 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13563 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13564 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13565 Since there is no cheap way to tell a C++ type from a Java one w/o LTO,
13566 do the checking at LTO time only. */
13567 && (in_lto_p && odr_type_p (t)))
13569 error ("type variant has different %<TYPE_BINFO%>");
13570 debug_tree (tv);
13571 error ("type variant%'s %<TYPE_BINFO%>");
13572 debug_tree (TYPE_BINFO (tv));
13573 error ("type%'s %<TYPE_BINFO%>");
13574 debug_tree (TYPE_BINFO (t));
13575 return false;
13578 /* Check various uses of TYPE_VALUES_RAW. */
13579 if (TREE_CODE (t) == ENUMERAL_TYPE
13580 && TYPE_VALUES (t))
13581 verify_variant_match (TYPE_VALUES);
13582 else if (TREE_CODE (t) == ARRAY_TYPE)
13583 verify_variant_match (TYPE_DOMAIN);
13584 /* Permit incomplete variants of complete type. While FEs may complete
13585 all variants, this does not happen for C++ templates in all cases. */
13586 else if (RECORD_OR_UNION_TYPE_P (t)
13587 && COMPLETE_TYPE_P (t)
13588 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13590 tree f1, f2;
13592 /* Fortran builds qualified variants as new records with items of
13593 qualified type. Verify that they look the same. */
13594 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13595 f1 && f2;
13596 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13597 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13598 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13599 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13600 /* FIXME: gfc_nonrestricted_type builds all types as variants
13601 with the exception of pointer types. It deeply copies the type,
13602 which means that we may end up with a variant type
13603 referring to a non-variant pointer. We may change it to
13604 produce such types as variants, too, like
13605 objc_get_protocol_qualified_type does. */
13606 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13607 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13608 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13609 break;
13610 if (f1 || f2)
13612 error ("type variant has different %<TYPE_FIELDS%>");
13613 debug_tree (tv);
13614 error ("first mismatch is field");
13615 debug_tree (f1);
13616 error ("and field");
13617 debug_tree (f2);
13618 return false;
13621 else if (FUNC_OR_METHOD_TYPE_P (t))
13622 verify_variant_match (TYPE_ARG_TYPES);
13623 /* For C++ the qualified variant of an array type is really an array type
13624 of the qualified TREE_TYPE.
13625 Objective-C builds variants of pointer types where the pointed-to type is
13626 a variant, too, in objc_get_protocol_qualified_type. */
13627 if (TREE_TYPE (t) != TREE_TYPE (tv)
13628 && ((TREE_CODE (t) != ARRAY_TYPE
13629 && !POINTER_TYPE_P (t))
13630 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13631 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13633 error ("type variant has different %<TREE_TYPE%>");
13634 debug_tree (tv);
13635 error ("type variant%'s %<TREE_TYPE%>");
13636 debug_tree (TREE_TYPE (tv));
13637 error ("type%'s %<TREE_TYPE%>");
13638 debug_tree (TREE_TYPE (t));
13639 return false;
13641 if (type_with_alias_set_p (t)
13642 && !gimple_canonical_types_compatible_p (t, tv, false))
13644 error ("type is not compatible with its variant");
13645 debug_tree (tv);
13646 error ("type variant%'s %<TREE_TYPE%>");
13647 debug_tree (TREE_TYPE (tv));
13648 error ("type%'s %<TREE_TYPE%>");
13649 debug_tree (TREE_TYPE (t));
13650 return false;
13652 return true;
13653 #undef verify_variant_match
13657 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13658 the middle-end types_compatible_p function. It needs to avoid
13659 claiming types are different for types that should be treated
13660 the same with respect to TBAA. Canonical types are also used
13661 for IL consistency checks via the useless_type_conversion_p
13662 predicate which does not handle all type kinds itself but falls
13663 back to pointer-comparison of TYPE_CANONICAL for aggregates
13664 for example. */
13666 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13667 type calculation because we need to allow inter-operability between signed
13668 and unsigned variants. */
13670 bool
13671 type_with_interoperable_signedness (const_tree type)
13673 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13674 signed char and unsigned char. Similarly the Fortran FE builds
13675 C_SIZE_T as a signed type, while C defines it as unsigned. */
13677 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13678 == INTEGER_TYPE
13679 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13680 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
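/* Illustrative sketch (assumption, not part of the original source): this
   interoperability rule means the structural comparison below treats
   char-width integer types of either signedness as TBAA-equivalent.  */
#if 0
  gcc_checking_assert
    (gimple_canonical_types_compatible_p (signed_char_type_node,
					  unsigned_char_type_node, false));
#endif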
13683 /* Return true iff T1 and T2 are structurally identical as far as
13684 TBAA is concerned.
13685 This function is used both by lto.cc canonical type merging and by the
13686 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of types
13687 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13688 only for LTO because only in that case does TYPE_CANONICAL equivalence
13689 correspond to the one defined by gimple_canonical_types_compatible_p. */
13691 bool
13692 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13693 bool trust_type_canonical)
13695 /* Type variants should be same as the main variant. When not doing sanity
13696 checking to verify this fact, go to main variants and save some work. */
13697 if (trust_type_canonical)
13699 t1 = TYPE_MAIN_VARIANT (t1);
13700 t2 = TYPE_MAIN_VARIANT (t2);
13703 /* Check first for the obvious case of pointer identity. */
13704 if (t1 == t2)
13705 return true;
13707 /* Check that we have two types to compare. */
13708 if (t1 == NULL_TREE || t2 == NULL_TREE)
13709 return false;
13711 /* We consider complete types always compatible with incomplete types.
13712 This does not make sense for canonical type calculation and thus we
13713 need to ensure that we are never called on them.
13715 FIXME: For more correctness the function probably should have three modes
13716 1) a mode assuming that types are complete and matching their structure
13717 2) a mode allowing incomplete types but producing equivalence classes
13718 and thus ignoring all info from complete types
13719 3) a mode allowing incomplete types to match complete ones but checking
13720 compatibility between complete types.
13722 1 and 2 can be used for canonical type calculation. 3 is the real
13723 definition of type compatibility that can be used e.g. for warnings during
13724 declaration merging. */
13726 gcc_assert (!trust_type_canonical
13727 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13729 /* If the types have been previously registered and found equal
13730 they still are. */
13732 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13733 && trust_type_canonical)
13735 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13736 they are always NULL, but they are set to non-NULL for types
13737 constructed by build_pointer_type and variants. In this case the
13738 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13739 all pointers are considered equal). Be sure not to return false
13740 negatives. */
13741 gcc_checking_assert (canonical_type_used_p (t1)
13742 && canonical_type_used_p (t2));
13743 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13746 /* For types where we do ODR based TBAA the canonical type is always
13747 set correctly, so we know that types are different if their
13748 canonical types do not match. */
13749 if (trust_type_canonical
13750 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13751 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13752 return false;
13754 /* Can't be the same type if the types don't have the same code. */
13755 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13756 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13757 return false;
13759 /* Qualifiers do not matter for canonical type comparison purposes. */
13761 /* Void types and nullptr types are always the same. */
13762 if (VOID_TYPE_P (t1)
13763 || TREE_CODE (t1) == NULLPTR_TYPE)
13764 return true;
13766 /* Can't be the same type if they have different modes. */
13767 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13768 return false;
13770 /* Non-aggregate types can be handled cheaply. */
13771 if (INTEGRAL_TYPE_P (t1)
13772 || SCALAR_FLOAT_TYPE_P (t1)
13773 || FIXED_POINT_TYPE_P (t1)
13774 || VECTOR_TYPE_P (t1)
13775 || TREE_CODE (t1) == COMPLEX_TYPE
13776 || TREE_CODE (t1) == OFFSET_TYPE
13777 || POINTER_TYPE_P (t1))
13779 /* Can't be the same type if they have different precision. */
13780 if (TYPE_PRECISION_RAW (t1) != TYPE_PRECISION_RAW (t2))
13781 return false;
13783 /* In some cases the signed and unsigned types are required to be
13784 inter-operable. */
13785 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13786 && !type_with_interoperable_signedness (t1))
13787 return false;
13789 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13790 interoperable with "signed char". Unless all frontends are revisited
13791 to agree on these types, we must ignore the flag completely. */
13793 /* The Fortran standard defines the C_PTR type, which is compatible with
13794 every C pointer. For this reason we need to glob all pointers into one.
13795 Still, pointers in different address spaces are not compatible. */
13796 if (POINTER_TYPE_P (t1))
13798 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13799 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13800 return false;
13803 /* Tail-recurse to components. */
13804 if (VECTOR_TYPE_P (t1)
13805 || TREE_CODE (t1) == COMPLEX_TYPE)
13806 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13807 TREE_TYPE (t2),
13808 trust_type_canonical);
13810 return true;
13813 /* Do type-specific comparisons. */
13814 switch (TREE_CODE (t1))
13816 case ARRAY_TYPE:
13817 /* Array types are the same if the element types are the same and
13818 the number of elements is the same. */
13819 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13820 trust_type_canonical)
13821 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13822 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13823 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13824 return false;
13825 else
13827 tree i1 = TYPE_DOMAIN (t1);
13828 tree i2 = TYPE_DOMAIN (t2);
13830 /* For an incomplete external array, the type domain can be
13831 NULL_TREE. Check this condition also. */
13832 if (i1 == NULL_TREE && i2 == NULL_TREE)
13833 return true;
13834 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13835 return false;
13836 else
13838 tree min1 = TYPE_MIN_VALUE (i1);
13839 tree min2 = TYPE_MIN_VALUE (i2);
13840 tree max1 = TYPE_MAX_VALUE (i1);
13841 tree max2 = TYPE_MAX_VALUE (i2);
13843 /* The minimum/maximum values have to be the same. */
13844 if ((min1 == min2
13845 || (min1 && min2
13846 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13847 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13848 || operand_equal_p (min1, min2, 0))))
13849 && (max1 == max2
13850 || (max1 && max2
13851 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13852 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13853 || operand_equal_p (max1, max2, 0)))))
13854 return true;
13855 else
13856 return false;
13860 case METHOD_TYPE:
13861 case FUNCTION_TYPE:
13862 /* Function types are the same if the return type and argument types
13863 are the same. */
13864 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13865 trust_type_canonical))
13866 return false;
13868 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13869 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13870 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13871 return true;
13872 else
13874 tree parms1, parms2;
13876 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13877 parms1 && parms2;
13878 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13880 if (!gimple_canonical_types_compatible_p
13881 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13882 trust_type_canonical))
13883 return false;
13886 if (parms1 || parms2)
13887 return false;
13889 return true;
13892 case RECORD_TYPE:
13893 case UNION_TYPE:
13894 case QUAL_UNION_TYPE:
13896 tree f1, f2;
13898 /* Don't try to compare variants of an incomplete type, before
13899 TYPE_FIELDS has been copied around. */
13900 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13901 return true;
13904 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13905 return false;
13907 /* For aggregate types, all the fields must be the same. */
13908 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13909 f1 || f2;
13910 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13912 /* Skip non-fields and zero-sized fields. */
13913 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13914 || (DECL_SIZE (f1)
13915 && integer_zerop (DECL_SIZE (f1)))))
13916 f1 = TREE_CHAIN (f1);
13917 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13918 || (DECL_SIZE (f2)
13919 && integer_zerop (DECL_SIZE (f2)))))
13920 f2 = TREE_CHAIN (f2);
13921 if (!f1 || !f2)
13922 break;
13923 /* The fields must have the same name, offset and type. */
13924 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13925 || !gimple_compare_field_offset (f1, f2)
13926 || !gimple_canonical_types_compatible_p
13927 (TREE_TYPE (f1), TREE_TYPE (f2),
13928 trust_type_canonical))
13929 return false;
13932 /* If one aggregate has more fields than the other, they
13933 are not the same. */
13934 if (f1 || f2)
13935 return false;
13937 return true;
13940 default:
13941 /* Consider all types with language specific trees in them mutually
13942 compatible. This is executed only from verify_type and false
13943 positives can be tolerated. */
13944 gcc_assert (!in_lto_p);
13945 return true;
13949 /* An OPAQUE_TYPE T should have only size and alignment information
13950 and its mode should be of class MODE_OPAQUE. This function verifies that
13951 these properties of T match TV, which is the main variant of T, and TC,
13952 which is the canonical type of T. */
13954 static void
13955 verify_opaque_type (const_tree t, tree tv, tree tc)
13957 gcc_assert (OPAQUE_TYPE_P (t));
13958 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13959 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13961 /* For an opaque type T1, check whether its properties match
13962 the corresponding ones of the other opaque type T2, and emit
13963 error messages for any that are inconsistent. */
13964 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13965 const char *kind_msg)
13967 if (!OPAQUE_TYPE_P (t2))
13969 error ("type %s is not an opaque type", kind_msg);
13970 debug_tree (t2);
13971 return;
13973 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13975 error ("type %s is not with opaque mode", kind_msg);
13976 debug_tree (t2);
13977 return;
13979 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13981 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13982 debug_tree (t2);
13983 return;
13985 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13986 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13987 if (maybe_ne (t1_size, t2_size))
13989 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13990 debug_tree (t2);
13991 return;
13993 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13995 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13996 debug_tree (t2);
13997 return;
13999 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
14001 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
14002 debug_tree (t2);
14003 return;
14007 if (t != tv)
14008 check_properties_for_opaque_type (t, tv, "variant");
14010 if (t != tc)
14011 check_properties_for_opaque_type (t, tc, "canonical");
14014 /* Verify type T. */
14016 void
14017 verify_type (const_tree t)
14019 bool error_found = false;
14020 tree mv = TYPE_MAIN_VARIANT (t);
14021 tree ct = TYPE_CANONICAL (t);
14023 if (OPAQUE_TYPE_P (t))
14025 verify_opaque_type (t, mv, ct);
14026 return;
14029 if (!mv)
14031 error ("main variant is not defined");
14032 error_found = true;
14034 else if (mv != TYPE_MAIN_VARIANT (mv))
14036 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14037 debug_tree (mv);
14038 error_found = true;
14040 else if (t != mv && !verify_type_variant (t, mv))
14041 error_found = true;
14043 if (!ct)
14045 else if (TYPE_CANONICAL (ct) != ct)
14047 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14048 debug_tree (ct);
14049 error_found = true;
14051 /* Method and function types cannot be used to address memory and thus
14052 TYPE_CANONICAL really matters only for determining useless conversions.
14054 FIXME: The C++ FE produces declarations of builtin functions that are not
14055 compatible with their main variants. */
14056 else if (TREE_CODE (t) == FUNCTION_TYPE)
14058 else if (t != ct
14059 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14060 with variably sized arrays because their sizes may have been
14061 gimplified to different variables. */
14062 && !variably_modified_type_p (ct, NULL)
14063 && !gimple_canonical_types_compatible_p (t, ct, false)
14064 && COMPLETE_TYPE_P (t))
14066 error ("%<TYPE_CANONICAL%> is not compatible");
14067 debug_tree (ct);
14068 error_found = true;
14071 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14072 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14074 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14075 debug_tree (ct);
14076 error_found = true;
14078 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14080 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14081 debug_tree (ct);
14082 debug_tree (TYPE_MAIN_VARIANT (ct));
14083 error_found = true;
14087 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14088 if (RECORD_OR_UNION_TYPE_P (t))
14090 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14091 and leaves the pointer dangling from time to time. */
14092 if (TYPE_VFIELD (t)
14093 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14094 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14096 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14097 debug_tree (TYPE_VFIELD (t));
14098 error_found = true;
14101 else if (TREE_CODE (t) == POINTER_TYPE)
14103 if (TYPE_NEXT_PTR_TO (t)
14104 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14106 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14107 debug_tree (TYPE_NEXT_PTR_TO (t));
14108 error_found = true;
14111 else if (TREE_CODE (t) == REFERENCE_TYPE)
14113 if (TYPE_NEXT_REF_TO (t)
14114 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14116 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14117 debug_tree (TYPE_NEXT_REF_TO (t));
14118 error_found = true;
14121 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14122 || FIXED_POINT_TYPE_P (t))
14124 /* FIXME: The following check should pass:
14125 useless_type_conversion_p (const_cast <tree> (t),
14126 TREE_TYPE (TYPE_MIN_VALUE (t)))
14127 but does not for C sizetypes in LTO. */
14130 /* Check various uses of TYPE_MAX_VALUE_RAW. */
14131 if (RECORD_OR_UNION_TYPE_P (t))
14133 if (!TYPE_BINFO (t))
14135 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14137 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14138 debug_tree (TYPE_BINFO (t));
14139 error_found = true;
14141 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14143 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14144 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14145 error_found = true;
14148 else if (FUNC_OR_METHOD_TYPE_P (t))
14150 if (TYPE_METHOD_BASETYPE (t)
14151 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14152 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14154 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14155 debug_tree (TYPE_METHOD_BASETYPE (t));
14156 error_found = true;
14159 else if (TREE_CODE (t) == OFFSET_TYPE)
14161 if (TYPE_OFFSET_BASETYPE (t)
14162 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14163 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14165 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14166 debug_tree (TYPE_OFFSET_BASETYPE (t));
14167 error_found = true;
14170 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14171 || FIXED_POINT_TYPE_P (t))
14173 /* FIXME: The following check should pass:
14174 useless_type_conversion_p (const_cast <tree> (t),
14175 TREE_TYPE (TYPE_MAX_VALUE (t)))
14176 but does not for C sizetypes in LTO. */
14178 else if (TREE_CODE (t) == ARRAY_TYPE)
14180 if (TYPE_ARRAY_MAX_SIZE (t)
14181 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14183 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14184 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14185 error_found = true;
14188 else if (TYPE_MAX_VALUE_RAW (t))
14190 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14191 debug_tree (TYPE_MAX_VALUE_RAW (t));
14192 error_found = true;
14195 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14197 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14198 debug_tree (TYPE_LANG_SLOT_1 (t));
14199 error_found = true;
14202 /* Check various uses of TYPE_VALUES_RAW. */
14203 if (TREE_CODE (t) == ENUMERAL_TYPE)
14204 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14206 tree value = TREE_VALUE (l);
14207 tree name = TREE_PURPOSE (l);
14209 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14210 a CONST_DECL of ENUMERAL_TYPE. */
14211 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14213 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14214 debug_tree (value);
14215 debug_tree (name);
14216 error_found = true;
14218 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14219 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14220 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14222 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14223 "to the enum");
14224 debug_tree (value);
14225 debug_tree (name);
14226 error_found = true;
14228 if (TREE_CODE (name) != IDENTIFIER_NODE)
14230 error ("enum value name is not %<IDENTIFIER_NODE%>");
14231 debug_tree (value);
14232 debug_tree (name);
14233 error_found = true;
14236 else if (TREE_CODE (t) == ARRAY_TYPE)
14238 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14240 error ("array %<TYPE_DOMAIN%> is not integer type");
14241 debug_tree (TYPE_DOMAIN (t));
14242 error_found = true;
14245 else if (RECORD_OR_UNION_TYPE_P (t))
14247 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14249 error ("%<TYPE_FIELDS%> defined in incomplete type");
14250 error_found = true;
14252 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14254 /* TODO: verify properties of decls. */
14255 if (TREE_CODE (fld) == FIELD_DECL)
14257 else if (TREE_CODE (fld) == TYPE_DECL)
14259 else if (TREE_CODE (fld) == CONST_DECL)
14261 else if (VAR_P (fld))
14263 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14265 else if (TREE_CODE (fld) == USING_DECL)
14267 else if (TREE_CODE (fld) == FUNCTION_DECL)
14269 else
14271 error ("wrong tree in %<TYPE_FIELDS%> list");
14272 debug_tree (fld);
14273 error_found = true;
14277 else if (TREE_CODE (t) == INTEGER_TYPE
14278 || TREE_CODE (t) == BOOLEAN_TYPE
14279 || TREE_CODE (t) == BITINT_TYPE
14280 || TREE_CODE (t) == OFFSET_TYPE
14281 || TREE_CODE (t) == REFERENCE_TYPE
14282 || TREE_CODE (t) == NULLPTR_TYPE
14283 || TREE_CODE (t) == POINTER_TYPE)
14285 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14287 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14288 "is %p",
14289 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14290 error_found = true;
14292 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14294 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14295 debug_tree (TYPE_CACHED_VALUES (t));
14296 error_found = true;
14298 /* Verify just enough of the cache to ensure that no one copied it to a new
14299 type. All copying should go through copy_node, which should clear it. */
14300 else if (TYPE_CACHED_VALUES_P (t))
14302 int i;
14303 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14304 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14305 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14307 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14308 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14309 error_found = true;
14310 break;
14314 else if (FUNC_OR_METHOD_TYPE_P (t))
14315 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14317 /* C++ FE uses TREE_PURPOSE to store initial values. */
14318 if (TREE_PURPOSE (l) && in_lto_p)
14320 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14321 debug_tree (l);
14322 error_found = true;
14324 if (!TYPE_P (TREE_VALUE (l)))
14326 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14327 debug_tree (l);
14328 error_found = true;
14331 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14333 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14334 debug_tree (TYPE_VALUES_RAW (t));
14335 error_found = true;
14337 if (TREE_CODE (t) != INTEGER_TYPE
14338 && TREE_CODE (t) != BOOLEAN_TYPE
14339 && TREE_CODE (t) != BITINT_TYPE
14340 && TREE_CODE (t) != OFFSET_TYPE
14341 && TREE_CODE (t) != REFERENCE_TYPE
14342 && TREE_CODE (t) != NULLPTR_TYPE
14343 && TREE_CODE (t) != POINTER_TYPE
14344 && TYPE_CACHED_VALUES_P (t))
14346 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14347 error_found = true;
14350 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14351 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14352 of a type. */
14353 if (TREE_CODE (t) == METHOD_TYPE
14354 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14356 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14357 error_found = true;
14360 if (error_found)
14362 debug_tree (const_cast <tree> (t));
14363 internal_error ("%qs failed", __func__);
14368 /* Return 1 if ARG interpreted as signed in its precision is known to be
14369 always positive or 2 if ARG is known to be always negative, or 3 if
14370 ARG may be positive or negative. */
14372 int
14373 get_range_pos_neg (tree arg)
14375 if (arg == error_mark_node)
14376 return 3;
14378 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14379 int cnt = 0;
14380 if (TREE_CODE (arg) == INTEGER_CST)
14382 wide_int w = wi::sext (wi::to_wide (arg), prec);
14383 if (wi::neg_p (w))
14384 return 2;
14385 else
14386 return 1;
14388 while (CONVERT_EXPR_P (arg)
14389 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14390 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14392 arg = TREE_OPERAND (arg, 0);
14393 /* A narrower value zero-extended into a wider type
14394 will always result in a positive value. */
14395 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14396 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14397 return 1;
14398 prec = TYPE_PRECISION (TREE_TYPE (arg));
14399 if (++cnt > 30)
14400 return 3;
14403 if (TREE_CODE (arg) != SSA_NAME)
14404 return 3;
14405 value_range r;
14406 while (!get_global_range_query ()->range_of_expr (r, arg)
14407 || r.undefined_p () || r.varying_p ())
14409 gimple *g = SSA_NAME_DEF_STMT (arg);
14410 if (is_gimple_assign (g)
14411 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14413 tree t = gimple_assign_rhs1 (g);
14414 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14415 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14417 if (TYPE_UNSIGNED (TREE_TYPE (t))
14418 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14419 return 1;
14420 prec = TYPE_PRECISION (TREE_TYPE (t));
14421 arg = t;
14422 if (++cnt > 30)
14423 return 3;
14424 continue;
14427 return 3;
14429 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14431 /* For unsigned values, the "positive" range comes
14432 below the "negative" range. */
14433 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14434 return 1;
14435 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14436 return 2;
14438 else
14440 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14441 return 1;
14442 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14443 return 2;
14445 return 3;
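/* Illustrative sketch (assumption, not part of the original source):
   for integer constants the classification is immediate.  */
#if 0
  gcc_checking_assert
    (get_range_pos_neg (build_int_cst (integer_type_node, 42)) == 1
     && get_range_pos_neg (build_int_cst (integer_type_node, -7)) == 2);
#endif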
14451 /* Return true if ARG is marked with the nonnull attribute in the
14452 current function signature. */
14454 bool
14455 nonnull_arg_p (const_tree arg)
14457 tree t, attrs, fntype;
14458 unsigned HOST_WIDE_INT arg_num;
14460 gcc_assert (TREE_CODE (arg) == PARM_DECL
14461 && (POINTER_TYPE_P (TREE_TYPE (arg))
14462 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14464 /* The static chain decl is always non-null. */
14465 if (arg == cfun->static_chain_decl)
14466 return true;
14468 /* The THIS argument of a method is always non-NULL. */
14469 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14470 && arg == DECL_ARGUMENTS (cfun->decl)
14471 && flag_delete_null_pointer_checks)
14472 return true;
14474 /* Values passed by reference are always non-NULL. */
14475 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14476 && flag_delete_null_pointer_checks)
14477 return true;
14479 fntype = TREE_TYPE (cfun->decl);
14480 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14482 attrs = lookup_attribute ("nonnull", attrs);
14484 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14485 if (attrs == NULL_TREE)
14486 return false;
14488 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14489 if (TREE_VALUE (attrs) == NULL_TREE)
14490 return true;
14492 /* Get the position number for ARG in the function signature. */
14493 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14495 t = DECL_CHAIN (t), arg_num++)
14497 if (t == arg)
14498 break;
14501 gcc_assert (t == arg);
14503 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14504 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14506 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14507 return true;
14511 return false;
14514 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14515 information. */
14517 location_t
14518 set_block (location_t loc, tree block)
14520 location_t pure_loc = get_pure_location (loc);
14521 source_range src_range = get_range_from_loc (line_table, loc);
14522 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14523 return line_table->get_or_create_combined_loc (pure_loc, src_range, block,
14524 discriminator);
14527 location_t
14528 set_source_range (tree expr, location_t start, location_t finish)
14530 source_range src_range;
14531 src_range.m_start = start;
14532 src_range.m_finish = finish;
14533 return set_source_range (expr, src_range);
14536 location_t
14537 set_source_range (tree expr, source_range src_range)
14539 if (!EXPR_P (expr))
14540 return UNKNOWN_LOCATION;
14542 location_t expr_location = EXPR_LOCATION (expr);
14543 location_t pure_loc = get_pure_location (expr_location);
14544 unsigned discriminator = get_discriminator_from_loc (expr_location);
14545 location_t adhoc = line_table->get_or_create_combined_loc (pure_loc,
14546 src_range,
14547 nullptr,
14548 discriminator);
14549 SET_EXPR_LOCATION (expr, adhoc);
14550 return adhoc;
14553 /* Return EXPR, potentially wrapped in a node carrying location LOC,
14554 if !CAN_HAVE_LOCATION_P (expr).
14556 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14557 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14559 Wrapper nodes can be identified using location_wrapper_p. */
14561 tree
14562 maybe_wrap_with_location (tree expr, location_t loc)
14564 if (expr == NULL)
14565 return NULL;
14566 if (loc == UNKNOWN_LOCATION)
14567 return expr;
14568 if (CAN_HAVE_LOCATION_P (expr))
14569 return expr;
14570 /* We should only be adding wrappers for constants and for decls,
14571 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14572 gcc_assert (CONSTANT_CLASS_P (expr)
14573 || DECL_P (expr)
14574 || EXCEPTIONAL_CLASS_P (expr));
14576 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14577 any impact of the wrapper nodes. */
14578 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14579 return expr;
14581 /* Compiler-generated temporary variables don't need a wrapper. */
14582 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14583 return expr;
14585 /* If any auto_suppress_location_wrappers are active, don't create
14586 wrappers. */
14587 if (suppress_location_wrappers > 0)
14588 return expr;
14590 tree_code code
14591 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14592 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14593 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14594 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14595 /* Mark this node as being a wrapper. */
14596 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14597 return wrapper;
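/* Illustrative sketch (assumption, not part of the original source):
   wrapping an INTEGER_CST yields a NON_LVALUE_EXPR wrapper that
   location_wrapper_p recognizes and STRIP_NOPS later removes; when
   wrapping is suppressed or the location is unknown, the constant is
   returned unchanged.  */
#if 0
  tree cst = build_int_cst (integer_type_node, 42);
  tree w = maybe_wrap_with_location (cst, input_location);
  gcc_checking_assert (w == cst
		       || (location_wrapper_p (w)
			   && TREE_CODE (w) == NON_LVALUE_EXPR));
#endif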
14600 int suppress_location_wrappers;
14602 /* Return the name of combined function FN, for debugging purposes. */
14604 const char *
14605 combined_fn_name (combined_fn fn)
14607 if (builtin_fn_p (fn))
14609 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14610 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14612 else
14613 return internal_fn_name (as_internal_fn (fn));
14616 /* Return a bitmap with a bit set corresponding to each argument in
14617 a function call type FNTYPE declared with attribute nonnull,
14618 or null if none of the function's arguments are nonnull. The caller
14619 must free the bitmap. */
14621 bitmap
14622 get_nonnull_args (const_tree fntype)
14624 if (fntype == NULL_TREE)
14625 return NULL;
14627 bitmap argmap = NULL;
14628 if (TREE_CODE (fntype) == METHOD_TYPE)
14630 /* The this pointer in C++ non-static member functions is
14631 implicitly nonnull whether or not it's declared as such. */
14632 argmap = BITMAP_ALLOC (NULL);
14633 bitmap_set_bit (argmap, 0);
14636 tree attrs = TYPE_ATTRIBUTES (fntype);
14637 if (!attrs)
14638 return argmap;
14640 /* A function declaration can specify multiple attribute nonnull,
14641 each with zero or more arguments. The loop below creates a bitmap
14642 representing a union of all the arguments. An empty (but non-null)
14643 bitmap means that all arguments have been declared nonnull. */
14644 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14646 attrs = lookup_attribute ("nonnull", attrs);
14647 if (!attrs)
14648 break;
14650 if (!argmap)
14651 argmap = BITMAP_ALLOC (NULL);
14653 if (!TREE_VALUE (attrs))
14655 /* Clear the bitmap in case a previous attribute nonnull
14656 set it and this one overrides it for all arguments. */
14657 bitmap_clear (argmap);
14658 return argmap;
14661 /* Iterate over the indices of the arguments declared nonnull
14662 and set a bit for each. */
14663 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14665 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14666 bitmap_set_bit (argmap, val);
14670 return argmap;
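/* Illustrative sketch (assumption, not part of the original source;
   FNDECL below is a hypothetical FUNCTION_DECL): a typical query of the
   returned bitmap, remembering that an empty but non-null bitmap means
   all arguments are declared nonnull.  */
#if 0
  if (bitmap nonnull = get_nonnull_args (TREE_TYPE (fndecl)))
    {
      bool arg0_nonnull
	= bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, 0);
      (void) arg0_nonnull;
      BITMAP_FREE (nonnull);
    }
#endif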
14673 /* Returns true if TYPE is an empty type, i.e. it and all of its subobjects
14674 (recursively) are of structure, union, or array type and carry no data. */
14676 bool
14677 is_empty_type (const_tree type)
14679 if (RECORD_OR_UNION_TYPE_P (type))
14681 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14682 if (TREE_CODE (field) == FIELD_DECL
14683 && !DECL_PADDING_P (field)
14684 && !is_empty_type (TREE_TYPE (field)))
14685 return false;
14686 return true;
14688 else if (TREE_CODE (type) == ARRAY_TYPE)
14689 return (integer_minus_onep (array_type_nelts (type))
14690 || TYPE_DOMAIN (type) == NULL_TREE
14691 || is_empty_type (TREE_TYPE (type)));
14692 return false;
14695 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14696 that shouldn't be passed via stack. */
14698 bool
14699 default_is_empty_record (const_tree type)
14701 if (!abi_version_at_least (12))
14702 return false;
14704 if (type == error_mark_node)
14705 return false;
14707 if (TREE_ADDRESSABLE (type))
14708 return false;
14710 return is_empty_type (TYPE_MAIN_VARIANT (type));
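/* Illustrative sketch (assumption, not part of the original source):
   for C++ the classic case is an empty class, e.g.
     struct S {};              -- default_is_empty_record is true
     struct T { int i; };      -- default_is_empty_record is false
   (given abi_version_at_least (12)).  */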
14713 /* Determine whether TYPE is a structure with a flexible array member,
14714 or a union containing such a structure (possibly recursively). */
14716 bool
14717 flexible_array_type_p (const_tree type)
14719 tree x, last;
14720 switch (TREE_CODE (type))
14722 case RECORD_TYPE:
14723 last = NULL_TREE;
14724 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14725 if (TREE_CODE (x) == FIELD_DECL)
14726 last = x;
14727 if (last == NULL_TREE)
14728 return false;
14729 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14730 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14731 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14732 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14733 return true;
14734 return false;
14735 case UNION_TYPE:
14736 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14738 if (TREE_CODE (x) == FIELD_DECL
14739 && flexible_array_type_p (TREE_TYPE (x)))
14740 return true;
14742 return false;
14743 default:
14744 return false;
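/* Illustrative sketch (assumption, not part of the original source):
     struct S { int n; char data[]; };    -- flexible_array_type_p: true
     union U { struct S s; int i; };      -- true (contains such a struct)
     struct T { int n; char data[4]; };   -- false (complete array)  */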
14748 /* Like int_size_in_bytes, but handle empty records specially. */
14750 HOST_WIDE_INT
14751 arg_int_size_in_bytes (const_tree type)
14753 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14756 /* Like size_in_bytes, but handle empty records specially. */
14758 tree
14759 arg_size_in_bytes (const_tree type)
14761 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14764 /* Return true if an expression with CODE has to have the same result type as
14765 its first operand. */
14767 bool
14768 expr_type_first_operand_type_p (tree_code code)
14770 switch (code)
14772 case NEGATE_EXPR:
14773 case ABS_EXPR:
14774 case BIT_NOT_EXPR:
14775 case PAREN_EXPR:
14776 case CONJ_EXPR:
14778 case PLUS_EXPR:
14779 case MINUS_EXPR:
14780 case MULT_EXPR:
14781 case TRUNC_DIV_EXPR:
14782 case CEIL_DIV_EXPR:
14783 case FLOOR_DIV_EXPR:
14784 case ROUND_DIV_EXPR:
14785 case TRUNC_MOD_EXPR:
14786 case CEIL_MOD_EXPR:
14787 case FLOOR_MOD_EXPR:
14788 case ROUND_MOD_EXPR:
14789 case RDIV_EXPR:
14790 case EXACT_DIV_EXPR:
14791 case MIN_EXPR:
14792 case MAX_EXPR:
14793 case BIT_IOR_EXPR:
14794 case BIT_XOR_EXPR:
14795 case BIT_AND_EXPR:
14797 case LSHIFT_EXPR:
14798 case RSHIFT_EXPR:
14799 case LROTATE_EXPR:
14800 case RROTATE_EXPR:
14801 return true;
14803 default:
14804 return false;
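/* Illustrative sketch (assumption, not part of the original source):
   PLUS_EXPR is in the list above because x + y has the type of x, whereas
   comparisons such as EQ_EXPR are not, since their result type is a
   boolean rather than the type of the first operand.  */
#if 0
  gcc_checking_assert (expr_type_first_operand_type_p (PLUS_EXPR)
		       && !expr_type_first_operand_type_p (EQ_EXPR));
#endif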
14808 /* Return a typenode for the "standard" C type with a given name. */
14809 tree
14810 get_typenode_from_name (const char *name)
14812 if (name == NULL || *name == '\0')
14813 return NULL_TREE;
14815 if (strcmp (name, "char") == 0)
14816 return char_type_node;
14817 if (strcmp (name, "unsigned char") == 0)
14818 return unsigned_char_type_node;
14819 if (strcmp (name, "signed char") == 0)
14820 return signed_char_type_node;
14822 if (strcmp (name, "short int") == 0)
14823 return short_integer_type_node;
14824 if (strcmp (name, "short unsigned int") == 0)
14825 return short_unsigned_type_node;
14827 if (strcmp (name, "int") == 0)
14828 return integer_type_node;
14829 if (strcmp (name, "unsigned int") == 0)
14830 return unsigned_type_node;
14832 if (strcmp (name, "long int") == 0)
14833 return long_integer_type_node;
14834 if (strcmp (name, "long unsigned int") == 0)
14835 return long_unsigned_type_node;
14837 if (strcmp (name, "long long int") == 0)
14838 return long_long_integer_type_node;
14839 if (strcmp (name, "long long unsigned int") == 0)
14840 return long_long_unsigned_type_node;
14842 gcc_unreachable ();
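/* Illustrative sketch (assumption, not part of the original source):
   callers pass the canonical C spellings; any other spelling hits the
   gcc_unreachable above.  */
#if 0
  gcc_checking_assert (get_typenode_from_name ("unsigned int")
		       == unsigned_type_node);
#endif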
14845 /* List of pointer types used to declare builtins before we have seen their
14846 real declaration.
14848 Keep the size up to date in tree.h ! */
14849 const builtin_structptr_type builtin_structptr_types[6] =
14851 { fileptr_type_node, ptr_type_node, "FILE" },
14852 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14853 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14854 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14855 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14856 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14859 /* Return the maximum object size. */
14861 tree
14862 max_object_size (void)
14864 /* To do: Make this a configurable parameter. */
14865 return TYPE_MAX_VALUE (ptrdiff_type_node);
14868 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14869 parameter default to false and that weeds out error_mark_node. */
14871 bool
14872 verify_type_context (location_t loc, type_context_kind context,
14873 const_tree type, bool silent_p)
14875 if (type == error_mark_node)
14876 return true;
14878 gcc_assert (TYPE_P (type));
14879 return (!targetm.verify_type_context
14880 || targetm.verify_type_context (loc, context, type, silent_p));
14883 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14884 delete operators. Return false if they may or may not name such
14885 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14886 do not. */
14888 bool
14889 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14890 bool *pcertain /* = NULL */)
14892 bool certain;
14893 if (!pcertain)
14894 pcertain = &certain;
14896 const char *new_name = IDENTIFIER_POINTER (new_asm);
14897 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14898 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14899 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14901 /* The following failures are due to invalid names so they're not
14902 considered certain mismatches. */
14903 *pcertain = false;
14905 if (new_len < 5 || delete_len < 6)
14906 return false;
14907 if (new_name[0] == '_')
14908 ++new_name, --new_len;
14909 if (new_name[0] == '_')
14910 ++new_name, --new_len;
14911 if (delete_name[0] == '_')
14912 ++delete_name, --delete_len;
14913 if (delete_name[0] == '_')
14914 ++delete_name, --delete_len;
14915 if (new_len < 4 || delete_len < 5)
14916 return false;
14918 /* The following failures are due to names of user-defined operators
14919 so they're also not considered certain mismatches. */
14921 /* *_len is now just the length after initial underscores. */
14922 if (new_name[0] != 'Z' || new_name[1] != 'n')
14923 return false;
14924 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14925 return false;
14927 /* The following failures are certain mismatches. */
14928 *pcertain = true;
14930 /* _Znw must match _Zdl, _Zna must match _Zda. */
14931 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14932 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14933 return false;
14934 /* 'j', 'm' and 'y' correspond to size_t. */
14935 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14936 return false;
14937 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14938 return false;
14939 if (new_len == 4
14940 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14942 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14943 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14944 if (delete_len == 5)
14945 return true;
14946 if (delete_len == 6 && delete_name[5] == new_name[3])
14947 return true;
14948 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14949 return true;
14951 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14952 || (new_len == 33
14953 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14955 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14956 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14957 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14958 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14959 return true;
14960 if (delete_len == 21
14961 && delete_name[5] == new_name[3]
14962 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14963 return true;
14964 if (delete_len == 34
14965 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14966 return true;
14969 /* The negative result is conservative. */
14970 *pcertain = false;
14971 return false;
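/* Illustrative sketch (assumption, not part of the original source):
   the Itanium-mangled operator new (size_t) for an LP64 target, "_Znwm",
   pairs with operator delete (void *), "_ZdlPv".  */
#if 0
  bool certain;
  gcc_checking_assert (valid_new_delete_pair_p (get_identifier ("_Znwm"),
						get_identifier ("_ZdlPv"),
						&certain));
#endif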
14974 /* Return the zero-based number corresponding to the argument being
14975 deallocated if FNDECL is a deallocation function or an out-of-bounds
14976 value if it isn't. */
14978 unsigned
14979 fndecl_dealloc_argno (tree fndecl)
14981 /* A call to operator delete isn't recognized as one to a built-in. */
14982 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14984 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14985 return 0;
14987 /* Avoid placement delete that's not been inlined. */
14988 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14989 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14990 || id_equal (fname, "_ZdaPvS_")) // array form
14991 return UINT_MAX;
14992 return 0;
14995 /* TODO: Handle user-defined functions with attribute malloc? Handle
14996 known non-built-ins like fopen? */
14997 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14999 switch (DECL_FUNCTION_CODE (fndecl))
15001 case BUILT_IN_FREE:
15002 case BUILT_IN_REALLOC:
15003 return 0;
15004 default:
15005 break;
15007 return UINT_MAX;
15010 tree attrs = DECL_ATTRIBUTES (fndecl);
15011 if (!attrs)
15012 return UINT_MAX;
15014 for (tree atfree = attrs;
15015 (atfree = lookup_attribute ("*dealloc", atfree));
15016 atfree = TREE_CHAIN (atfree))
15018 tree alloc = TREE_VALUE (atfree);
15019 if (!alloc)
15020 continue;
15022 tree pos = TREE_CHAIN (alloc);
15023 if (!pos)
15024 return 0;
15026 pos = TREE_VALUE (pos);
15027 return TREE_INT_CST_LOW (pos) - 1;
15030 return UINT_MAX;
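/* Illustrative sketch (assumption, not part of the original source;
   assumes free has been declared so builtin_decl_explicit is non-null):
   for the standard declaration of free the pointer being released is
   argument zero, while a function with no deallocation semantics yields
   UINT_MAX.  */
#if 0
  tree free_decl = builtin_decl_explicit (BUILT_IN_FREE);
  gcc_checking_assert (fndecl_dealloc_argno (free_decl) == 0);
#endif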
15033 /* If EXPR refers to a character array or pointer declared with attribute
15034 nonstring, return a decl for that array or pointer and set *REF
15035 to the referenced enclosing object or pointer. Otherwise return
15036 null. */
15038 tree
15039 get_attr_nonstring_decl (tree expr, tree *ref)
15041 tree decl = expr;
15042 tree var = NULL_TREE;
15043 if (TREE_CODE (decl) == SSA_NAME)
15045 gimple *def = SSA_NAME_DEF_STMT (decl);
15047 if (is_gimple_assign (def))
15049 tree_code code = gimple_assign_rhs_code (def);
15050 if (code == ADDR_EXPR
15051 || code == COMPONENT_REF
15052 || code == VAR_DECL)
15053 decl = gimple_assign_rhs1 (def);
15055 else
15056 var = SSA_NAME_VAR (decl);
15059 if (TREE_CODE (decl) == ADDR_EXPR)
15060 decl = TREE_OPERAND (decl, 0);
15062 /* To simplify calling code, store the referenced DECL regardless of
15063 the attribute determined below, but avoid storing the SSA_NAME_VAR
15064 obtained above (it's not useful for dataflow purposes). */
15065 if (ref)
15066 *ref = decl;
15068 /* Use the SSA_NAME_VAR that was determined above to see if it's
15069 declared nonstring. Otherwise drill down into the referenced
15070 DECL. */
15071 if (var)
15072 decl = var;
15073 else if (TREE_CODE (decl) == ARRAY_REF)
15074 decl = TREE_OPERAND (decl, 0);
15075 else if (TREE_CODE (decl) == COMPONENT_REF)
15076 decl = TREE_OPERAND (decl, 1);
15077 else if (TREE_CODE (decl) == MEM_REF)
15078 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15080 if (DECL_P (decl)
15081 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15082 return decl;
15084 return NULL_TREE;
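/* Illustrative sketch (assumption, not part of the original source):
   given a declaration like
     char buf[4] __attribute__ ((nonstring));
   passing an ADDR_EXPR of BUF (or an SSA name based on it) returns the
   VAR_DECL for BUF and stores the referenced object in *REF.  */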
15087 /* Return the length of the attribute-names string if the ARGLIST chain
15088 contains more than one argument, -1 otherwise. */
15090 int
15091 get_target_clone_attr_len (tree arglist)
15093 tree arg;
15094 int str_len_sum = 0;
15095 int argnum = 0;
15097 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15099 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15100 size_t len = strlen (str);
15101 str_len_sum += len + 1;
15102 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15103 argnum++;
15104 argnum++;
15106 if (argnum <= 1)
15107 return -1;
15108 return str_len_sum;
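/* Illustrative sketch (assumption, not part of the original source):
   for __attribute__ ((target_clones ("avx2", "arch=slm", "default")))
   the ARGLIST carries three comma-free strings, so the function returns
   the summed lengths plus one separator per string rather than -1.  */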
15111 void
15112 tree_cc_finalize (void)
15114 clear_nonstandard_integer_type_cache ();
15115 vec_free (bitint_type_cache);
15118 #if CHECKING_P
15120 namespace selftest {
15122 /* Selftests for tree. */
15124 /* Verify that integer constants are sane. */
15126 static void
15127 test_integer_constants ()
15129 ASSERT_TRUE (integer_type_node != NULL);
15130 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15132 tree type = integer_type_node;
15134 tree zero = build_zero_cst (type);
15135 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15136 ASSERT_EQ (type, TREE_TYPE (zero));
15138 tree one = build_int_cst (type, 1);
15139 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15140 ASSERT_EQ (type, TREE_TYPE (one));
15143 /* Verify identifiers. */
15145 static void
15146 test_identifiers ()
15148 tree identifier = get_identifier ("foo");
15149 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15150 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15153 /* Verify LABEL_DECL. */
15155 static void
15156 test_labels ()
15158 tree identifier = get_identifier ("err");
15159 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15160 identifier, void_type_node);
15161 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15162 ASSERT_FALSE (FORCED_LABEL (label_decl));
15165 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15166 are given by VALS. */
15168 static tree
15169 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15171 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15172 tree_vector_builder builder (type, vals.length (), 1);
15173 builder.splice (vals);
15174 return builder.build ();
15177 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15179 static void
15180 check_vector_cst (const vec<tree> &expected, tree actual)
15182 ASSERT_KNOWN_EQ (expected.length (),
15183 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15184 for (unsigned int i = 0; i < expected.length (); ++i)
15185 ASSERT_EQ (wi::to_wide (expected[i]),
15186 wi::to_wide (vector_cst_elt (actual, i)));
15189 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15190 and that its elements match EXPECTED. */
15192 static void
15193 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15194 unsigned int npatterns)
15196 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15197 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15198 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15199 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15200 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15201 check_vector_cst (expected, actual);
15204 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15205 and NPATTERNS background elements, and that its elements match
15206 EXPECTED. */
15208 static void
15209 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15210 unsigned int npatterns)
15212 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15213 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15214 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15215 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15216 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15217 check_vector_cst (expected, actual);
15220 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15221 and that its elements match EXPECTED. */
15223 static void
15224 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15225 unsigned int npatterns)
15227 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15228 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15229 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15230 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15231 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15232 check_vector_cst (expected, actual);
15235 /* Test the creation of VECTOR_CSTs. */
15237 static void
15238 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15240 auto_vec<tree, 8> elements (8);
15241 elements.quick_grow (8);
15242 tree element_type = build_nonstandard_integer_type (16, true);
15243 tree vector_type = build_vector_type (element_type, 8);
15245 /* Test a simple linear series with a base of 0 and a step of 1:
15246 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15247 for (unsigned int i = 0; i < 8; ++i)
15248 elements[i] = build_int_cst (element_type, i);
15249 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15250 check_vector_cst_stepped (elements, vector, 1);
15252 /* Try the same with the first element replaced by 100:
15253 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15254 elements[0] = build_int_cst (element_type, 100);
15255 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15256 check_vector_cst_stepped (elements, vector, 1);
15258 /* Try a series that wraps around.
15259 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15260 for (unsigned int i = 1; i < 8; ++i)
15261 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15262 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15263 check_vector_cst_stepped (elements, vector, 1);
15265 /* Try a downward series:
15266 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15267 for (unsigned int i = 1; i < 8; ++i)
15268 elements[i] = build_int_cst (element_type, 80 - i);
15269 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15270 check_vector_cst_stepped (elements, vector, 1);
15272 /* Try two interleaved series with different bases and steps:
15273 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15274 elements[1] = build_int_cst (element_type, 53);
15275 for (unsigned int i = 2; i < 8; i += 2)
15277 elements[i] = build_int_cst (element_type, 70 - i * 2);
15278 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15280 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15281 check_vector_cst_stepped (elements, vector, 2);
15283 /* Try a duplicated value:
15284 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15285 for (unsigned int i = 1; i < 8; ++i)
15286 elements[i] = elements[0];
15287 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15288 check_vector_cst_duplicate (elements, vector, 1);
15290 /* Try an interleaved duplicated value:
15291 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15292 elements[1] = build_int_cst (element_type, 55);
15293 for (unsigned int i = 2; i < 8; ++i)
15294 elements[i] = elements[i - 2];
15295 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15296 check_vector_cst_duplicate (elements, vector, 2);
15298 /* Try a duplicated value with 2 exceptions
15299 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15300 elements[0] = build_int_cst (element_type, 41);
15301 elements[1] = build_int_cst (element_type, 97);
15302 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15303 check_vector_cst_fill (elements, vector, 2);
15305 /* Try with and without a step
15306 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15307 for (unsigned int i = 3; i < 8; i += 2)
15308 elements[i] = build_int_cst (element_type, i * 7);
15309 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15310 check_vector_cst_stepped (elements, vector, 2);
15312 /* Try a fully-general constant:
15313 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15314 elements[5] = build_int_cst (element_type, 9990);
15315 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15316 check_vector_cst_fill (elements, vector, 4);
15319 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15320 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15321 modifying its argument in-place. */
15323 static void
15324 check_strip_nops (tree node, tree expected)
15326 STRIP_NOPS (node);
15327 ASSERT_EQ (expected, node);
15330 /* Verify location wrappers. */
15332 static void
15333 test_location_wrappers ()
15334 {
15335 location_t loc = BUILTINS_LOCATION;
15337 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15339 /* Wrapping a constant. */
15340 tree int_cst = build_int_cst (integer_type_node, 42);
15341 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15342 ASSERT_FALSE (location_wrapper_p (int_cst));
15344 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15345 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15346 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15347 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15349 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15350 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15352 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15353 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15354 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15355 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15357 /* Wrapping a STRING_CST. */
15358 tree string_cst = build_string (4, "foo");
15359 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15360 ASSERT_FALSE (location_wrapper_p (string_cst));
15362 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15363 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15364 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15365 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15366 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
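/* The TREE_CODE check above reflects how maybe_wrap_with_location
   appears to pick the wrapper code: STRING_CSTs and decls get a
   VIEW_CONVERT_EXPR wrapper, whereas most other constants get
   NON_LVALUE_EXPR.  Under that assumption the earlier INTEGER_CST
   wrapper would also satisfy

     ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (wrapped_int_cst));

   although that is not asserted here.  */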
15369 /* Wrapping a variable. */
15370 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15371 get_identifier ("some_int_var"),
15372 integer_type_node);
15373 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15374 ASSERT_FALSE (location_wrapper_p (int_var));
15376 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15377 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15378 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15379 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15381 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15382 wrapper. */
15383 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15384 ASSERT_FALSE (location_wrapper_p (r_cast));
15385 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15387 /* Verify that STRIP_NOPS removes wrappers. */
15388 check_strip_nops (wrapped_int_cst, int_cst);
15389 check_strip_nops (wrapped_string_cst, string_cst);
15390 check_strip_nops (wrapped_int_var, int_var);
15391 }
15393 /* Test various tree predicates. Verify that location wrappers don't
15394 affect the results. */
15396 static void
15397 test_predicates ()
15398 {
15399 /* Build various constants and wrappers around them. */
15401 location_t loc = BUILTINS_LOCATION;
15403 tree i_0 = build_int_cst (integer_type_node, 0);
15404 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15406 tree i_1 = build_int_cst (integer_type_node, 1);
15407 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15409 tree i_m1 = build_int_cst (integer_type_node, -1);
15410 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15412 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15413 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15414 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15415 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15416 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15417 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15419 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15420 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15421 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15423 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15424 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15425 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
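/* Each scalar integer and real constant above is paired with a wr_*
   location wrapper around it.  The point of the checks below is that
   the predicates look through such wrappers (presumably via
   STRIP_ANY_LOCATION_WRAPPER) and therefore give the same answer for
   X and wr_X.  */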
15427 /* TODO: vector constants. */
15429 /* Test integer_onep. */
15430 ASSERT_FALSE (integer_onep (i_0));
15431 ASSERT_FALSE (integer_onep (wr_i_0));
15432 ASSERT_TRUE (integer_onep (i_1));
15433 ASSERT_TRUE (integer_onep (wr_i_1));
15434 ASSERT_FALSE (integer_onep (i_m1));
15435 ASSERT_FALSE (integer_onep (wr_i_m1));
15436 ASSERT_FALSE (integer_onep (f_0));
15437 ASSERT_FALSE (integer_onep (wr_f_0));
15438 ASSERT_FALSE (integer_onep (f_1));
15439 ASSERT_FALSE (integer_onep (wr_f_1));
15440 ASSERT_FALSE (integer_onep (f_m1));
15441 ASSERT_FALSE (integer_onep (wr_f_m1));
15442 ASSERT_FALSE (integer_onep (c_i_0));
15443 ASSERT_TRUE (integer_onep (c_i_1));
15444 ASSERT_FALSE (integer_onep (c_i_m1));
15445 ASSERT_FALSE (integer_onep (c_f_0));
15446 ASSERT_FALSE (integer_onep (c_f_1));
15447 ASSERT_FALSE (integer_onep (c_f_m1));
15449 /* Test integer_zerop. */
15450 ASSERT_TRUE (integer_zerop (i_0));
15451 ASSERT_TRUE (integer_zerop (wr_i_0));
15452 ASSERT_FALSE (integer_zerop (i_1));
15453 ASSERT_FALSE (integer_zerop (wr_i_1));
15454 ASSERT_FALSE (integer_zerop (i_m1));
15455 ASSERT_FALSE (integer_zerop (wr_i_m1));
15456 ASSERT_FALSE (integer_zerop (f_0));
15457 ASSERT_FALSE (integer_zerop (wr_f_0));
15458 ASSERT_FALSE (integer_zerop (f_1));
15459 ASSERT_FALSE (integer_zerop (wr_f_1));
15460 ASSERT_FALSE (integer_zerop (f_m1));
15461 ASSERT_FALSE (integer_zerop (wr_f_m1));
15462 ASSERT_TRUE (integer_zerop (c_i_0));
15463 ASSERT_FALSE (integer_zerop (c_i_1));
15464 ASSERT_FALSE (integer_zerop (c_i_m1));
15465 ASSERT_FALSE (integer_zerop (c_f_0));
15466 ASSERT_FALSE (integer_zerop (c_f_1));
15467 ASSERT_FALSE (integer_zerop (c_f_m1));
15469 /* Test integer_all_onesp. */
15470 ASSERT_FALSE (integer_all_onesp (i_0));
15471 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15472 ASSERT_FALSE (integer_all_onesp (i_1));
15473 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15474 ASSERT_TRUE (integer_all_onesp (i_m1));
15475 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15476 ASSERT_FALSE (integer_all_onesp (f_0));
15477 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15478 ASSERT_FALSE (integer_all_onesp (f_1));
15479 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15480 ASSERT_FALSE (integer_all_onesp (f_m1));
15481 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15482 ASSERT_FALSE (integer_all_onesp (c_i_0));
15483 ASSERT_FALSE (integer_all_onesp (c_i_1));
15484 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15485 ASSERT_FALSE (integer_all_onesp (c_f_0));
15486 ASSERT_FALSE (integer_all_onesp (c_f_1));
15487 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15489 /* Test integer_minus_onep. */
15490 ASSERT_FALSE (integer_minus_onep (i_0));
15491 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15492 ASSERT_FALSE (integer_minus_onep (i_1));
15493 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15494 ASSERT_TRUE (integer_minus_onep (i_m1));
15495 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15496 ASSERT_FALSE (integer_minus_onep (f_0));
15497 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15498 ASSERT_FALSE (integer_minus_onep (f_1));
15499 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15500 ASSERT_FALSE (integer_minus_onep (f_m1));
15501 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15502 ASSERT_FALSE (integer_minus_onep (c_i_0));
15503 ASSERT_FALSE (integer_minus_onep (c_i_1));
15504 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15505 ASSERT_FALSE (integer_minus_onep (c_f_0));
15506 ASSERT_FALSE (integer_minus_onep (c_f_1));
15507 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15509 /* Test integer_each_onep. */
15510 ASSERT_FALSE (integer_each_onep (i_0));
15511 ASSERT_FALSE (integer_each_onep (wr_i_0));
15512 ASSERT_TRUE (integer_each_onep (i_1));
15513 ASSERT_TRUE (integer_each_onep (wr_i_1));
15514 ASSERT_FALSE (integer_each_onep (i_m1));
15515 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15516 ASSERT_FALSE (integer_each_onep (f_0));
15517 ASSERT_FALSE (integer_each_onep (wr_f_0));
15518 ASSERT_FALSE (integer_each_onep (f_1));
15519 ASSERT_FALSE (integer_each_onep (wr_f_1));
15520 ASSERT_FALSE (integer_each_onep (f_m1));
15521 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15522 ASSERT_FALSE (integer_each_onep (c_i_0));
15523 ASSERT_FALSE (integer_each_onep (c_i_1));
15524 ASSERT_FALSE (integer_each_onep (c_i_m1));
15525 ASSERT_FALSE (integer_each_onep (c_f_0));
15526 ASSERT_FALSE (integer_each_onep (c_f_1));
15527 ASSERT_FALSE (integer_each_onep (c_f_m1));
15529 /* Test integer_truep. */
15530 ASSERT_FALSE (integer_truep (i_0));
15531 ASSERT_FALSE (integer_truep (wr_i_0));
15532 ASSERT_TRUE (integer_truep (i_1));
15533 ASSERT_TRUE (integer_truep (wr_i_1));
15534 ASSERT_FALSE (integer_truep (i_m1));
15535 ASSERT_FALSE (integer_truep (wr_i_m1));
15536 ASSERT_FALSE (integer_truep (f_0));
15537 ASSERT_FALSE (integer_truep (wr_f_0));
15538 ASSERT_FALSE (integer_truep (f_1));
15539 ASSERT_FALSE (integer_truep (wr_f_1));
15540 ASSERT_FALSE (integer_truep (f_m1));
15541 ASSERT_FALSE (integer_truep (wr_f_m1));
15542 ASSERT_FALSE (integer_truep (c_i_0));
15543 ASSERT_TRUE (integer_truep (c_i_1));
15544 ASSERT_FALSE (integer_truep (c_i_m1));
15545 ASSERT_FALSE (integer_truep (c_f_0));
15546 ASSERT_FALSE (integer_truep (c_f_1));
15547 ASSERT_FALSE (integer_truep (c_f_m1));
15549 /* Test integer_nonzerop. */
15550 ASSERT_FALSE (integer_nonzerop (i_0));
15551 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15552 ASSERT_TRUE (integer_nonzerop (i_1));
15553 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15554 ASSERT_TRUE (integer_nonzerop (i_m1));
15555 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15556 ASSERT_FALSE (integer_nonzerop (f_0));
15557 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15558 ASSERT_FALSE (integer_nonzerop (f_1));
15559 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15560 ASSERT_FALSE (integer_nonzerop (f_m1));
15561 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15562 ASSERT_FALSE (integer_nonzerop (c_i_0));
15563 ASSERT_TRUE (integer_nonzerop (c_i_1));
15564 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15565 ASSERT_FALSE (integer_nonzerop (c_f_0));
15566 ASSERT_FALSE (integer_nonzerop (c_f_1));
15567 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15569 /* Test real_zerop. */
15570 ASSERT_FALSE (real_zerop (i_0));
15571 ASSERT_FALSE (real_zerop (wr_i_0));
15572 ASSERT_FALSE (real_zerop (i_1));
15573 ASSERT_FALSE (real_zerop (wr_i_1));
15574 ASSERT_FALSE (real_zerop (i_m1));
15575 ASSERT_FALSE (real_zerop (wr_i_m1));
15576 ASSERT_TRUE (real_zerop (f_0));
15577 ASSERT_TRUE (real_zerop (wr_f_0));
15578 ASSERT_FALSE (real_zerop (f_1));
15579 ASSERT_FALSE (real_zerop (wr_f_1));
15580 ASSERT_FALSE (real_zerop (f_m1));
15581 ASSERT_FALSE (real_zerop (wr_f_m1));
15582 ASSERT_FALSE (real_zerop (c_i_0));
15583 ASSERT_FALSE (real_zerop (c_i_1));
15584 ASSERT_FALSE (real_zerop (c_i_m1));
15585 ASSERT_TRUE (real_zerop (c_f_0));
15586 ASSERT_FALSE (real_zerop (c_f_1));
15587 ASSERT_FALSE (real_zerop (c_f_m1));
15589 /* Test real_onep. */
15590 ASSERT_FALSE (real_onep (i_0));
15591 ASSERT_FALSE (real_onep (wr_i_0));
15592 ASSERT_FALSE (real_onep (i_1));
15593 ASSERT_FALSE (real_onep (wr_i_1));
15594 ASSERT_FALSE (real_onep (i_m1));
15595 ASSERT_FALSE (real_onep (wr_i_m1));
15596 ASSERT_FALSE (real_onep (f_0));
15597 ASSERT_FALSE (real_onep (wr_f_0));
15598 ASSERT_TRUE (real_onep (f_1));
15599 ASSERT_TRUE (real_onep (wr_f_1));
15600 ASSERT_FALSE (real_onep (f_m1));
15601 ASSERT_FALSE (real_onep (wr_f_m1));
15602 ASSERT_FALSE (real_onep (c_i_0));
15603 ASSERT_FALSE (real_onep (c_i_1));
15604 ASSERT_FALSE (real_onep (c_i_m1));
15605 ASSERT_FALSE (real_onep (c_f_0));
15606 ASSERT_TRUE (real_onep (c_f_1));
15607 ASSERT_FALSE (real_onep (c_f_m1));
15609 /* Test real_minus_onep. */
15610 ASSERT_FALSE (real_minus_onep (i_0));
15611 ASSERT_FALSE (real_minus_onep (wr_i_0));
15612 ASSERT_FALSE (real_minus_onep (i_1));
15613 ASSERT_FALSE (real_minus_onep (wr_i_1));
15614 ASSERT_FALSE (real_minus_onep (i_m1));
15615 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15616 ASSERT_FALSE (real_minus_onep (f_0));
15617 ASSERT_FALSE (real_minus_onep (wr_f_0));
15618 ASSERT_FALSE (real_minus_onep (f_1));
15619 ASSERT_FALSE (real_minus_onep (wr_f_1));
15620 ASSERT_TRUE (real_minus_onep (f_m1));
15621 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15622 ASSERT_FALSE (real_minus_onep (c_i_0));
15623 ASSERT_FALSE (real_minus_onep (c_i_1));
15624 ASSERT_FALSE (real_minus_onep (c_i_m1));
15625 ASSERT_FALSE (real_minus_onep (c_f_0));
15626 ASSERT_FALSE (real_minus_onep (c_f_1));
15627 ASSERT_TRUE (real_minus_onep (c_f_m1));
15629 /* Test zerop. */
15630 ASSERT_TRUE (zerop (i_0));
15631 ASSERT_TRUE (zerop (wr_i_0));
15632 ASSERT_FALSE (zerop (i_1));
15633 ASSERT_FALSE (zerop (wr_i_1));
15634 ASSERT_FALSE (zerop (i_m1));
15635 ASSERT_FALSE (zerop (wr_i_m1));
15636 ASSERT_TRUE (zerop (f_0));
15637 ASSERT_TRUE (zerop (wr_f_0));
15638 ASSERT_FALSE (zerop (f_1));
15639 ASSERT_FALSE (zerop (wr_f_1));
15640 ASSERT_FALSE (zerop (f_m1));
15641 ASSERT_FALSE (zerop (wr_f_m1));
15642 ASSERT_TRUE (zerop (c_i_0));
15643 ASSERT_FALSE (zerop (c_i_1));
15644 ASSERT_FALSE (zerop (c_i_m1));
15645 ASSERT_TRUE (zerop (c_f_0));
15646 ASSERT_FALSE (zerop (c_f_1));
15647 ASSERT_FALSE (zerop (c_f_m1));
15649 /* Test tree_expr_nonnegative_p. */
15650 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15651 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15652 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15653 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15654 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15655 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15656 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15657 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15658 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15659 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15660 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15661 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15662 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15663 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15664 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15665 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15666 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15667 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15669 /* Test tree_expr_nonzero_p. */
15670 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15671 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15672 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15673 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15674 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15675 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15677 /* Test integer_valued_real_p. */
15678 ASSERT_FALSE (integer_valued_real_p (i_0));
15679 ASSERT_TRUE (integer_valued_real_p (f_0));
15680 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15681 ASSERT_TRUE (integer_valued_real_p (f_1));
15682 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15684 /* Test integer_pow2p. */
15685 ASSERT_FALSE (integer_pow2p (i_0));
15686 ASSERT_TRUE (integer_pow2p (i_1));
15687 ASSERT_TRUE (integer_pow2p (wr_i_1));
15689 /* Test uniform_integer_cst_p. */
15690 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15691 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15692 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15693 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15694 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15695 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15696 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15697 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15698 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15699 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15700 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15701 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15702 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15703 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15704 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15705 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15706 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15707 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15708 }
15710 /* Check that string escaping works correctly. */
15712 static void
15713 test_escaped_strings (void)
15714 {
15715 int saved_cutoff;
15716 escaped_string msg;
15718 msg.escape (NULL);
15719 /* ASSERT_STREQ does not accept NULL as a valid test
15720 result, so we have to use ASSERT_EQ instead. */
15721 ASSERT_EQ (NULL, (const char *) msg);
15723 msg.escape ("");
15724 ASSERT_STREQ ("", (const char *) msg);
15726 msg.escape ("foobar");
15727 ASSERT_STREQ ("foobar", (const char *) msg);
15729 /* Ensure that we have -fmessage-length set to 0. */
15730 saved_cutoff = pp_line_cutoff (global_dc->printer);
15731 pp_line_cutoff (global_dc->printer) = 0;
15733 msg.escape ("foo\nbar");
15734 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15736 msg.escape ("\a\b\f\n\r\t\v");
15737 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15739 /* Now repeat the tests with -fmessage-length set to 5. */
15740 pp_line_cutoff (global_dc->printer) = 5;
15742 /* Note that the newline is not translated into an escape. */
15743 msg.escape ("foo\nbar");
15744 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15746 msg.escape ("\a\b\f\n\r\t\v");
15747 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
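/* With a nonzero -fmessage-length the newline is apparently kept
   literal so that the pretty-printer can still wrap the message, while
   the other control characters are escaped as before.  A minimal
   sketch of the escaping that holds in either mode (hypothetical
   input):

     escaped_string demo;
     demo.escape ("a\tb");
     ASSERT_STREQ ("a\\tb", (const char *) demo);  */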
15749 /* Restore the original message length setting. */
15750 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15751 }
15753 /* Run all of the selftests within this file. */
15755 void
15756 tree_cc_tests ()
15757 {
15758 test_integer_constants ();
15759 test_identifiers ();
15760 test_labels ();
15761 test_vector_cst_patterns ();
15762 test_location_wrappers ();
15763 test_predicates ();
15764 test_escaped_strings ();
15765 }
15767 } // namespace selftest
15769 #endif /* CHECKING_P */
15771 #include "gt-tree.h"