gcc/tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
 27 It is intended to be language-independent but can occasionally
 28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 /* Names of tree components.
78 Used for printing out the tree and error messages. */
79 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
80 #define END_OF_BASE_TREE_CODES "@dummy",
82 static const char *const tree_code_name[] = {
83 #include "all-tree.def"
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
89 /* Each tree code class has an associated string representation.
90 These must correspond to the tree_code_class entries. */
92 const char *const tree_code_class_strings[] =
94 "exceptional",
95 "constant",
96 "type",
97 "declaration",
98 "reference",
99 "comparison",
100 "unary",
101 "binary",
102 "statement",
103 "vl_exp",
104 "expression"
107 /* obstack.[ch] explicitly declined to prototype this. */
108 extern int _obstack_allocated_p (struct obstack *h, void *obj);
110 /* Statistics-gathering stuff. */
112 static uint64_t tree_code_counts[MAX_TREE_CODES];
113 uint64_t tree_node_counts[(int) all_kinds];
114 uint64_t tree_node_sizes[(int) all_kinds];
116 /* Keep in sync with tree.h:enum tree_node_kind. */
117 static const char * const tree_node_kind_names[] = {
118 "decls",
119 "types",
120 "blocks",
121 "stmts",
122 "refs",
123 "exprs",
124 "constants",
125 "identifiers",
126 "vecs",
127 "binfos",
128 "ssa names",
129 "constructors",
130 "random kinds",
131 "lang_decl kinds",
132 "lang_type kinds",
133 "omp clauses",
136 /* Unique id for next decl created. */
137 static GTY(()) int next_decl_uid;
138 /* Unique id for next type created. */
139 static GTY(()) unsigned next_type_uid = 1;
140 /* Unique id for next debug decl created. Use negative numbers,
141 to catch erroneous uses. */
142 static GTY(()) int next_debug_decl_uid;
144 /* Since we cannot rehash a type after it is in the table, we have to
145 keep the hash code. */
147 struct GTY((for_user)) type_hash {
148 unsigned long hash;
149 tree type;
152 /* Initial size of the hash table (rounded to next prime). */
153 #define TYPE_HASH_INITIAL_SIZE 1000
155 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
157 static hashval_t hash (type_hash *t) { return t->hash; }
158 static bool equal (type_hash *a, type_hash *b);
160 static int
161 keep_cache_entry (type_hash *&t)
163 return ggc_marked_p (t->type);
167 /* Now here is the hash table. When recording a type, it is added to
168 the slot whose index is the hash code. Note that the hash table is
169 used for several kinds of types (function types, array types and
170 array index range types, for now). While all these live in the
171 same table, they are completely independent, and the hash code is
172 computed differently for each of these. */
174 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
176 /* Hash table and temporary node for larger integer const values. */
177 static GTY (()) tree int_cst_node;
179 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
181 static hashval_t hash (tree t);
182 static bool equal (tree x, tree y);
185 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
187 /* Class and variable for making sure that there is a single POLY_INT_CST
188 for a given value. */
189 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
191 typedef std::pair<tree, const poly_wide_int *> compare_type;
192 static hashval_t hash (tree t);
193 static bool equal (tree x, const compare_type &y);
196 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
198 /* Hash table for optimization flags and target option flags. Use the same
199 hash table for both sets of options. Nodes for building the current
200 optimization and target option nodes. The assumption is most of the time
201 the options created will already be in the hash table, so we avoid
 202 allocating and freeing up a node repeatedly. */
203 static GTY (()) tree cl_optimization_node;
204 static GTY (()) tree cl_target_option_node;
206 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
212 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
214 /* General tree->tree mapping structure for use in hash tables. */
217 static GTY ((cache))
218 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
223 static GTY ((cache))
224 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
226 static void set_type_quals (tree, int);
227 static void print_type_hash_statistics (void);
228 static void print_debug_expr_statistics (void);
229 static void print_value_expr_statistics (void);
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
234 bool int_n_enabled_p[NUM_INT_N_ENTS];
235 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
237 bool tree_contains_struct[MAX_TREE_CODES][64];
239 /* Number of operands for each OMP clause. */
240 unsigned const char omp_clause_num_ops[] =
242 0, /* OMP_CLAUSE_ERROR */
243 1, /* OMP_CLAUSE_PRIVATE */
244 1, /* OMP_CLAUSE_SHARED */
245 1, /* OMP_CLAUSE_FIRSTPRIVATE */
246 2, /* OMP_CLAUSE_LASTPRIVATE */
247 5, /* OMP_CLAUSE_REDUCTION */
248 5, /* OMP_CLAUSE_TASK_REDUCTION */
249 5, /* OMP_CLAUSE_IN_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 1, /* OMP_CLAUSE_AFFINITY */
254 2, /* OMP_CLAUSE_ALIGNED */
255 3, /* OMP_CLAUSE_ALLOCATE */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_NONTEMPORAL */
258 1, /* OMP_CLAUSE_UNIFORM */
259 1, /* OMP_CLAUSE_ENTER */
260 1, /* OMP_CLAUSE_LINK */
261 1, /* OMP_CLAUSE_DETACH */
262 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
263 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
264 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
265 1, /* OMP_CLAUSE_INCLUSIVE */
266 1, /* OMP_CLAUSE_EXCLUSIVE */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
271 1, /* OMP_CLAUSE_DOACROSS */
272 2, /* OMP_CLAUSE__CACHE_ */
273 2, /* OMP_CLAUSE_GANG */
274 1, /* OMP_CLAUSE_ASYNC */
275 1, /* OMP_CLAUSE_WAIT */
276 0, /* OMP_CLAUSE_AUTO */
277 0, /* OMP_CLAUSE_SEQ */
278 1, /* OMP_CLAUSE__LOOPTEMP_ */
279 1, /* OMP_CLAUSE__REDUCTEMP_ */
280 1, /* OMP_CLAUSE__CONDTEMP_ */
281 1, /* OMP_CLAUSE__SCANTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_NUM_THREADS */
284 1, /* OMP_CLAUSE_SCHEDULE */
285 0, /* OMP_CLAUSE_NOWAIT */
286 1, /* OMP_CLAUSE_ORDERED */
287 0, /* OMP_CLAUSE_DEFAULT */
288 3, /* OMP_CLAUSE_COLLAPSE */
289 0, /* OMP_CLAUSE_UNTIED */
290 1, /* OMP_CLAUSE_FINAL */
291 0, /* OMP_CLAUSE_MERGEABLE */
292 1, /* OMP_CLAUSE_DEVICE */
293 1, /* OMP_CLAUSE_DIST_SCHEDULE */
294 0, /* OMP_CLAUSE_INBRANCH */
295 0, /* OMP_CLAUSE_NOTINBRANCH */
296 2, /* OMP_CLAUSE_NUM_TEAMS */
297 1, /* OMP_CLAUSE_THREAD_LIMIT */
298 0, /* OMP_CLAUSE_PROC_BIND */
299 1, /* OMP_CLAUSE_SAFELEN */
300 1, /* OMP_CLAUSE_SIMDLEN */
301 0, /* OMP_CLAUSE_DEVICE_TYPE */
302 0, /* OMP_CLAUSE_FOR */
303 0, /* OMP_CLAUSE_PARALLEL */
304 0, /* OMP_CLAUSE_SECTIONS */
305 0, /* OMP_CLAUSE_TASKGROUP */
306 1, /* OMP_CLAUSE_PRIORITY */
307 1, /* OMP_CLAUSE_GRAINSIZE */
308 1, /* OMP_CLAUSE_NUM_TASKS */
309 0, /* OMP_CLAUSE_NOGROUP */
310 0, /* OMP_CLAUSE_THREADS */
311 0, /* OMP_CLAUSE_SIMD */
312 1, /* OMP_CLAUSE_HINT */
313 0, /* OMP_CLAUSE_DEFAULTMAP */
314 0, /* OMP_CLAUSE_ORDER */
315 0, /* OMP_CLAUSE_BIND */
316 1, /* OMP_CLAUSE_FILTER */
317 1, /* OMP_CLAUSE__SIMDUID_ */
318 0, /* OMP_CLAUSE__SIMT_ */
319 0, /* OMP_CLAUSE_INDEPENDENT */
320 1, /* OMP_CLAUSE_WORKER */
321 1, /* OMP_CLAUSE_VECTOR */
322 1, /* OMP_CLAUSE_NUM_GANGS */
323 1, /* OMP_CLAUSE_NUM_WORKERS */
324 1, /* OMP_CLAUSE_VECTOR_LENGTH */
325 3, /* OMP_CLAUSE_TILE */
326 0, /* OMP_CLAUSE_IF_PRESENT */
327 0, /* OMP_CLAUSE_FINALIZE */
328 0, /* OMP_CLAUSE_NOHOST */
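/* Printable name of each clause, in the same enum omp_clause_code order as
   omp_clause_num_ops above; used in dumps and diagnostics. */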
331 const char * const omp_clause_code_name[] =
333 "error_clause",
334 "private",
335 "shared",
336 "firstprivate",
337 "lastprivate",
338 "reduction",
339 "task_reduction",
340 "in_reduction",
341 "copyin",
342 "copyprivate",
343 "linear",
344 "affinity",
345 "aligned",
346 "allocate",
347 "depend",
348 "nontemporal",
349 "uniform",
350 "enter",
351 "link",
352 "detach",
353 "use_device_ptr",
354 "use_device_addr",
355 "is_device_ptr",
356 "inclusive",
357 "exclusive",
358 "from",
359 "to",
360 "map",
361 "has_device_addr",
362 "doacross",
363 "_cache_",
364 "gang",
365 "async",
366 "wait",
367 "auto",
368 "seq",
369 "_looptemp_",
370 "_reductemp_",
371 "_condtemp_",
372 "_scantemp_",
373 "if",
374 "num_threads",
375 "schedule",
376 "nowait",
377 "ordered",
378 "default",
379 "collapse",
380 "untied",
381 "final",
382 "mergeable",
383 "device",
384 "dist_schedule",
385 "inbranch",
386 "notinbranch",
387 "num_teams",
388 "thread_limit",
389 "proc_bind",
390 "safelen",
391 "simdlen",
392 "device_type",
393 "for",
394 "parallel",
395 "sections",
396 "taskgroup",
397 "priority",
398 "grainsize",
399 "num_tasks",
400 "nogroup",
401 "threads",
402 "simd",
403 "hint",
404 "defaultmap",
405 "order",
406 "bind",
407 "filter",
408 "_simduid_",
409 "_simt_",
410 "independent",
411 "worker",
412 "vector",
413 "num_gangs",
414 "num_workers",
415 "vector_length",
416 "tile",
417 "if_present",
418 "finalize",
419 "nohost",
422 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
 423 clause names, but for use in diagnostics etc. we would like to use the "user"
424 clause names. */
426 const char *
427 user_omp_clause_code_name (tree clause, bool oacc)
429 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
430 distinguish clauses as seen by the user. See also where front ends do
431 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
432 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
433 switch (OMP_CLAUSE_MAP_KIND (clause))
435 case GOMP_MAP_FORCE_ALLOC:
436 case GOMP_MAP_ALLOC: return "create";
437 case GOMP_MAP_FORCE_TO:
438 case GOMP_MAP_TO: return "copyin";
439 case GOMP_MAP_FORCE_FROM:
440 case GOMP_MAP_FROM: return "copyout";
441 case GOMP_MAP_FORCE_TOFROM:
442 case GOMP_MAP_TOFROM: return "copy";
443 case GOMP_MAP_RELEASE: return "delete";
444 case GOMP_MAP_FORCE_PRESENT: return "present";
445 case GOMP_MAP_ATTACH: return "attach";
446 case GOMP_MAP_FORCE_DETACH:
447 case GOMP_MAP_DETACH: return "detach";
448 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
449 case GOMP_MAP_LINK: return "link";
450 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
451 default: break;
454 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
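/* For example, an OpenACC 'copyin (x)' clause is represented internally as an
   OMP_CLAUSE_MAP with kind GOMP_MAP_TO (or GOMP_MAP_FORCE_TO), so when OACC is
   true this returns "copyin" rather than the internal clause name "map". */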
458 /* Return the tree node structure used by tree code CODE. */
460 static inline enum tree_node_structure_enum
461 tree_node_structure_for_code (enum tree_code code)
463 switch (TREE_CODE_CLASS (code))
465 case tcc_declaration:
466 switch (code)
468 case CONST_DECL: return TS_CONST_DECL;
469 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
470 case FIELD_DECL: return TS_FIELD_DECL;
471 case FUNCTION_DECL: return TS_FUNCTION_DECL;
472 case LABEL_DECL: return TS_LABEL_DECL;
473 case PARM_DECL: return TS_PARM_DECL;
474 case RESULT_DECL: return TS_RESULT_DECL;
475 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
476 case TYPE_DECL: return TS_TYPE_DECL;
477 case VAR_DECL: return TS_VAR_DECL;
478 default: return TS_DECL_NON_COMMON;
481 case tcc_type: return TS_TYPE_NON_COMMON;
483 case tcc_binary:
484 case tcc_comparison:
485 case tcc_expression:
486 case tcc_reference:
487 case tcc_statement:
488 case tcc_unary:
489 case tcc_vl_exp: return TS_EXP;
491 default: /* tcc_constant and tcc_exceptional */
492 break;
495 switch (code)
497 /* tcc_constant cases. */
498 case COMPLEX_CST: return TS_COMPLEX;
499 case FIXED_CST: return TS_FIXED_CST;
500 case INTEGER_CST: return TS_INT_CST;
501 case POLY_INT_CST: return TS_POLY_INT_CST;
502 case REAL_CST: return TS_REAL_CST;
503 case STRING_CST: return TS_STRING;
504 case VECTOR_CST: return TS_VECTOR;
505 case VOID_CST: return TS_TYPED;
507 /* tcc_exceptional cases. */
508 case BLOCK: return TS_BLOCK;
509 case CONSTRUCTOR: return TS_CONSTRUCTOR;
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case OMP_CLAUSE: return TS_OMP_CLAUSE;
513 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
514 case PLACEHOLDER_EXPR: return TS_COMMON;
515 case SSA_NAME: return TS_SSA_NAME;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
518 case TREE_BINFO: return TS_BINFO;
519 case TREE_LIST: return TS_LIST;
520 case TREE_VEC: return TS_VEC;
522 default:
523 gcc_unreachable ();
528 /* Initialize tree_contains_struct to describe the hierarchy of tree
529 nodes. */
531 static void
532 initialize_tree_contains_struct (void)
534 unsigned i;
536 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
538 enum tree_code code;
539 enum tree_node_structure_enum ts_code;
541 code = (enum tree_code) i;
542 ts_code = tree_node_structure_for_code (code);
544 /* Mark the TS structure itself. */
545 tree_contains_struct[code][ts_code] = 1;
547 /* Mark all the structures that TS is derived from. */
548 switch (ts_code)
550 case TS_TYPED:
551 case TS_BLOCK:
552 case TS_OPTIMIZATION:
553 case TS_TARGET_OPTION:
554 MARK_TS_BASE (code);
555 break;
557 case TS_COMMON:
558 case TS_INT_CST:
559 case TS_POLY_INT_CST:
560 case TS_REAL_CST:
561 case TS_FIXED_CST:
562 case TS_VECTOR:
563 case TS_STRING:
564 case TS_COMPLEX:
565 case TS_SSA_NAME:
566 case TS_CONSTRUCTOR:
567 case TS_EXP:
568 case TS_STATEMENT_LIST:
569 MARK_TS_TYPED (code);
570 break;
572 case TS_IDENTIFIER:
573 case TS_DECL_MINIMAL:
574 case TS_TYPE_COMMON:
575 case TS_LIST:
576 case TS_VEC:
577 case TS_BINFO:
578 case TS_OMP_CLAUSE:
579 MARK_TS_COMMON (code);
580 break;
582 case TS_TYPE_WITH_LANG_SPECIFIC:
583 MARK_TS_TYPE_COMMON (code);
584 break;
586 case TS_TYPE_NON_COMMON:
587 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
588 break;
590 case TS_DECL_COMMON:
591 MARK_TS_DECL_MINIMAL (code);
592 break;
594 case TS_DECL_WRTL:
595 case TS_CONST_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 case TS_DECL_NON_COMMON:
600 MARK_TS_DECL_WITH_VIS (code);
601 break;
603 case TS_DECL_WITH_VIS:
604 case TS_PARM_DECL:
605 case TS_LABEL_DECL:
606 case TS_RESULT_DECL:
607 MARK_TS_DECL_WRTL (code);
608 break;
610 case TS_FIELD_DECL:
611 MARK_TS_DECL_COMMON (code);
612 break;
614 case TS_VAR_DECL:
615 MARK_TS_DECL_WITH_VIS (code);
616 break;
618 case TS_TYPE_DECL:
619 case TS_FUNCTION_DECL:
620 MARK_TS_DECL_NON_COMMON (code);
621 break;
623 case TS_TRANSLATION_UNIT_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
627 default:
628 gcc_unreachable ();
632 /* Basic consistency checks for attributes used in fold. */
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
634 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
646 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
662 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
663 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
665 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
666 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
667 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
669 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
670 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
676 /* Init tree.cc. */
678 void
679 init_ttree (void)
681 /* Initialize the hash table of types. */
682 type_hash_table
683 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
685 debug_expr_for_decl
686 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
688 value_expr_for_decl
689 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
691 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
693 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
695 int_cst_node = make_int_cst (1, 1);
697 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
699 cl_optimization_node = make_node (OPTIMIZATION_NODE);
700 cl_target_option_node = make_node (TARGET_OPTION_NODE);
702 /* Initialize the tree_contains_struct array. */
703 initialize_tree_contains_struct ();
704 lang_hooks.init_ts ();
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE, if this changes the
721 name. */
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 /* Return true if DECL may need an assembler name to be set. */
732 static inline bool
733 need_assembler_name_p (tree decl)
 735 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
 736 Rule merging. This makes type_odr_p return true on those types during
 737 LTO, and by comparing the mangled names we can tell which types are intended
 738 to be equivalent across compilation units.
 740 We do not store names of type_in_anonymous_namespace_p.
 742 Record, union and enumeration types have linkage that allows us
 743 to check type_in_anonymous_namespace_p. We do not mangle compound types
 744 that can always be compared structurally.
 746 Similarly for builtin types, we compare properties of their main variant.
 747 Integer types are a special case, since mangling does distinguish
 748 char/signed char/unsigned char etc. Storing names for these allows
 749 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
 750 See cp/mangle.cc:write_builtin_type for details. */
752 if (TREE_CODE (decl) == TYPE_DECL)
754 if (DECL_NAME (decl)
755 && decl == TYPE_NAME (TREE_TYPE (decl))
756 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
757 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
758 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
759 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
760 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
761 && (type_with_linkage_p (TREE_TYPE (decl))
762 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
763 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
764 return !DECL_ASSEMBLER_NAME_SET_P (decl);
765 return false;
767 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
768 if (!VAR_OR_FUNCTION_DECL_P (decl))
769 return false;
771 /* If DECL already has its assembler name set, it does not need a
772 new one. */
773 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
774 || DECL_ASSEMBLER_NAME_SET_P (decl))
775 return false;
777 /* Abstract decls do not need an assembler name. */
778 if (DECL_ABSTRACT_P (decl))
779 return false;
781 /* For VAR_DECLs, only static, public and external symbols need an
782 assembler name. */
783 if (VAR_P (decl)
784 && !TREE_STATIC (decl)
785 && !TREE_PUBLIC (decl)
786 && !DECL_EXTERNAL (decl))
787 return false;
789 if (TREE_CODE (decl) == FUNCTION_DECL)
791 /* Do not set assembler name on builtins. Allow RTL expansion to
792 decide whether to expand inline or via a regular call. */
793 if (fndecl_built_in_p (decl)
794 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
795 return false;
797 /* Functions represented in the callgraph need an assembler name. */
798 if (cgraph_node::get (decl) != NULL)
799 return true;
801 /* Unused and not public functions don't need an assembler name. */
802 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
803 return false;
806 return true;
809 /* If T needs an assembler name, have one created for it. */
811 void
812 assign_assembler_name_if_needed (tree t)
814 if (need_assembler_name_p (t))
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
830 decl_assembler_name (t);
832 input_location = saved_location;
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
 858 /* When the target supports named sections, return the section name of NODE
 859 as a string, or NULL if it is in no section. */
860 const char *
861 decl_section_name (const_tree node)
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
 869 /* Set the section name of NODE to the string VALUE, or clear it
 870 if VALUE is NULL. */
871 void
872 set_decl_section_name (tree node, const char *value)
874 struct symtab_node *snode;
876 if (value == NULL)
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
889 /* Set section name of NODE to match the section name of OTHER.
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
907 else
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
930 struct varpool_node *vnode;
932 if (model == TLS_MODEL_NONE)
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
943 /* Compute the number of bytes occupied by a tree with code CODE.
944 This function cannot be used for nodes that have variable sizes,
945 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
946 size_t
947 tree_code_size (enum tree_code code)
949 switch (TREE_CODE_CLASS (code))
951 case tcc_declaration: /* A decl node */
952 switch (code)
954 case FIELD_DECL: return sizeof (tree_field_decl);
955 case PARM_DECL: return sizeof (tree_parm_decl);
956 case VAR_DECL: return sizeof (tree_var_decl);
957 case LABEL_DECL: return sizeof (tree_label_decl);
958 case RESULT_DECL: return sizeof (tree_result_decl);
959 case CONST_DECL: return sizeof (tree_const_decl);
960 case TYPE_DECL: return sizeof (tree_type_decl);
961 case FUNCTION_DECL: return sizeof (tree_function_decl);
962 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
963 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
964 case NAMESPACE_DECL:
965 case IMPORTED_DECL:
966 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
967 default:
968 gcc_checking_assert (code >= NUM_TREE_CODES);
969 return lang_hooks.tree_size (code);
972 case tcc_type: /* a type node */
973 switch (code)
975 case OFFSET_TYPE:
976 case ENUMERAL_TYPE:
977 case BOOLEAN_TYPE:
978 case INTEGER_TYPE:
979 case REAL_TYPE:
980 case OPAQUE_TYPE:
981 case POINTER_TYPE:
982 case REFERENCE_TYPE:
983 case NULLPTR_TYPE:
984 case FIXED_POINT_TYPE:
985 case COMPLEX_TYPE:
986 case VECTOR_TYPE:
987 case ARRAY_TYPE:
988 case RECORD_TYPE:
989 case UNION_TYPE:
990 case QUAL_UNION_TYPE:
991 case VOID_TYPE:
992 case FUNCTION_TYPE:
993 case METHOD_TYPE:
994 case BITINT_TYPE:
995 case LANG_TYPE: return sizeof (tree_type_non_common);
996 default:
997 gcc_checking_assert (code >= NUM_TREE_CODES);
998 return lang_hooks.tree_size (code);
1001 case tcc_reference: /* a reference */
1002 case tcc_expression: /* an expression */
1003 case tcc_statement: /* an expression with side effects */
1004 case tcc_comparison: /* a comparison expression */
1005 case tcc_unary: /* a unary arithmetic expression */
1006 case tcc_binary: /* a binary arithmetic expression */
1007 return (sizeof (struct tree_exp)
1008 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1010 case tcc_constant: /* a constant */
1011 switch (code)
1013 case VOID_CST: return sizeof (tree_typed);
1014 case INTEGER_CST: gcc_unreachable ();
1015 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1016 case REAL_CST: return sizeof (tree_real_cst);
1017 case FIXED_CST: return sizeof (tree_fixed_cst);
1018 case COMPLEX_CST: return sizeof (tree_complex);
1019 case VECTOR_CST: gcc_unreachable ();
1020 case STRING_CST: gcc_unreachable ();
1021 default:
1022 gcc_checking_assert (code >= NUM_TREE_CODES);
1023 return lang_hooks.tree_size (code);
1026 case tcc_exceptional: /* something random, like an identifier. */
1027 switch (code)
1029 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1030 case TREE_LIST: return sizeof (tree_list);
1032 case ERROR_MARK:
1033 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1035 case TREE_VEC: gcc_unreachable ();
1036 case OMP_CLAUSE: gcc_unreachable ();
1038 case SSA_NAME: return sizeof (tree_ssa_name);
1040 case STATEMENT_LIST: return sizeof (tree_statement_list);
1041 case BLOCK: return sizeof (struct tree_block);
1042 case CONSTRUCTOR: return sizeof (tree_constructor);
1043 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1044 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1046 default:
1047 gcc_checking_assert (code >= NUM_TREE_CODES);
1048 return lang_hooks.tree_size (code);
1051 default:
1052 gcc_unreachable ();
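/* The variable-sized codes (TREE_VEC, INTEGER_CST, STRING_CST, VECTOR_CST,
   OMP_CLAUSE and the tcc_vl_exp class, e.g. CALL_EXPR) deliberately trap in
   the switch above; their size depends on the node contents and is computed
   by tree_size below. */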
1056 /* Compute the number of bytes occupied by NODE. This routine only
1057 looks at TREE_CODE, except for those nodes that have variable sizes. */
1058 size_t
1059 tree_size (const_tree node)
1061 const enum tree_code code = TREE_CODE (node);
1062 switch (code)
1064 case INTEGER_CST:
1065 return (sizeof (struct tree_int_cst)
1066 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1068 case TREE_BINFO:
1069 return (offsetof (struct tree_binfo, base_binfos)
1070 + vec<tree, va_gc>
1071 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1073 case TREE_VEC:
1074 return (sizeof (struct tree_vec)
1075 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1077 case VECTOR_CST:
1078 return (sizeof (struct tree_vector)
1079 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1081 case STRING_CST:
1082 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1084 case OMP_CLAUSE:
1085 return (sizeof (struct tree_omp_clause)
1086 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1087 * sizeof (tree));
1089 default:
1090 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1091 return (sizeof (struct tree_exp)
1092 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1093 else
1094 return tree_code_size (code);
1098 /* Return tree node kind based on tree CODE. */
1100 static tree_node_kind
1101 get_stats_node_kind (enum tree_code code)
1103 enum tree_code_class type = TREE_CODE_CLASS (code);
1105 switch (type)
1107 case tcc_declaration: /* A decl node */
1108 return d_kind;
1109 case tcc_type: /* a type node */
1110 return t_kind;
1111 case tcc_statement: /* an expression with side effects */
1112 return s_kind;
1113 case tcc_reference: /* a reference */
1114 return r_kind;
1115 case tcc_expression: /* an expression */
1116 case tcc_comparison: /* a comparison expression */
1117 case tcc_unary: /* a unary arithmetic expression */
1118 case tcc_binary: /* a binary arithmetic expression */
1119 return e_kind;
1120 case tcc_constant: /* a constant */
1121 return c_kind;
1122 case tcc_exceptional: /* something random, like an identifier. */
1123 switch (code)
1125 case IDENTIFIER_NODE:
1126 return id_kind;
1127 case TREE_VEC:
1128 return vec_kind;
1129 case TREE_BINFO:
1130 return binfo_kind;
1131 case SSA_NAME:
1132 return ssa_name_kind;
1133 case BLOCK:
1134 return b_kind;
1135 case CONSTRUCTOR:
1136 return constr_kind;
1137 case OMP_CLAUSE:
1138 return omp_clause_kind;
1139 default:
1140 return x_kind;
1142 break;
1143 case tcc_vl_exp:
1144 return e_kind;
1145 default:
1146 gcc_unreachable ();
1150 /* Record interesting allocation statistics for a tree node with CODE
1151 and LENGTH. */
1153 static void
1154 record_node_allocation_statistics (enum tree_code code, size_t length)
1156 if (!GATHER_STATISTICS)
1157 return;
1159 tree_node_kind kind = get_stats_node_kind (code);
1161 tree_code_counts[(int) code]++;
1162 tree_node_counts[(int) kind]++;
1163 tree_node_sizes[(int) kind] += length;
1166 /* Allocate and return a new UID from the DECL_UID namespace. */
1169 allocate_decl_uid (void)
1171 return next_decl_uid++;
1174 /* Return a newly allocated node of code CODE. For decl and type
1175 nodes, some other fields are initialized. The rest of the node is
1176 initialized to zero. This function cannot be used for TREE_VEC,
1177 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1178 tree_code_size.
1180 Achoo! I got a code in the node. */
1182 tree
1183 make_node (enum tree_code code MEM_STAT_DECL)
1185 tree t;
1186 enum tree_code_class type = TREE_CODE_CLASS (code);
1187 size_t length = tree_code_size (code);
1189 record_node_allocation_statistics (code, length);
1191 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1192 TREE_SET_CODE (t, code);
1194 switch (type)
1196 case tcc_statement:
1197 if (code != DEBUG_BEGIN_STMT)
1198 TREE_SIDE_EFFECTS (t) = 1;
1199 break;
1201 case tcc_declaration:
1202 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1204 if (code == FUNCTION_DECL)
1206 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1207 SET_DECL_MODE (t, FUNCTION_MODE);
1209 else
1210 SET_DECL_ALIGN (t, 1);
1212 DECL_SOURCE_LOCATION (t) = input_location;
1213 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1214 DECL_UID (t) = --next_debug_decl_uid;
1215 else
1217 DECL_UID (t) = allocate_decl_uid ();
1218 SET_DECL_PT_UID (t, -1);
1220 if (TREE_CODE (t) == LABEL_DECL)
1221 LABEL_DECL_UID (t) = -1;
1223 break;
1225 case tcc_type:
1226 TYPE_UID (t) = next_type_uid++;
1227 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1228 TYPE_USER_ALIGN (t) = 0;
1229 TYPE_MAIN_VARIANT (t) = t;
1230 TYPE_CANONICAL (t) = t;
1232 /* Default to no attributes for type, but let target change that. */
1233 TYPE_ATTRIBUTES (t) = NULL_TREE;
1234 targetm.set_default_type_attributes (t);
1236 /* We have not yet computed the alias set for this type. */
1237 TYPE_ALIAS_SET (t) = -1;
1238 break;
1240 case tcc_constant:
1241 TREE_CONSTANT (t) = 1;
1242 break;
1244 case tcc_expression:
1245 switch (code)
1247 case INIT_EXPR:
1248 case MODIFY_EXPR:
1249 case VA_ARG_EXPR:
1250 case PREDECREMENT_EXPR:
1251 case PREINCREMENT_EXPR:
1252 case POSTDECREMENT_EXPR:
1253 case POSTINCREMENT_EXPR:
1254 /* All of these have side-effects, no matter what their
1255 operands are. */
1256 TREE_SIDE_EFFECTS (t) = 1;
1257 break;
1259 default:
1260 break;
1262 break;
1264 case tcc_exceptional:
1265 switch (code)
1267 case TARGET_OPTION_NODE:
1268 TREE_TARGET_OPTION(t)
1269 = ggc_cleared_alloc<struct cl_target_option> ();
1270 break;
1272 case OPTIMIZATION_NODE:
1273 TREE_OPTIMIZATION (t)
1274 = ggc_cleared_alloc<struct cl_optimization> ();
1275 break;
1277 default:
1278 break;
1280 break;
1282 default:
1283 /* Other classes need no special treatment. */
1284 break;
1287 return t;
1290 /* Free tree node. */
1292 void
1293 free_node (tree node)
1295 enum tree_code code = TREE_CODE (node);
1296 if (GATHER_STATISTICS)
1298 enum tree_node_kind kind = get_stats_node_kind (code);
1300 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1301 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1302 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1304 tree_code_counts[(int) TREE_CODE (node)]--;
1305 tree_node_counts[(int) kind]--;
1306 tree_node_sizes[(int) kind] -= tree_size (node);
1308 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1309 vec_free (CONSTRUCTOR_ELTS (node));
1310 else if (code == BLOCK)
1311 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1312 else if (code == TREE_BINFO)
1313 vec_free (BINFO_BASE_ACCESSES (node));
1314 else if (code == OPTIMIZATION_NODE)
1315 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1316 else if (code == TARGET_OPTION_NODE)
1317 cl_target_option_free (TREE_TARGET_OPTION (node));
1318 ggc_free (node);
1321 /* Return a new node with the same contents as NODE except that its
1322 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1324 tree
1325 copy_node (tree node MEM_STAT_DECL)
1327 tree t;
1328 enum tree_code code = TREE_CODE (node);
1329 size_t length;
1331 gcc_assert (code != STATEMENT_LIST);
1333 length = tree_size (node);
1334 record_node_allocation_statistics (code, length);
1335 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1336 memcpy (t, node, length);
1338 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1339 TREE_CHAIN (t) = 0;
1340 TREE_ASM_WRITTEN (t) = 0;
1341 TREE_VISITED (t) = 0;
1343 if (TREE_CODE_CLASS (code) == tcc_declaration)
1345 if (code == DEBUG_EXPR_DECL)
1346 DECL_UID (t) = --next_debug_decl_uid;
1347 else
1349 DECL_UID (t) = allocate_decl_uid ();
1350 if (DECL_PT_UID_SET_P (node))
1351 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1353 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1354 && DECL_HAS_VALUE_EXPR_P (node))
1356 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1357 DECL_HAS_VALUE_EXPR_P (t) = 1;
1359 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1360 if (VAR_P (node))
1362 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1363 t->decl_with_vis.symtab_node = NULL;
1365 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1367 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1368 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1370 if (TREE_CODE (node) == FUNCTION_DECL)
1372 DECL_STRUCT_FUNCTION (t) = NULL;
1373 t->decl_with_vis.symtab_node = NULL;
1376 else if (TREE_CODE_CLASS (code) == tcc_type)
1378 TYPE_UID (t) = next_type_uid++;
1379 /* The following is so that the debug code for
1380 the copy is different from the original type.
1381 The two statements usually duplicate each other
1382 (because they clear fields of the same union),
1383 but the optimizer should catch that. */
1384 TYPE_SYMTAB_ADDRESS (t) = 0;
1385 TYPE_SYMTAB_DIE (t) = 0;
1387 /* Do not copy the values cache. */
1388 if (TYPE_CACHED_VALUES_P (t))
1390 TYPE_CACHED_VALUES_P (t) = 0;
1391 TYPE_CACHED_VALUES (t) = NULL_TREE;
1394 else if (code == TARGET_OPTION_NODE)
1396 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1397 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1398 sizeof (struct cl_target_option));
1400 else if (code == OPTIMIZATION_NODE)
1402 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1403 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1404 sizeof (struct cl_optimization));
1407 return t;
1410 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1411 For example, this can copy a list made of TREE_LIST nodes. */
1413 tree
1414 copy_list (tree list)
1416 tree head;
1417 tree prev, next;
1419 if (list == 0)
1420 return 0;
1422 head = prev = copy_node (list);
1423 next = TREE_CHAIN (list);
1424 while (next)
1426 TREE_CHAIN (prev) = copy_node (next);
1427 prev = TREE_CHAIN (prev);
1428 next = TREE_CHAIN (next);
1430 return head;
1434 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1435 INTEGER_CST with value CST and type TYPE. */
1437 static unsigned int
1438 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1440 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1441 /* We need extra HWIs if CST is an unsigned integer with its
1442 upper bit set. */
1443 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1444 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1445 return cst.get_len ();
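/* For example, assuming a 64-bit HOST_WIDE_INT, a 64-bit unsigned constant
   with its high bit set needs 64 / 64 + 1 == 2 elements, so that the extra
   zero element keeps the stored value from reading back as negative. */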
1448 /* Return a new INTEGER_CST with value CST and type TYPE. */
1450 static tree
1451 build_new_int_cst (tree type, const wide_int &cst)
1453 unsigned int len = cst.get_len ();
1454 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1455 tree nt = make_int_cst (len, ext_len);
1457 if (len < ext_len)
1459 --ext_len;
1460 TREE_INT_CST_ELT (nt, ext_len)
1461 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1462 for (unsigned int i = len; i < ext_len; ++i)
1463 TREE_INT_CST_ELT (nt, i) = -1;
1465 else if (TYPE_UNSIGNED (type)
1466 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1468 len--;
1469 TREE_INT_CST_ELT (nt, len)
1470 = zext_hwi (cst.elt (len),
1471 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1474 for (unsigned int i = 0; i < len; i++)
1475 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1476 TREE_TYPE (nt) = type;
1477 return nt;
1480 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1482 static tree
1483 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1484 CXX_MEM_STAT_INFO)
1486 size_t length = sizeof (struct tree_poly_int_cst);
1487 record_node_allocation_statistics (POLY_INT_CST, length);
1489 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1491 TREE_SET_CODE (t, POLY_INT_CST);
1492 TREE_CONSTANT (t) = 1;
1493 TREE_TYPE (t) = type;
1494 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1495 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1496 return t;
1499 /* Create a constant tree that contains CST sign-extended to TYPE. */
1501 tree
1502 build_int_cst (tree type, poly_int64 cst)
1504 /* Support legacy code. */
1505 if (!type)
1506 type = integer_type_node;
1508 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1511 /* Create a constant tree that contains CST zero-extended to TYPE. */
1513 tree
1514 build_int_cstu (tree type, poly_uint64 cst)
1516 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1519 /* Create a constant tree that contains CST sign-extended to TYPE. */
1521 tree
1522 build_int_cst_type (tree type, poly_int64 cst)
1524 gcc_assert (type);
1525 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
 1528 /* Construct a tree of type TYPE with the value given by CST. The signedness
1529 of CST is assumed to be the same as the signedness of TYPE. */
1531 tree
1532 double_int_to_tree (tree type, double_int cst)
1534 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
 1537 /* We force the wide_int CST to the range of the type TYPE by sign or
 1538 zero extending it. OVERFLOWABLE indicates if we are interested in
 1539 overflow of the value: when >0 we are only interested in signed
 1540 overflow, for <0 we are interested in any overflow. OVERFLOWED
 1541 indicates whether overflow has already occurred. We force
 1543 the value to be within range of the type (by setting to 0 or 1 all
 1544 the bits outside the type's range). We set TREE_OVERFLOW if
 1545 OVERFLOWED is nonzero,
 1546 or OVERFLOWABLE is >0 and signed overflow occurs,
 1547 or OVERFLOWABLE is <0 and any overflow occurs.
 1548 We return a new tree node for the extended wide_int. The node
 1549 is shared if no overflow flags are set. */
1552 tree
1553 force_fit_type (tree type, const poly_wide_int_ref &cst,
1554 int overflowable, bool overflowed)
1556 signop sign = TYPE_SIGN (type);
1558 /* If we need to set overflow flags, return a new unshared node. */
1559 if (overflowed || !wi::fits_to_tree_p (cst, type))
1561 if (overflowed
1562 || overflowable < 0
1563 || (overflowable > 0 && sign == SIGNED))
1565 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1566 sign);
1567 tree t;
1568 if (tmp.is_constant ())
1569 t = build_new_int_cst (type, tmp.coeffs[0]);
1570 else
1572 tree coeffs[NUM_POLY_INT_COEFFS];
1573 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1575 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1576 TREE_OVERFLOW (coeffs[i]) = 1;
1578 t = build_new_poly_int_cst (type, coeffs);
1580 TREE_OVERFLOW (t) = 1;
1581 return t;
1585 /* Else build a shared node. */
1586 return wide_int_to_tree (type, cst);
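/* For example, forcing the value 300 into an 8-bit unsigned type yields 44;
   TREE_OVERFLOW is set on the result only if OVERFLOWED is true, or if
   OVERFLOWABLE is < 0, or if OVERFLOWABLE is > 0 and the type is signed. */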
1589 /* These are the hash table functions for the hash table of INTEGER_CST
 1590 nodes. */
 1592 /* Return the hash code for X, an INTEGER_CST. */
1594 hashval_t
1595 int_cst_hasher::hash (tree x)
1597 const_tree const t = x;
1598 hashval_t code = TYPE_UID (TREE_TYPE (t));
1599 int i;
1601 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1602 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1604 return code;
1607 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1608 is the same as that given by *Y, which is the same. */
1610 bool
1611 int_cst_hasher::equal (tree x, tree y)
1613 const_tree const xt = x;
1614 const_tree const yt = y;
1616 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1617 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1618 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1619 return false;
1621 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1622 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1623 return false;
1625 return true;
1628 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1629 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1630 number of slots that can be cached for the type. */
1632 static inline tree
1633 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1634 int slot, int max_slots)
1636 gcc_checking_assert (slot >= 0);
1637 /* Initialize cache. */
1638 if (!TYPE_CACHED_VALUES_P (type))
1640 TYPE_CACHED_VALUES_P (type) = 1;
1641 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1643 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1644 if (!t)
1646 /* Create a new shared int. */
1647 t = build_new_int_cst (type, cst);
1648 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1650 return t;
 1653 /* Create an INTEGER_CST node of TYPE and value CST.
1654 The returned node is always shared. For small integers we use a
1655 per-type vector cache, for larger ones we use a single hash table.
1656 The value is extended from its precision according to the sign of
1657 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1658 the upper bits and ensures that hashing and value equality based
1659 upon the underlying HOST_WIDE_INTs works without masking. */
1661 static tree
1662 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1664 tree t;
1665 int ix = -1;
1666 int limit = 0;
1668 gcc_assert (type);
1669 unsigned int prec = TYPE_PRECISION (type);
1670 signop sgn = TYPE_SIGN (type);
1672 /* Verify that everything is canonical. */
1673 int l = pcst.get_len ();
1674 if (l > 1)
1676 if (pcst.elt (l - 1) == 0)
1677 gcc_checking_assert (pcst.elt (l - 2) < 0);
1678 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1679 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1682 wide_int cst = wide_int::from (pcst, prec, sgn);
1683 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1685 enum tree_code code = TREE_CODE (type);
1686 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1688 /* Cache NULL pointer and zero bounds. */
1689 if (cst == 0)
1690 ix = 0;
1691 /* Cache upper bounds of pointers. */
1692 else if (cst == wi::max_value (prec, sgn))
1693 ix = 1;
1694 /* Cache 1 which is used for a non-zero range. */
1695 else if (cst == 1)
1696 ix = 2;
1698 if (ix >= 0)
1700 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1701 /* Make sure no one is clobbering the shared constant. */
1702 gcc_checking_assert (TREE_TYPE (t) == type
1703 && cst == wi::to_wide (t));
1704 return t;
1707 if (ext_len == 1)
1709 /* We just need to store a single HOST_WIDE_INT. */
1710 HOST_WIDE_INT hwi;
1711 if (TYPE_UNSIGNED (type))
1712 hwi = cst.to_uhwi ();
1713 else
1714 hwi = cst.to_shwi ();
1716 switch (code)
1718 case NULLPTR_TYPE:
1719 gcc_assert (hwi == 0);
1720 /* Fallthru. */
1722 case POINTER_TYPE:
1723 case REFERENCE_TYPE:
1724 /* Ignore pointers, as they were already handled above. */
1725 break;
1727 case BOOLEAN_TYPE:
1728 /* Cache false or true. */
1729 limit = 2;
1730 if (IN_RANGE (hwi, 0, 1))
1731 ix = hwi;
1732 break;
1734 case INTEGER_TYPE:
1735 case OFFSET_TYPE:
1736 case BITINT_TYPE:
1737 if (TYPE_SIGN (type) == UNSIGNED)
1739 /* Cache [0, N). */
1740 limit = param_integer_share_limit;
1741 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1742 ix = hwi;
1744 else
1746 /* Cache [-1, N). */
1747 limit = param_integer_share_limit + 1;
1748 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1749 ix = hwi + 1;
1751 break;
1753 case ENUMERAL_TYPE:
1754 break;
1756 default:
1757 gcc_unreachable ();
1760 if (ix >= 0)
1762 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1763 /* Make sure no one is clobbering the shared constant. */
1764 gcc_checking_assert (TREE_TYPE (t) == type
1765 && TREE_INT_CST_NUNITS (t) == 1
1766 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1767 && TREE_INT_CST_EXT_NUNITS (t) == 1
1768 && TREE_INT_CST_ELT (t, 0) == hwi);
1769 return t;
1771 else
1773 /* Use the cache of larger shared ints, using int_cst_node as
1774 a temporary. */
1776 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1777 TREE_TYPE (int_cst_node) = type;
1779 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1780 t = *slot;
1781 if (!t)
1783 /* Insert this one into the hash table. */
1784 t = int_cst_node;
1785 *slot = t;
1786 /* Make a new node for next time round. */
1787 int_cst_node = make_int_cst (1, 1);
1791 else
1793 /* The value either hashes properly or we drop it on the floor
1794 for the gc to take care of. There will not be enough of them
1795 to worry about. */
1797 tree nt = build_new_int_cst (type, cst);
1798 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1799 t = *slot;
1800 if (!t)
1802 /* Insert this one into the hash table. */
1803 t = nt;
1804 *slot = t;
1806 else
1807 ggc_free (nt);
1810 return t;
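/* Consequently, repeated requests for a small value such as 7 in
   integer_type_node (assuming it is below param_integer_share_limit) return
   the same TYPE_CACHED_VALUES element, while larger values are shared
   through int_cst_hash_table. */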
1813 hashval_t
1814 poly_int_cst_hasher::hash (tree t)
1816 inchash::hash hstate;
1818 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1819 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1820 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1822 return hstate.end ();
1825 bool
1826 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1828 if (TREE_TYPE (x) != y.first)
1829 return false;
1830 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1831 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1832 return false;
1833 return true;
1836 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1837 The elements must also have type TYPE. */
1839 tree
1840 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1842 unsigned int prec = TYPE_PRECISION (type);
1843 gcc_assert (prec <= values.coeffs[0].get_precision ());
1844 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1846 inchash::hash h;
1847 h.add_int (TYPE_UID (type));
1848 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1849 h.add_wide_int (c.coeffs[i]);
1850 poly_int_cst_hasher::compare_type comp (type, &c);
1851 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1852 INSERT);
1853 if (*slot == NULL_TREE)
1855 tree coeffs[NUM_POLY_INT_COEFFS];
1856 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1857 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1858 *slot = build_new_poly_int_cst (type, coeffs);
1860 return *slot;
1863 /* Create a constant tree with value VALUE in type TYPE. */
1865 tree
1866 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1868 if (value.is_constant ())
1869 return wide_int_to_tree_1 (type, value.coeffs[0]);
1870 return build_poly_int_cst (type, value);
1873 /* Insert INTEGER_CST T into a cache of integer constants. And return
1874 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1875 is false, and T falls into the type's 'smaller values' range, there
1876 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1877 or the value is large, should an existing entry exist, it is
1878 returned (rather than inserting T). */
1880 tree
1881 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1883 tree type = TREE_TYPE (t);
1884 int ix = -1;
1885 int limit = 0;
1886 int prec = TYPE_PRECISION (type);
1888 gcc_assert (!TREE_OVERFLOW (t));
1890 /* The caching indices here must match those in
 1891 wide_int_to_tree_1. */
1892 switch (TREE_CODE (type))
1894 case NULLPTR_TYPE:
1895 gcc_checking_assert (integer_zerop (t));
1896 /* Fallthru. */
1898 case POINTER_TYPE:
1899 case REFERENCE_TYPE:
1901 if (integer_zerop (t))
1902 ix = 0;
1903 else if (integer_onep (t))
1904 ix = 2;
1906 if (ix >= 0)
1907 limit = 3;
1909 break;
1911 case BOOLEAN_TYPE:
1912 /* Cache false or true. */
1913 limit = 2;
1914 if (wi::ltu_p (wi::to_wide (t), 2))
1915 ix = TREE_INT_CST_ELT (t, 0);
1916 break;
1918 case INTEGER_TYPE:
1919 case OFFSET_TYPE:
1920 case BITINT_TYPE:
1921 if (TYPE_UNSIGNED (type))
1923 /* Cache 0..N */
1924 limit = param_integer_share_limit;
 1926 /* This is a little hokey, but if the prec is smaller than
1927 what is necessary to hold param_integer_share_limit, then the
1928 obvious test will not get the correct answer. */
1929 if (prec < HOST_BITS_PER_WIDE_INT)
1931 if (tree_to_uhwi (t)
1932 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1933 ix = tree_to_uhwi (t);
1935 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1936 ix = tree_to_uhwi (t);
1938 else
1940 /* Cache -1..N */
1941 limit = param_integer_share_limit + 1;
1943 if (integer_minus_onep (t))
1944 ix = 0;
1945 else if (!wi::neg_p (wi::to_wide (t)))
1947 if (prec < HOST_BITS_PER_WIDE_INT)
1949 if (tree_to_shwi (t) < param_integer_share_limit)
1950 ix = tree_to_shwi (t) + 1;
1952 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1953 ix = tree_to_shwi (t) + 1;
1956 break;
1958 case ENUMERAL_TYPE:
1959 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1960 members. */
1961 break;
1963 default:
1964 gcc_unreachable ();
1967 if (ix >= 0)
1969 /* Look for it in the type's vector of small shared ints. */
1970 if (!TYPE_CACHED_VALUES_P (type))
1972 TYPE_CACHED_VALUES_P (type) = 1;
1973 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1976 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1978 gcc_checking_assert (might_duplicate);
1979 t = r;
1981 else
1982 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1984 else
1986 /* Use the cache of larger shared ints. */
1987 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1988 if (tree r = *slot)
1990 /* If there is already an entry for the number verify it's the
1991 same value. */
1992 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1993 /* And return the cached value. */
1994 t = r;
1996 else
1997 /* Otherwise insert this one into the hash table. */
1998 *slot = t;
2001 return t;
 2005 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
2006 and the rest are zeros. */
2008 tree
2009 build_low_bits_mask (tree type, unsigned bits)
2011 gcc_assert (bits <= TYPE_PRECISION (type));
2013 return wide_int_to_tree (type, wi::mask (bits, false,
2014 TYPE_PRECISION (type)));
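/* Illustrative sketch (not part of this file): building an 8-bit mask in a
   32-bit unsigned type.  Assuming unsigned int is 32 bits on the target,
   the resulting INTEGER_CST has the value 0xff.

     tree mask = build_low_bits_mask (unsigned_type_node, 8);

   wi::mask (8, false, 32) produces the low-8-bits-set wide_int, which
   wide_int_to_tree then turns into a (possibly shared) INTEGER_CST.  */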
2017 /* Check that X is an integer constant that can be expressed in a
2018 (signed or unsigned) HOST_WIDE_INT without loss of precision. */
2020 bool
2021 cst_and_fits_in_hwi (const_tree x)
2023 return (TREE_CODE (x) == INTEGER_CST
2024 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
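/* Illustrative sketch (not part of this file): the usual pairing is to test
   with cst_and_fits_in_hwi and then read the value with int_cst_value,
   where STEP is assumed to be some INTEGER_CST operand.

     if (cst_and_fits_in_hwi (step))
       {
         HOST_WIDE_INT s = int_cst_value (step);
         ...
       }
*/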
2027 /* Build a newly constructed VECTOR_CST with the given values of
2028 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2030 tree
2031 make_vector (unsigned log2_npatterns,
2032 unsigned int nelts_per_pattern MEM_STAT_DECL)
2034 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2035 tree t;
2036 unsigned npatterns = 1 << log2_npatterns;
2037 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2038 unsigned length = (sizeof (struct tree_vector)
2039 + (encoded_nelts - 1) * sizeof (tree));
2041 record_node_allocation_statistics (VECTOR_CST, length);
2043 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2045 TREE_SET_CODE (t, VECTOR_CST);
2046 TREE_CONSTANT (t) = 1;
2047 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2048 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2050 return t;
2053 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2054 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2056 tree
2057 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2059 if (vec_safe_length (v) == 0)
2060 return build_zero_cst (type);
2062 unsigned HOST_WIDE_INT idx, nelts;
2063 tree value;
2065 /* We can't construct a VECTOR_CST for a variable number of elements. */
2066 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2067 tree_vector_builder vec (type, nelts, 1);
2068 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2070 if (TREE_CODE (value) == VECTOR_CST)
2072 /* If NELTS is constant then this must be too. */
2073 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2074 for (unsigned i = 0; i < sub_nelts; ++i)
2075 vec.quick_push (VECTOR_CST_ELT (value, i));
2077 else
2078 vec.quick_push (value);
2080 while (vec.length () < nelts)
2081 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2083 return vec.build ();
2086 /* Build a vector of type VECTYPE where all the elements are SCs. */
2087 tree
2088 build_vector_from_val (tree vectype, tree sc)
2090 unsigned HOST_WIDE_INT i, nunits;
2092 if (sc == error_mark_node)
2093 return sc;
2095 /* Verify that the vector type is suitable for SC. Note that there
2096 is some inconsistency in the type-system with respect to restrict
2097 qualifications of pointers. Vector types always have a main-variant
2098 element type and the qualification is applied to the vector-type.
2099 So TREE_TYPE (vector-type) does not return a properly qualified
2100 vector element-type. */
2101 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2102 TREE_TYPE (vectype)));
2104 if (CONSTANT_CLASS_P (sc))
2106 tree_vector_builder v (vectype, 1, 1);
2107 v.quick_push (sc);
2108 return v.build ();
2110 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2111 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2112 else
2114 vec<constructor_elt, va_gc> *v;
2115 vec_alloc (v, nunits);
2116 for (i = 0; i < nunits; ++i)
2117 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2118 return build_constructor (vectype, v);
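/* Illustrative sketch (not part of this file): splatting the constant 7
   across a four-element unsigned vector.  Because the scalar is a constant,
   the result is a single-pattern VECTOR_CST rather than a CONSTRUCTOR.

     tree vtype = build_vector_type (unsigned_type_node, 4);
     tree splat = build_vector_from_val (vtype,
                                         build_int_cst (unsigned_type_node, 7));
*/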
2122 /* If TYPE is not a vector type, just return SC, otherwise return
2123 build_vector_from_val (TYPE, SC). */
2125 tree
2126 build_uniform_cst (tree type, tree sc)
2128 if (!VECTOR_TYPE_P (type))
2129 return sc;
2131 return build_vector_from_val (type, sc);
2134 /* Build a vector series of type TYPE in which element I has the value
2135 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2136 and a VEC_SERIES_EXPR otherwise. */
2138 tree
2139 build_vec_series (tree type, tree base, tree step)
2141 if (integer_zerop (step))
2142 return build_vector_from_val (type, base);
2143 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2145 tree_vector_builder builder (type, 1, 3);
2146 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2147 wi::to_wide (base) + wi::to_wide (step));
2148 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2149 wi::to_wide (elt1) + wi::to_wide (step));
2150 builder.quick_push (base);
2151 builder.quick_push (elt1);
2152 builder.quick_push (elt2);
2153 return builder.build ();
2155 return build2 (VEC_SERIES_EXPR, type, base, step);
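/* Illustrative sketch (not part of this file): a constant series
   { 0, 2, 4, 6, ... } of integer elements.  With constant BASE and STEP the
   result is a VECTOR_CST encoded with three elements per pattern; otherwise
   a VEC_SERIES_EXPR is built.

     tree vtype = build_vector_type (integer_type_node, 4);
     tree series = build_vec_series (vtype,
                                     build_int_cst (integer_type_node, 0),
                                     build_int_cst (integer_type_node, 2));
*/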
2158 /* Return a vector with the same number of units and number of bits
2159 as VEC_TYPE, but in which the elements are a linear series of unsigned
2160 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2162 tree
2163 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2165 tree index_vec_type = vec_type;
2166 tree index_elt_type = TREE_TYPE (vec_type);
2167 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2168 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2170 index_elt_type = build_nonstandard_integer_type
2171 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2172 index_vec_type = build_vector_type (index_elt_type, nunits);
2175 tree_vector_builder v (index_vec_type, 1, 3);
2176 for (unsigned int i = 0; i < 3; ++i)
2177 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2178 return v.build ();
2181 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2182 elements are A and the rest are B. */
2184 tree
2185 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2187 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2188 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2189 /* Optimize the constant case. */
2190 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2191 count /= 2;
2192 tree_vector_builder builder (vec_type, count, 2);
2193 for (unsigned int i = 0; i < count * 2; ++i)
2194 builder.quick_push (i < num_a ? a : b);
2195 return builder.build ();
2198 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2199 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2201 void
2202 recompute_constructor_flags (tree c)
2204 unsigned int i;
2205 tree val;
2206 bool constant_p = true;
2207 bool side_effects_p = false;
2208 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2210 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2212 /* Mostly ctors will have elts that don't have side-effects, so
2213 the usual case is to scan all the elements. Hence a single
2214 loop for both const and side effects, rather than one loop
2215 each (with early outs). */
2216 if (!TREE_CONSTANT (val))
2217 constant_p = false;
2218 if (TREE_SIDE_EFFECTS (val))
2219 side_effects_p = true;
2222 TREE_SIDE_EFFECTS (c) = side_effects_p;
2223 TREE_CONSTANT (c) = constant_p;
2226 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2227 CONSTRUCTOR C. */
2229 void
2230 verify_constructor_flags (tree c)
2232 unsigned int i;
2233 tree val;
2234 bool constant_p = TREE_CONSTANT (c);
2235 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2236 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2238 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2240 if (constant_p && !TREE_CONSTANT (val))
2241 internal_error ("non-constant element in constant CONSTRUCTOR");
2242 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2243 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2247 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2248 are in the vec pointed to by VALS. */
2249 tree
2250 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2252 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2254 TREE_TYPE (c) = type;
2255 CONSTRUCTOR_ELTS (c) = vals;
2257 recompute_constructor_flags (c);
2259 return c;
2262 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2263 INDEX and VALUE. */
2264 tree
2265 build_constructor_single (tree type, tree index, tree value)
2267 vec<constructor_elt, va_gc> *v;
2268 constructor_elt elt = {index, value};
2270 vec_alloc (v, 1);
2271 v->quick_push (elt);
2273 return build_constructor (type, v);
2277 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2278 are in a list pointed to by VALS. */
2279 tree
2280 build_constructor_from_list (tree type, tree vals)
2282 tree t;
2283 vec<constructor_elt, va_gc> *v = NULL;
2285 if (vals)
2287 vec_alloc (v, list_length (vals));
2288 for (t = vals; t; t = TREE_CHAIN (t))
2289 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2292 return build_constructor (type, v);
2295 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2296 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2297 fields in the constructor remain null. */
2299 tree
2300 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2302 vec<constructor_elt, va_gc> *v = NULL;
2304 for (tree t : vals)
2305 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2307 return build_constructor (type, v);
2310 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2311 of elements, provided as index/value pairs. */
2313 tree
2314 build_constructor_va (tree type, int nelts, ...)
2316 vec<constructor_elt, va_gc> *v = NULL;
2317 va_list p;
2319 va_start (p, nelts);
2320 vec_alloc (v, nelts);
2321 while (nelts--)
2323 tree index = va_arg (p, tree);
2324 tree value = va_arg (p, tree);
2325 CONSTRUCTOR_APPEND_ELT (v, index, value);
2327 va_end (p);
2328 return build_constructor (type, v);
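/* Illustrative sketch (not part of this file): initializing two fields of a
   record, where REC_TYPE and the FIELD_DECLs F1 and F2 are assumed to exist.
   The arguments after NELTS are consumed as index/value pairs.

     tree ctor = build_constructor_va (rec_type, 2,
                                       f1, build_int_cst (TREE_TYPE (f1), 1),
                                       f2, build_int_cst (TREE_TYPE (f2), 2));
*/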
2331 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2333 tree
2334 build_clobber (tree type, enum clobber_kind kind)
2336 tree clobber = build_constructor (type, NULL);
2337 TREE_THIS_VOLATILE (clobber) = true;
2338 CLOBBER_KIND (clobber) = kind;
2339 return clobber;
2342 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2344 tree
2345 build_fixed (tree type, FIXED_VALUE_TYPE f)
2347 tree v;
2348 FIXED_VALUE_TYPE *fp;
2350 v = make_node (FIXED_CST);
2351 fp = ggc_alloc<fixed_value> ();
2352 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2354 TREE_TYPE (v) = type;
2355 TREE_FIXED_CST_PTR (v) = fp;
2356 return v;
2359 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2361 tree
2362 build_real (tree type, REAL_VALUE_TYPE d)
2364 tree v;
2365 int overflow = 0;
2367 /* dconst{0,1,2,m1,half} are used in various places in
2368 the middle-end and optimizers; allow them here
2369 even for decimal floating point types as an exception
2370 by converting them to decimal. */
2371 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2372 && (d.cl == rvc_normal || d.cl == rvc_zero)
2373 && !d.decimal)
2375 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2376 decimal_real_from_string (&d, "1");
2377 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2378 decimal_real_from_string (&d, "2");
2379 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2380 decimal_real_from_string (&d, "-1");
2381 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2382 decimal_real_from_string (&d, "0.5");
2383 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2385 /* Make sure to give zero the minimum quantum exponent for
2386 the type (which corresponds to all bits zero). */
2387 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2388 char buf[16];
2389 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2390 decimal_real_from_string (&d, buf);
2392 else
2393 gcc_unreachable ();
2396 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2397 Consider doing it via real_convert now. */
2399 v = make_node (REAL_CST);
2400 TREE_TYPE (v) = type;
2401 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2402 TREE_OVERFLOW (v) = overflow;
2403 return v;
2406 /* Like build_real, but first truncate D to the type. */
2408 tree
2409 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2411 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2414 /* Return a new REAL_CST node whose type is TYPE
2415 and whose value is the integer value of the INTEGER_CST node I. */
2417 REAL_VALUE_TYPE
2418 real_value_from_int_cst (const_tree type, const_tree i)
2420 REAL_VALUE_TYPE d;
2422 /* Clear all bits of the real value type so that we can later do
2423 bitwise comparisons to see if two values are the same. */
2424 memset (&d, 0, sizeof d);
2426 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2427 TYPE_SIGN (TREE_TYPE (i)));
2428 return d;
2431 /* Given a tree representing an integer constant I, return a tree
2432 representing the same value as a floating-point constant of type TYPE. */
2434 tree
2435 build_real_from_int_cst (tree type, const_tree i)
2437 tree v;
2438 int overflow = TREE_OVERFLOW (i);
2440 v = build_real (type, real_value_from_int_cst (type, i));
2442 TREE_OVERFLOW (v) |= overflow;
2443 return v;
2446 /* Return a new REAL_CST node whose type is TYPE
2447 and whose value is the integer value I which has sign SGN. */
2449 tree
2450 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2452 REAL_VALUE_TYPE d;
2454 /* Clear all bits of the real value type so that we can later do
2455 bitwise comparisons to see if two values are the same. */
2456 memset (&d, 0, sizeof d);
2458 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2459 return build_real (type, d);
2462 /* Return a newly constructed STRING_CST node whose value is the LEN
2463 characters at STR when STR is nonnull, or all zeros otherwise.
2464 Note that for a C string literal, LEN should include the trailing NUL.
2465 The TREE_TYPE is not initialized. */
2467 tree
2468 build_string (unsigned len, const char *str /*= NULL */)
2470 /* Do not waste bytes provided by padding of struct tree_string. */
2471 unsigned size = len + offsetof (struct tree_string, str) + 1;
2473 record_node_allocation_statistics (STRING_CST, size);
2475 tree s = (tree) ggc_internal_alloc (size);
2477 memset (s, 0, sizeof (struct tree_typed));
2478 TREE_SET_CODE (s, STRING_CST);
2479 TREE_CONSTANT (s) = 1;
2480 TREE_STRING_LENGTH (s) = len;
2481 if (str)
2482 memcpy (s->string.str, str, len);
2483 else
2484 memset (s->string.str, 0, len);
2485 s->string.str[len] = '\0';
2487 return s;
2490 /* Return a newly constructed COMPLEX_CST node whose value is
2491 specified by the real and imaginary parts REAL and IMAG.
2492 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2493 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2495 tree
2496 build_complex (tree type, tree real, tree imag)
2498 gcc_assert (CONSTANT_CLASS_P (real));
2499 gcc_assert (CONSTANT_CLASS_P (imag));
2501 tree t = make_node (COMPLEX_CST);
2503 TREE_REALPART (t) = real;
2504 TREE_IMAGPART (t) = imag;
2505 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2506 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2507 return t;
2510 /* Build a complex (inf +- 0i), such as for the result of cproj.
2511 TYPE is the complex tree type of the result. If NEG is true, the
2512 imaginary zero is negative. */
2514 tree
2515 build_complex_inf (tree type, bool neg)
2517 REAL_VALUE_TYPE rzero = dconst0;
2519 rzero.sign = neg;
2520 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2521 build_real (TREE_TYPE (type), rzero));
2524 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2525 element is set to 1. In particular, this is 1 + i for complex types. */
2527 tree
2528 build_each_one_cst (tree type)
2530 if (TREE_CODE (type) == COMPLEX_TYPE)
2532 tree scalar = build_one_cst (TREE_TYPE (type));
2533 return build_complex (type, scalar, scalar);
2535 else
2536 return build_one_cst (type);
2539 /* Return a constant of arithmetic type TYPE which is the
2540 multiplicative identity of the set TYPE. */
2542 tree
2543 build_one_cst (tree type)
2545 switch (TREE_CODE (type))
2547 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2548 case POINTER_TYPE: case REFERENCE_TYPE:
2549 case OFFSET_TYPE: case BITINT_TYPE:
2550 return build_int_cst (type, 1);
2552 case REAL_TYPE:
2553 return build_real (type, dconst1);
2555 case FIXED_POINT_TYPE:
2556 /* We can only generate 1 for accum types. */
2557 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2558 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2560 case VECTOR_TYPE:
2562 tree scalar = build_one_cst (TREE_TYPE (type));
2564 return build_vector_from_val (type, scalar);
2567 case COMPLEX_TYPE:
2568 return build_complex (type,
2569 build_one_cst (TREE_TYPE (type)),
2570 build_zero_cst (TREE_TYPE (type)));
2572 default:
2573 gcc_unreachable ();
2577 /* Return an integer of type TYPE containing all 1's in as much precision as
2578 it contains, or a complex or vector whose subparts are such integers. */
2580 tree
2581 build_all_ones_cst (tree type)
2583 if (TREE_CODE (type) == COMPLEX_TYPE)
2585 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2586 return build_complex (type, scalar, scalar);
2588 else
2589 return build_minus_one_cst (type);
2592 /* Return a constant of arithmetic type TYPE which is the
2593 opposite of the multiplicative identity of the set TYPE. */
2595 tree
2596 build_minus_one_cst (tree type)
2598 switch (TREE_CODE (type))
2600 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2601 case POINTER_TYPE: case REFERENCE_TYPE:
2602 case OFFSET_TYPE: case BITINT_TYPE:
2603 return build_int_cst (type, -1);
2605 case REAL_TYPE:
2606 return build_real (type, dconstm1);
2608 case FIXED_POINT_TYPE:
2609 /* We can only generate 1 for accum types. */
2610 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2611 return build_fixed (type,
2612 fixed_from_double_int (double_int_minus_one,
2613 SCALAR_TYPE_MODE (type)));
2615 case VECTOR_TYPE:
2617 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2619 return build_vector_from_val (type, scalar);
2622 case COMPLEX_TYPE:
2623 return build_complex (type,
2624 build_minus_one_cst (TREE_TYPE (type)),
2625 build_zero_cst (TREE_TYPE (type)));
2627 default:
2628 gcc_unreachable ();
2632 /* Build 0 constant of type TYPE. This is used by constructor folding
2633 and thus the constant should be represented in memory by
2634 zero(es). */
2636 tree
2637 build_zero_cst (tree type)
2639 switch (TREE_CODE (type))
2641 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2642 case POINTER_TYPE: case REFERENCE_TYPE:
2643 case OFFSET_TYPE: case NULLPTR_TYPE: case BITINT_TYPE:
2644 return build_int_cst (type, 0);
2646 case REAL_TYPE:
2647 return build_real (type, dconst0);
2649 case FIXED_POINT_TYPE:
2650 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2652 case VECTOR_TYPE:
2654 tree scalar = build_zero_cst (TREE_TYPE (type));
2656 return build_vector_from_val (type, scalar);
2659 case COMPLEX_TYPE:
2661 tree zero = build_zero_cst (TREE_TYPE (type));
2663 return build_complex (type, zero, zero);
2666 default:
2667 if (!AGGREGATE_TYPE_P (type))
2668 return fold_convert (type, integer_zero_node);
2669 return build_constructor (type, NULL);
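/* Illustrative sketch (not part of this file): the zero constant adapts to
   the shape of TYPE.  SOME_RECORD_TYPE stands in for any aggregate type.

     build_zero_cst (integer_type_node);          -> INTEGER_CST 0
     build_zero_cst (double_type_node);           -> REAL_CST 0.0
     build_zero_cst (complex_double_type_node);   -> COMPLEX_CST 0.0 + 0.0i
     build_zero_cst (some_record_type);           -> empty CONSTRUCTOR
                                                     (all-zero memory)
*/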
2673 /* Build a constant of integer type TYPE, made of VALUE's bits replicated
2674 every WIDTH bits to fit TYPE's precision. */
2676 tree
2677 build_replicated_int_cst (tree type, unsigned int width, HOST_WIDE_INT value)
2679 int n = (TYPE_PRECISION (type) + HOST_BITS_PER_WIDE_INT - 1)
2680 / HOST_BITS_PER_WIDE_INT;
2681 unsigned HOST_WIDE_INT low, mask;
2682 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
2683 int i;
2685 gcc_assert (n && n <= WIDE_INT_MAX_ELTS);
2687 if (width == HOST_BITS_PER_WIDE_INT)
2688 low = value;
2689 else
2691 mask = ((HOST_WIDE_INT)1 << width) - 1;
2692 low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask);
2695 for (i = 0; i < n; i++)
2696 a[i] = low;
2698 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
2699 return wide_int_to_tree
2700 (type, wide_int::from_array (a, n, TYPE_PRECISION (type)));
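/* Worked example (illustrative, not part of this file): replicating the byte
   0x2a every 8 bits of a 32-bit type.  MASK is 0xff, so LOW becomes
   ~0 / 0xff * 0x2a == 0x2a2a2a2a2a2a2a2a, and the resulting INTEGER_CST is
   0x2a2a2a2a once truncated to the 32-bit precision.

     tree t = build_replicated_int_cst (uint32_type_node, 8, 0x2a);
*/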
2703 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2704 unsigned constant in which only the sign bit is set. Return null
2705 otherwise. */
2707 tree
2708 sign_mask_for (tree type)
2710 /* Avoid having to choose between a real-only sign and a pair of signs.
2711 This could be relaxed if the choice becomes obvious later. */
2712 if (TREE_CODE (type) == COMPLEX_TYPE)
2713 return NULL_TREE;
2715 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2716 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2717 if (!bits || !pow2p_hwi (bits))
2718 return NULL_TREE;
2720 tree inttype = unsigned_type_for (type);
2721 if (!inttype)
2722 return NULL_TREE;
2724 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2725 if (VECTOR_TYPE_P (inttype))
2727 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2728 return build_vector_from_val (inttype, elt);
2730 return wide_int_to_tree (inttype, mask);
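/* Illustrative sketch (not part of this file): on a target where float is
   IEEE binary32, the sign mask is the unsigned 32-bit constant 0x80000000.

     tree mask = sign_mask_for (float_type_node);

   For vector float types the same bit pattern is duplicated across the
   matching unsigned integer vector type; NULL_TREE is returned when no
   IEEE-style sign bit can be identified.  */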
2733 /* Build a BINFO with room for BASE_BINFOS base binfo slots. */
2735 tree
2736 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2738 tree t;
2739 size_t length = (offsetof (struct tree_binfo, base_binfos)
2740 + vec<tree, va_gc>::embedded_size (base_binfos));
2742 record_node_allocation_statistics (TREE_BINFO, length);
2744 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2746 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2748 TREE_SET_CODE (t, TREE_BINFO);
2750 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2752 return t;
2755 /* Create a CASE_LABEL_EXPR tree node and return it. */
2757 tree
2758 build_case_label (tree low_value, tree high_value, tree label_decl)
2760 tree t = make_node (CASE_LABEL_EXPR);
2762 TREE_TYPE (t) = void_type_node;
2763 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2765 CASE_LOW (t) = low_value;
2766 CASE_HIGH (t) = high_value;
2767 CASE_LABEL (t) = label_decl;
2768 CASE_CHAIN (t) = NULL_TREE;
2770 return t;
2773 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2774 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2775 The latter determines the length of the HOST_WIDE_INT vector. */
2777 tree
2778 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2780 tree t;
2781 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2782 + sizeof (struct tree_int_cst));
2784 gcc_assert (len);
2785 record_node_allocation_statistics (INTEGER_CST, length);
2787 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2789 TREE_SET_CODE (t, INTEGER_CST);
2790 TREE_INT_CST_NUNITS (t) = len;
2791 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2792 /* to_offset can only be applied to trees that are offset_int-sized
2793 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2794 must be exactly the precision of offset_int and so LEN is correct. */
2795 if (ext_len <= OFFSET_INT_ELTS)
2796 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2797 else
2798 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2800 TREE_CONSTANT (t) = 1;
2802 return t;
2805 /* Build a newly constructed TREE_VEC node of length LEN. */
2807 tree
2808 make_tree_vec (int len MEM_STAT_DECL)
2810 tree t;
2811 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2813 record_node_allocation_statistics (TREE_VEC, length);
2815 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2817 TREE_SET_CODE (t, TREE_VEC);
2818 TREE_VEC_LENGTH (t) = len;
2820 return t;
2823 /* Grow a TREE_VEC node to new length LEN. */
2825 tree
2826 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2828 gcc_assert (TREE_CODE (v) == TREE_VEC);
2830 int oldlen = TREE_VEC_LENGTH (v);
2831 gcc_assert (len > oldlen);
2833 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2834 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2836 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2838 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2840 TREE_VEC_LENGTH (v) = len;
2842 return v;
2845 /* Return true if EXPR is the constant zero, whether it is integral, float or
2846 fixed, and scalar, complex or vector. */
2848 bool
2849 zerop (const_tree expr)
2851 return (integer_zerop (expr)
2852 || real_zerop (expr)
2853 || fixed_zerop (expr));
2856 /* Return true if EXPR is the integer constant zero or a complex constant
2857 of zero, or a location wrapper for such a constant. */
2859 bool
2860 integer_zerop (const_tree expr)
2862 STRIP_ANY_LOCATION_WRAPPER (expr);
2864 switch (TREE_CODE (expr))
2866 case INTEGER_CST:
2867 return wi::to_wide (expr) == 0;
2868 case COMPLEX_CST:
2869 return (integer_zerop (TREE_REALPART (expr))
2870 && integer_zerop (TREE_IMAGPART (expr)));
2871 case VECTOR_CST:
2872 return (VECTOR_CST_NPATTERNS (expr) == 1
2873 && VECTOR_CST_DUPLICATE_P (expr)
2874 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2875 default:
2876 return false;
2880 /* Return true if EXPR is the integer constant one or the corresponding
2881 complex constant, or a location wrapper for such a constant. */
2883 bool
2884 integer_onep (const_tree expr)
2886 STRIP_ANY_LOCATION_WRAPPER (expr);
2888 switch (TREE_CODE (expr))
2890 case INTEGER_CST:
2891 return wi::eq_p (wi::to_widest (expr), 1);
2892 case COMPLEX_CST:
2893 return (integer_onep (TREE_REALPART (expr))
2894 && integer_zerop (TREE_IMAGPART (expr)));
2895 case VECTOR_CST:
2896 return (VECTOR_CST_NPATTERNS (expr) == 1
2897 && VECTOR_CST_DUPLICATE_P (expr)
2898 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2899 default:
2900 return false;
2904 /* Return true if EXPR is the integer constant one. For complex and vector,
2905 return true if every piece is the integer constant one.
2906 Also return true for location wrappers for such a constant. */
2908 bool
2909 integer_each_onep (const_tree expr)
2911 STRIP_ANY_LOCATION_WRAPPER (expr);
2913 if (TREE_CODE (expr) == COMPLEX_CST)
2914 return (integer_onep (TREE_REALPART (expr))
2915 && integer_onep (TREE_IMAGPART (expr)));
2916 else
2917 return integer_onep (expr);
2920 /* Return true if EXPR is an integer containing all 1's in as much precision
2921 as it contains, or a complex or vector whose subparts are such integers,
2922 or a location wrapper for such a constant. */
2924 bool
2925 integer_all_onesp (const_tree expr)
2927 STRIP_ANY_LOCATION_WRAPPER (expr);
2929 if (TREE_CODE (expr) == COMPLEX_CST
2930 && integer_all_onesp (TREE_REALPART (expr))
2931 && integer_all_onesp (TREE_IMAGPART (expr)))
2932 return true;
2934 else if (TREE_CODE (expr) == VECTOR_CST)
2935 return (VECTOR_CST_NPATTERNS (expr) == 1
2936 && VECTOR_CST_DUPLICATE_P (expr)
2937 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2939 else if (TREE_CODE (expr) != INTEGER_CST)
2940 return false;
2942 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2943 == wi::to_wide (expr));
2946 /* Return true if EXPR is the integer constant minus one, or a location
2947 wrapper for such a constant. */
2949 bool
2950 integer_minus_onep (const_tree expr)
2952 STRIP_ANY_LOCATION_WRAPPER (expr);
2954 if (TREE_CODE (expr) == COMPLEX_CST)
2955 return (integer_all_onesp (TREE_REALPART (expr))
2956 && integer_zerop (TREE_IMAGPART (expr)));
2957 else
2958 return integer_all_onesp (expr);
2961 /* Return true if EXPR is an integer constant that is a power of 2 (i.e., has
2962 only one bit on), or a location wrapper for such a constant. */
2964 bool
2965 integer_pow2p (const_tree expr)
2967 STRIP_ANY_LOCATION_WRAPPER (expr);
2969 if (TREE_CODE (expr) == COMPLEX_CST
2970 && integer_pow2p (TREE_REALPART (expr))
2971 && integer_zerop (TREE_IMAGPART (expr)))
2972 return true;
2974 if (TREE_CODE (expr) != INTEGER_CST)
2975 return false;
2977 return wi::popcount (wi::to_wide (expr)) == 1;
2980 /* Return true if EXPR is an integer constant other than zero or a
2981 complex constant other than zero, or a location wrapper for such a
2982 constant. */
2984 bool
2985 integer_nonzerop (const_tree expr)
2987 STRIP_ANY_LOCATION_WRAPPER (expr);
2989 return ((TREE_CODE (expr) == INTEGER_CST
2990 && wi::to_wide (expr) != 0)
2991 || (TREE_CODE (expr) == COMPLEX_CST
2992 && (integer_nonzerop (TREE_REALPART (expr))
2993 || integer_nonzerop (TREE_IMAGPART (expr)))));
2996 /* Return true if EXPR is the integer constant one. For vector,
2997 return true if every piece is the integer constant minus one
2998 (representing the value TRUE).
2999 Also return true for location wrappers for such a constant. */
3001 bool
3002 integer_truep (const_tree expr)
3004 STRIP_ANY_LOCATION_WRAPPER (expr);
3006 if (TREE_CODE (expr) == VECTOR_CST)
3007 return integer_all_onesp (expr);
3008 return integer_onep (expr);
3011 /* Return true if EXPR is the fixed-point constant zero, or a location wrapper
3012 for such a constant. */
3014 bool
3015 fixed_zerop (const_tree expr)
3017 STRIP_ANY_LOCATION_WRAPPER (expr);
3019 return (TREE_CODE (expr) == FIXED_CST
3020 && TREE_FIXED_CST (expr).data.is_zero ());
3023 /* Return the power of two represented by a tree node known to be a
3024 power of two. */
3027 tree_log2 (const_tree expr)
3029 if (TREE_CODE (expr) == COMPLEX_CST)
3030 return tree_log2 (TREE_REALPART (expr));
3032 return wi::exact_log2 (wi::to_wide (expr));
3035 /* Similar, but return the largest integer Y such that 2 ** Y is less
3036 than or equal to EXPR. */
3039 tree_floor_log2 (const_tree expr)
3041 if (TREE_CODE (expr) == COMPLEX_CST)
3042 return tree_log2 (TREE_REALPART (expr));
3044 return wi::floor_log2 (wi::to_wide (expr));
3047 /* Return the number of known trailing zero bits in EXPR, or, if the value
3048 of EXPR is known to be zero, the precision of its type. */
3050 unsigned int
3051 tree_ctz (const_tree expr)
3053 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3054 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3055 return 0;
3057 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3058 switch (TREE_CODE (expr))
3060 case INTEGER_CST:
3061 ret1 = wi::ctz (wi::to_wide (expr));
3062 return MIN (ret1, prec);
3063 case SSA_NAME:
3064 ret1 = wi::ctz (get_nonzero_bits (expr));
3065 return MIN (ret1, prec);
3066 case PLUS_EXPR:
3067 case MINUS_EXPR:
3068 case BIT_IOR_EXPR:
3069 case BIT_XOR_EXPR:
3070 case MIN_EXPR:
3071 case MAX_EXPR:
3072 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3073 if (ret1 == 0)
3074 return ret1;
3075 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3076 return MIN (ret1, ret2);
3077 case POINTER_PLUS_EXPR:
3078 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3079 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3080 /* Second operand is sizetype, which could be in theory
3081 wider than pointer's precision. Make sure we never
3082 return more than prec. */
3083 ret2 = MIN (ret2, prec);
3084 return MIN (ret1, ret2);
3085 case BIT_AND_EXPR:
3086 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3087 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3088 return MAX (ret1, ret2);
3089 case MULT_EXPR:
3090 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3091 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3092 return MIN (ret1 + ret2, prec);
3093 case LSHIFT_EXPR:
3094 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3095 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3096 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3098 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3099 return MIN (ret1 + ret2, prec);
3101 return ret1;
3102 case RSHIFT_EXPR:
3103 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3104 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3106 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3107 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3108 if (ret1 > ret2)
3109 return ret1 - ret2;
3111 return 0;
3112 case TRUNC_DIV_EXPR:
3113 case CEIL_DIV_EXPR:
3114 case FLOOR_DIV_EXPR:
3115 case ROUND_DIV_EXPR:
3116 case EXACT_DIV_EXPR:
3117 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3118 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3120 int l = tree_log2 (TREE_OPERAND (expr, 1));
3121 if (l >= 0)
3123 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3124 ret2 = l;
3125 if (ret1 > ret2)
3126 return ret1 - ret2;
3129 return 0;
3130 CASE_CONVERT:
3131 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3132 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3133 ret1 = prec;
3134 return MIN (ret1, prec);
3135 case SAVE_EXPR:
3136 return tree_ctz (TREE_OPERAND (expr, 0));
3137 case COND_EXPR:
3138 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3139 if (ret1 == 0)
3140 return 0;
3141 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3142 return MIN (ret1, ret2);
3143 case COMPOUND_EXPR:
3144 return tree_ctz (TREE_OPERAND (expr, 1));
3145 case ADDR_EXPR:
3146 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3147 if (ret1 > BITS_PER_UNIT)
3149 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3150 return MIN (ret1, prec);
3152 return 0;
3153 default:
3154 return 0;
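/* Worked example (illustrative, not part of this file): for an expression
   such as  a * 8  the MULT_EXPR case adds the trailing-zero counts of the
   two operands, so the constant factor 8 alone guarantees a result of at
   least 3.  Likewise  (a << 4) + (b << 2)  is known to have at least
   min (4, 2) == 2 trailing zeros via the LSHIFT_EXPR and PLUS_EXPR cases.  */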
3158 /* Return true if EXPR is the real constant zero. Trailing zeroes matter for
3159 decimal float constants, so don't return true for them.
3160 Also return true for location wrappers around such a constant. */
3162 bool
3163 real_zerop (const_tree expr)
3165 STRIP_ANY_LOCATION_WRAPPER (expr);
3167 switch (TREE_CODE (expr))
3169 case REAL_CST:
3170 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3171 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3172 case COMPLEX_CST:
3173 return real_zerop (TREE_REALPART (expr))
3174 && real_zerop (TREE_IMAGPART (expr));
3175 case VECTOR_CST:
3177 /* Don't simply check for a duplicate because the predicate
3178 accepts both +0.0 and -0.0. */
3179 unsigned count = vector_cst_encoded_nelts (expr);
3180 for (unsigned int i = 0; i < count; ++i)
3181 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3182 return false;
3183 return true;
3185 default:
3186 return false;
3190 /* Return true if EXPR is the real constant one in real or complex form.
3191 Trailing zeroes matter for decimal float constants, so don't return
3192 true for them.
3193 Also return true for location wrappers around such a constant. */
3195 bool
3196 real_onep (const_tree expr)
3198 STRIP_ANY_LOCATION_WRAPPER (expr);
3200 switch (TREE_CODE (expr))
3202 case REAL_CST:
3203 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3204 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3205 case COMPLEX_CST:
3206 return real_onep (TREE_REALPART (expr))
3207 && real_zerop (TREE_IMAGPART (expr));
3208 case VECTOR_CST:
3209 return (VECTOR_CST_NPATTERNS (expr) == 1
3210 && VECTOR_CST_DUPLICATE_P (expr)
3211 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3212 default:
3213 return false;
3217 /* Return true if EXPR is the real constant minus one. Trailing zeroes
3218 matter for decimal float constants, so don't return true for them.
3219 Also return true for location wrappers around such a constant. */
3221 bool
3222 real_minus_onep (const_tree expr)
3224 STRIP_ANY_LOCATION_WRAPPER (expr);
3226 switch (TREE_CODE (expr))
3228 case REAL_CST:
3229 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3230 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3231 case COMPLEX_CST:
3232 return real_minus_onep (TREE_REALPART (expr))
3233 && real_zerop (TREE_IMAGPART (expr));
3234 case VECTOR_CST:
3235 return (VECTOR_CST_NPATTERNS (expr) == 1
3236 && VECTOR_CST_DUPLICATE_P (expr)
3237 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3238 default:
3239 return false;
3243 /* Return true if EXPR could be a floating point zero. */
3245 bool
3246 real_maybe_zerop (const_tree expr)
3248 switch (TREE_CODE (expr))
3250 case REAL_CST:
3251 /* Can't use real_zerop here, as it always returns false for decimal
3252 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3253 either, as decimal zeros are rvc_normal. */
3254 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3255 case COMPLEX_CST:
3256 return (real_maybe_zerop (TREE_REALPART (expr))
3257 || real_maybe_zerop (TREE_IMAGPART (expr)));
3258 case VECTOR_CST:
3260 unsigned count = vector_cst_encoded_nelts (expr);
3261 for (unsigned int i = 0; i < count; ++i)
3262 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3263 return true;
3264 return false;
3266 default:
3267 /* Perhaps for SSA_NAMEs we could query frange. */
3268 return true;
3272 /* True if EXP is a constant or a cast of a constant. */
3274 bool
3275 really_constant_p (const_tree exp)
3277 /* This is not quite the same as STRIP_NOPS. It does more. */
3278 while (CONVERT_EXPR_P (exp)
3279 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3280 exp = TREE_OPERAND (exp, 0);
3281 return TREE_CONSTANT (exp);
3284 /* Return true if T holds a polynomial pointer difference, storing it in
3285 *VALUE if so. A true return means that T's precision is no greater
3286 than 64 bits, which is the largest address space we support, so *VALUE
3287 never loses precision. However, the signedness of the result does
3288 not necessarily match the signedness of T: sometimes an unsigned type
3289 like sizetype is used to encode a value that is actually negative. */
3291 bool
3292 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3294 if (!t)
3295 return false;
3296 if (TREE_CODE (t) == INTEGER_CST)
3298 if (!cst_and_fits_in_hwi (t))
3299 return false;
3300 *value = int_cst_value (t);
3301 return true;
3303 if (POLY_INT_CST_P (t))
3305 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3306 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3307 return false;
3308 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3309 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3310 return true;
3312 return false;
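/* Illustrative sketch (not part of this file): the usual calling pattern,
   where OFF_TREE is assumed to be some INTEGER_CST or POLY_INT_CST offset.
   On success OFF holds the (possibly negative) byte offset.

     poly_int64 off;
     if (ptrdiff_tree_p (off_tree, &off))
       use_offset (off);

   use_offset stands in for whatever the caller does with the value.  */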
3315 poly_int64
3316 tree_to_poly_int64 (const_tree t)
3318 gcc_assert (tree_fits_poly_int64_p (t));
3319 if (POLY_INT_CST_P (t))
3320 return poly_int_cst_value (t).force_shwi ();
3321 return TREE_INT_CST_LOW (t);
3324 poly_uint64
3325 tree_to_poly_uint64 (const_tree t)
3327 gcc_assert (tree_fits_poly_uint64_p (t));
3328 if (POLY_INT_CST_P (t))
3329 return poly_int_cst_value (t).force_uhwi ();
3330 return TREE_INT_CST_LOW (t);
3333 /* Return first list element whose TREE_VALUE is ELEM.
3334 Return 0 if ELEM is not in LIST. */
3336 tree
3337 value_member (tree elem, tree list)
3339 while (list)
3341 if (elem == TREE_VALUE (list))
3342 return list;
3343 list = TREE_CHAIN (list);
3345 return NULL_TREE;
3348 /* Return first list element whose TREE_PURPOSE is ELEM.
3349 Return 0 if ELEM is not in LIST. */
3351 tree
3352 purpose_member (const_tree elem, tree list)
3354 while (list)
3356 if (elem == TREE_PURPOSE (list))
3357 return list;
3358 list = TREE_CHAIN (list);
3360 return NULL_TREE;
3363 /* Return true if ELEM is in V. */
3365 bool
3366 vec_member (const_tree elem, vec<tree, va_gc> *v)
3368 unsigned ix;
3369 tree t;
3370 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3371 if (elem == t)
3372 return true;
3373 return false;
3376 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3377 NULL_TREE. */
3379 tree
3380 chain_index (int idx, tree chain)
3382 for (; chain && idx > 0; --idx)
3383 chain = TREE_CHAIN (chain);
3384 return chain;
3387 /* Return true if ELEM is part of the chain CHAIN. */
3389 bool
3390 chain_member (const_tree elem, const_tree chain)
3392 while (chain)
3394 if (elem == chain)
3395 return true;
3396 chain = DECL_CHAIN (chain);
3399 return false;
3402 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3403 We expect a null pointer to mark the end of the chain.
3404 This is the Lisp primitive `length'. */
3407 list_length (const_tree t)
3409 const_tree p = t;
3410 #ifdef ENABLE_TREE_CHECKING
3411 const_tree q = t;
3412 #endif
3413 int len = 0;
3415 while (p)
3417 p = TREE_CHAIN (p);
3418 #ifdef ENABLE_TREE_CHECKING
3419 if (len % 2)
3420 q = TREE_CHAIN (q);
3421 gcc_assert (p != q);
3422 #endif
3423 len++;
3426 return len;
3429 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3430 UNION_TYPE TYPE, or NULL_TREE if none. */
3432 tree
3433 first_field (const_tree type)
3435 tree t = TYPE_FIELDS (type);
3436 while (t && TREE_CODE (t) != FIELD_DECL)
3437 t = TREE_CHAIN (t);
3438 return t;
3441 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3442 UNION_TYPE TYPE, or NULL_TREE if none. */
3444 tree
3445 last_field (const_tree type)
3447 tree last = NULL_TREE;
3449 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3451 if (TREE_CODE (fld) != FIELD_DECL)
3452 continue;
3454 last = fld;
3457 return last;
3460 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3461 by modifying the last node in chain 1 to point to chain 2.
3462 This is the Lisp primitive `nconc'. */
3464 tree
3465 chainon (tree op1, tree op2)
3467 tree t1;
3469 if (!op1)
3470 return op2;
3471 if (!op2)
3472 return op1;
3474 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3475 continue;
3476 TREE_CHAIN (t1) = op2;
3478 #ifdef ENABLE_TREE_CHECKING
3480 tree t2;
3481 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3482 gcc_assert (t2 != t1);
3484 #endif
3486 return op1;
3489 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3491 tree
3492 tree_last (tree chain)
3494 tree next;
3495 if (chain)
3496 while ((next = TREE_CHAIN (chain)))
3497 chain = next;
3498 return chain;
3501 /* Reverse the order of elements in the chain T,
3502 and return the new head of the chain (old last element). */
3504 tree
3505 nreverse (tree t)
3507 tree prev = 0, decl, next;
3508 for (decl = t; decl; decl = next)
3510 /* We shouldn't be using this function to reverse BLOCK chains; we
3511 have blocks_nreverse for that. */
3512 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3513 next = TREE_CHAIN (decl);
3514 TREE_CHAIN (decl) = prev;
3515 prev = decl;
3517 return prev;
3520 /* Return a newly created TREE_LIST node whose
3521 purpose and value fields are PARM and VALUE. */
3523 tree
3524 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3526 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3527 TREE_PURPOSE (t) = parm;
3528 TREE_VALUE (t) = value;
3529 return t;
3532 /* Build a chain of TREE_LIST nodes from a vector. */
3534 tree
3535 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3537 tree ret = NULL_TREE;
3538 tree *pp = &ret;
3539 unsigned int i;
3540 tree t;
3541 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3543 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3544 pp = &TREE_CHAIN (*pp);
3546 return ret;
3549 /* Return a newly created TREE_LIST node whose
3550 purpose and value fields are PURPOSE and VALUE
3551 and whose TREE_CHAIN is CHAIN. */
3553 tree
3554 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3556 tree node;
3558 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3559 memset (node, 0, sizeof (struct tree_common));
3561 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3563 TREE_SET_CODE (node, TREE_LIST);
3564 TREE_CHAIN (node) = chain;
3565 TREE_PURPOSE (node) = purpose;
3566 TREE_VALUE (node) = value;
3567 return node;
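/* Illustrative sketch (not part of this file): building a two-element
   TREE_LIST chain front to back and measuring it with list_length, where
   FIRST and SECOND are assumed to be existing trees.

     tree chain = tree_cons (NULL_TREE, second, NULL_TREE);
     chain = tree_cons (NULL_TREE, first, chain);
     gcc_checking_assert (list_length (chain) == 2);
*/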
3570 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3571 trees. */
3573 vec<tree, va_gc> *
3574 ctor_to_vec (tree ctor)
3576 vec<tree, va_gc> *vec;
3577 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3578 unsigned int ix;
3579 tree val;
3581 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3582 vec->quick_push (val);
3584 return vec;
3587 /* Return the size nominally occupied by an object of type TYPE
3588 when it resides in memory. The value is measured in units of bytes,
3589 and its data type is that normally used for type sizes
3590 (which is the first type created by make_signed_type or
3591 make_unsigned_type). */
3593 tree
3594 size_in_bytes_loc (location_t loc, const_tree type)
3596 tree t;
3598 if (type == error_mark_node)
3599 return integer_zero_node;
3601 type = TYPE_MAIN_VARIANT (type);
3602 t = TYPE_SIZE_UNIT (type);
3604 if (t == 0)
3606 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3607 return size_zero_node;
3610 return t;
3613 /* Return the size of TYPE (in bytes) as a wide integer
3614 or return -1 if the size can vary or is larger than an integer. */
3616 HOST_WIDE_INT
3617 int_size_in_bytes (const_tree type)
3619 tree t;
3621 if (type == error_mark_node)
3622 return 0;
3624 type = TYPE_MAIN_VARIANT (type);
3625 t = TYPE_SIZE_UNIT (type);
3627 if (t && tree_fits_uhwi_p (t))
3628 return TREE_INT_CST_LOW (t);
3629 else
3630 return -1;
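/* Illustrative sketch (not part of this file): on common targets
   int_size_in_bytes (char_type_node) is 1 and
   int_size_in_bytes (integer_type_node) is typically 4, while an incomplete
   or variable-sized type yields -1, so callers guard uses such as

     HOST_WIDE_INT size = int_size_in_bytes (type);
     if (size != -1)
       use_constant_size (size);

   where use_constant_size stands in for the caller's fixed-size path.  */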
3633 /* Return the maximum size of TYPE (in bytes) as a wide integer
3634 or return -1 if the size can vary or is larger than an integer. */
3636 HOST_WIDE_INT
3637 max_int_size_in_bytes (const_tree type)
3639 HOST_WIDE_INT size = -1;
3640 tree size_tree;
3642 /* If this is an array type, check for a possible MAX_SIZE attached. */
3644 if (TREE_CODE (type) == ARRAY_TYPE)
3646 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3648 if (size_tree && tree_fits_uhwi_p (size_tree))
3649 size = tree_to_uhwi (size_tree);
3652 /* If we still haven't been able to get a size, see if the language
3653 can compute a maximum size. */
3655 if (size == -1)
3657 size_tree = lang_hooks.types.max_size (type);
3659 if (size_tree && tree_fits_uhwi_p (size_tree))
3660 size = tree_to_uhwi (size_tree);
3663 return size;
3666 /* Return the bit position of FIELD, in bits from the start of the record.
3667 This is a tree of type bitsizetype. */
3669 tree
3670 bit_position (const_tree field)
3672 return bit_from_pos (DECL_FIELD_OFFSET (field),
3673 DECL_FIELD_BIT_OFFSET (field));
3676 /* Return the byte position of FIELD, in bytes from the start of the record.
3677 This is a tree of type sizetype. */
3679 tree
3680 byte_position (const_tree field)
3682 return byte_from_pos (DECL_FIELD_OFFSET (field),
3683 DECL_FIELD_BIT_OFFSET (field));
3686 /* Likewise, but return as an integer. It must be representable in
3687 that way (since it could be a signed value, we don't have the
3688 option of returning -1 like int_size_in_bytes can). */
3690 HOST_WIDE_INT
3691 int_byte_position (const_tree field)
3693 return tree_to_shwi (byte_position (field));
3696 /* Return, as a tree node, the number of elements for TYPE (which is an
3697 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3699 tree
3700 array_type_nelts (const_tree type)
3702 tree index_type, min, max;
3704 /* If they did it with unspecified bounds, then we should have already
3705 given an error about it before we got here. */
3706 if (! TYPE_DOMAIN (type))
3707 return error_mark_node;
3709 index_type = TYPE_DOMAIN (type);
3710 min = TYPE_MIN_VALUE (index_type);
3711 max = TYPE_MAX_VALUE (index_type);
3713 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3714 if (!max)
3716 /* Zero-sized arrays are represented by the C FE as complete types with
3717 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3718 them as min 0, max -1. */
3719 if (COMPLETE_TYPE_P (type)
3720 && integer_zerop (TYPE_SIZE (type))
3721 && integer_zerop (min))
3722 return build_int_cst (TREE_TYPE (min), -1);
3724 return error_mark_node;
3727 return (integer_zerop (min)
3728 ? max
3729 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
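/* Illustrative sketch (not part of this file): for the type of  int a[10]
   the result is the INTEGER_CST 9 (the element count minus one), e.g.

     tree domain = build_index_type (size_int (9));
     tree arr = build_array_type (integer_type_node, domain);
     tree nelts = array_type_nelts (arr);

   build_index_type and build_array_type are defined elsewhere in GCC.  */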
3732 /* If arg is static -- a reference to an object in static storage -- then
3733 return the object. This is not the same as the C meaning of `static'.
3734 If arg isn't static, return NULL. */
3736 tree
3737 staticp (tree arg)
3739 switch (TREE_CODE (arg))
3741 case FUNCTION_DECL:
3742 /* Nested functions are static, even though taking their address will
3743 involve a trampoline as we unnest the nested function and create
3744 the trampoline on the tree level. */
3745 return arg;
3747 case VAR_DECL:
3748 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3749 && ! DECL_THREAD_LOCAL_P (arg)
3750 && ! DECL_DLLIMPORT_P (arg)
3751 ? arg : NULL);
3753 case CONST_DECL:
3754 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3755 ? arg : NULL);
3757 case CONSTRUCTOR:
3758 return TREE_STATIC (arg) ? arg : NULL;
3760 case LABEL_DECL:
3761 case STRING_CST:
3762 return arg;
3764 case COMPONENT_REF:
3765 /* If the thing being referenced is not a field, then it is
3766 something language specific. */
3767 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3769 /* If we are referencing a bitfield, we can't evaluate an
3770 ADDR_EXPR at compile time and so it isn't a constant. */
3771 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3772 return NULL;
3774 return staticp (TREE_OPERAND (arg, 0));
3776 case BIT_FIELD_REF:
3777 return NULL;
3779 case INDIRECT_REF:
3780 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3782 case ARRAY_REF:
3783 case ARRAY_RANGE_REF:
3784 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3785 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3786 return staticp (TREE_OPERAND (arg, 0));
3787 else
3788 return NULL;
3790 case COMPOUND_LITERAL_EXPR:
3791 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3793 default:
3794 return NULL;
3801 /* Return whether OP is a DECL whose address is function-invariant. */
3803 bool
3804 decl_address_invariant_p (const_tree op)
3806 /* The conditions below are slightly less strict than the one in
3807 staticp. */
3809 switch (TREE_CODE (op))
3811 case PARM_DECL:
3812 case RESULT_DECL:
3813 case LABEL_DECL:
3814 case FUNCTION_DECL:
3815 return true;
3817 case VAR_DECL:
3818 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3819 || DECL_THREAD_LOCAL_P (op)
3820 || DECL_CONTEXT (op) == current_function_decl
3821 || decl_function_context (op) == current_function_decl)
3822 return true;
3823 break;
3825 case CONST_DECL:
3826 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3827 || decl_function_context (op) == current_function_decl)
3828 return true;
3829 break;
3831 default:
3832 break;
3835 return false;
3838 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3840 bool
3841 decl_address_ip_invariant_p (const_tree op)
3843 /* The conditions below are slightly less strict than the one in
3844 staticp. */
3846 switch (TREE_CODE (op))
3848 case LABEL_DECL:
3849 case FUNCTION_DECL:
3850 case STRING_CST:
3851 return true;
3853 case VAR_DECL:
3854 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3855 && !DECL_DLLIMPORT_P (op))
3856 || DECL_THREAD_LOCAL_P (op))
3857 return true;
3858 break;
3860 case CONST_DECL:
3861 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3862 return true;
3863 break;
3865 default:
3866 break;
3869 return false;
3873 /* Return true if T is function-invariant (internal function, does
3874 not handle arithmetic; that's handled in skip_simple_arithmetic and
3875 tree_invariant_p). */
3877 static bool
3878 tree_invariant_p_1 (tree t)
3880 tree op;
3882 if (TREE_CONSTANT (t)
3883 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3884 return true;
3886 switch (TREE_CODE (t))
3888 case SAVE_EXPR:
3889 return true;
3891 case ADDR_EXPR:
3892 op = TREE_OPERAND (t, 0);
3893 while (handled_component_p (op))
3895 switch (TREE_CODE (op))
3897 case ARRAY_REF:
3898 case ARRAY_RANGE_REF:
3899 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3900 || TREE_OPERAND (op, 2) != NULL_TREE
3901 || TREE_OPERAND (op, 3) != NULL_TREE)
3902 return false;
3903 break;
3905 case COMPONENT_REF:
3906 if (TREE_OPERAND (op, 2) != NULL_TREE)
3907 return false;
3908 break;
3910 default:;
3912 op = TREE_OPERAND (op, 0);
3915 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3917 default:
3918 break;
3921 return false;
3924 /* Return true if T is function-invariant. */
3926 bool
3927 tree_invariant_p (tree t)
3929 tree inner = skip_simple_arithmetic (t);
3930 return tree_invariant_p_1 (inner);
3933 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3934 Do this to any expression which may be used in more than one place,
3935 but must be evaluated only once.
3937 Normally, expand_expr would reevaluate the expression each time.
3938 Calling save_expr produces something that is evaluated and recorded
3939 the first time expand_expr is called on it. Subsequent calls to
3940 expand_expr just reuse the recorded value.
3942 The call to expand_expr that generates code that actually computes
3943 the value is the first call *at compile time*. Subsequent calls
3944 *at compile time* generate code to use the saved value.
3945 This produces correct results provided that *at run time* control
3946 always flows through the insns made by the first expand_expr
3947 before reaching the other places where the save_expr was evaluated.
3948 You, the caller of save_expr, must make sure this is so.
3950 Constants, and certain read-only nodes, are returned with no
3951 SAVE_EXPR because that is safe. Expressions containing placeholders
3952 are not touched; see tree.def for an explanation of what these
3953 are used for. */
3955 tree
3956 save_expr (tree expr)
3958 tree inner;
3960 /* If the tree evaluates to a constant, then we don't want to hide that
3961 fact (i.e. this allows further folding, and direct checks for constants).
3962 However, a read-only object that has side effects cannot be bypassed.
3963 Since it is no problem to reevaluate literals, we just return the
3964 literal node. */
3965 inner = skip_simple_arithmetic (expr);
3966 if (TREE_CODE (inner) == ERROR_MARK)
3967 return inner;
3969 if (tree_invariant_p_1 (inner))
3970 return expr;
3972 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3973 it means that the size or offset of some field of an object depends on
3974 the value within another field.
3976 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3977 and some variable since it would then need to be both evaluated once and
3978 evaluated more than once. Front-ends must assure this case cannot
3979 happen by surrounding any such subexpressions in their own SAVE_EXPR
3980 and forcing evaluation at the proper time. */
3981 if (contains_placeholder_p (inner))
3982 return expr;
3984 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3986 /* This expression might be placed ahead of a jump to ensure that the
3987 value was computed on both sides of the jump. So make sure it isn't
3988 eliminated as dead. */
3989 TREE_SIDE_EFFECTS (expr) = 1;
3990 return expr;
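/* Illustrative sketch (not part of this file): a caller that needs EXPR
   twice wraps it once so the value is computed only once at run time.

     tree val = save_expr (expr);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (val), val, val);

   If EXPR is already invariant (a constant, say), save_expr simply returns
   it unchanged and no SAVE_EXPR node is created.  */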
3993 /* Look inside EXPR into any simple arithmetic operations. Return the
3994 outermost non-arithmetic or non-invariant node. */
3996 tree
3997 skip_simple_arithmetic (tree expr)
3999 /* We don't care about whether this can be used as an lvalue in this
4000 context. */
4001 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4002 expr = TREE_OPERAND (expr, 0);
4004 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
4005 a constant, it will be more efficient to not make another SAVE_EXPR since
4006 it will allow better simplification and GCSE will be able to merge the
4007 computations if they actually occur. */
4008 while (true)
4010 if (UNARY_CLASS_P (expr))
4011 expr = TREE_OPERAND (expr, 0);
4012 else if (BINARY_CLASS_P (expr))
4014 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
4015 expr = TREE_OPERAND (expr, 0);
4016 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
4017 expr = TREE_OPERAND (expr, 1);
4018 else
4019 break;
4021 else
4022 break;
4025 return expr;
4028 /* Look inside EXPR into simple arithmetic operations involving constants.
4029 Return the outermost non-arithmetic or non-constant node. */
4031 tree
4032 skip_simple_constant_arithmetic (tree expr)
4034 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4035 expr = TREE_OPERAND (expr, 0);
4037 while (true)
4039 if (UNARY_CLASS_P (expr))
4040 expr = TREE_OPERAND (expr, 0);
4041 else if (BINARY_CLASS_P (expr))
4043 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4044 expr = TREE_OPERAND (expr, 0);
4045 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4046 expr = TREE_OPERAND (expr, 1);
4047 else
4048 break;
4050 else
4051 break;
4054 return expr;
4057 /* Return which tree structure is used by T. */
4059 enum tree_node_structure_enum
4060 tree_node_structure (const_tree t)
4062 const enum tree_code code = TREE_CODE (t);
4063 return tree_node_structure_for_code (code);
4066 /* Set various status flags when building a CALL_EXPR object T. */
4068 static void
4069 process_call_operands (tree t)
4071 bool side_effects = TREE_SIDE_EFFECTS (t);
4072 bool read_only = false;
4073 int i = call_expr_flags (t);
4075 /* Calls have side-effects, except those to const or pure functions. */
4076 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4077 side_effects = true;
4078 /* Propagate TREE_READONLY of arguments for const functions. */
4079 if (i & ECF_CONST)
4080 read_only = true;
4082 if (!side_effects || read_only)
4083 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4085 tree op = TREE_OPERAND (t, i);
4086 if (op && TREE_SIDE_EFFECTS (op))
4087 side_effects = true;
4088 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4089 read_only = false;
4092 TREE_SIDE_EFFECTS (t) = side_effects;
4093 TREE_READONLY (t) = read_only;
4096 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4097 size or offset that depends on a field within a record. */
4099 bool
4100 contains_placeholder_p (const_tree exp)
4102 enum tree_code code;
4104 if (!exp)
4105 return false;
4107 code = TREE_CODE (exp);
4108 if (code == PLACEHOLDER_EXPR)
4109 return true;
4111 switch (TREE_CODE_CLASS (code))
4113 case tcc_reference:
4114 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4115 position computations since they will be converted into a
4116 WITH_RECORD_EXPR involving the reference, which we assume
4117 will be valid here. */
4118 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4120 case tcc_exceptional:
4121 if (code == TREE_LIST)
4122 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4123 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4124 break;
4126 case tcc_unary:
4127 case tcc_binary:
4128 case tcc_comparison:
4129 case tcc_expression:
4130 switch (code)
4132 case COMPOUND_EXPR:
4133 /* Ignoring the first operand isn't quite right, but works best. */
4134 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4136 case COND_EXPR:
4137 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4138 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4139 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4141 case SAVE_EXPR:
4142 /* The save_expr function never wraps anything containing
4143 a PLACEHOLDER_EXPR. */
4144 return false;
4146 default:
4147 break;
4150 switch (TREE_CODE_LENGTH (code))
4152 case 1:
4153 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4154 case 2:
4155 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4156 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4157 default:
4158 return false;
4161 case tcc_vl_exp:
4162 switch (code)
4164 case CALL_EXPR:
4166 const_tree arg;
4167 const_call_expr_arg_iterator iter;
4168 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4169 if (CONTAINS_PLACEHOLDER_P (arg))
4170 return true;
4171 return false;
4173 default:
4174 return false;
4177 default:
4178 return false;
4180 return false;
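/* Illustrative sketch (hypothetical trees, not original source): a size
   expression of a self-referential type, e.g. an Ada-style
   "<PLACEHOLDER_EXPR>.len * 4", makes contains_placeholder_p return
   true, while a plain constant size does not:

     contains_placeholder_p (size_with_placeholder)   returns true
     contains_placeholder_p (size_int (16))           returns false  */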
4183 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4184 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4185 field positions. */
4187 static bool
4188 type_contains_placeholder_1 (const_tree type)
4190 /* If the size contains a placeholder or the parent type (component type in
4191 the case of arrays) type involves a placeholder, this type does. */
4192 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4193 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4194 || (!POINTER_TYPE_P (type)
4195 && TREE_TYPE (type)
4196 && type_contains_placeholder_p (TREE_TYPE (type))))
4197 return true;
4199 /* Now do type-specific checks. Note that the last part of the check above
4200 greatly limits what we have to do below. */
4201 switch (TREE_CODE (type))
4203 case VOID_TYPE:
4204 case OPAQUE_TYPE:
4205 case COMPLEX_TYPE:
4206 case ENUMERAL_TYPE:
4207 case BOOLEAN_TYPE:
4208 case POINTER_TYPE:
4209 case OFFSET_TYPE:
4210 case REFERENCE_TYPE:
4211 case METHOD_TYPE:
4212 case FUNCTION_TYPE:
4213 case VECTOR_TYPE:
4214 case NULLPTR_TYPE:
4215 return false;
4217 case INTEGER_TYPE:
4218 case REAL_TYPE:
4219 case FIXED_POINT_TYPE:
4220 /* Here we just check the bounds. */
4221 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4222 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4224 case ARRAY_TYPE:
4225 /* We have already checked the component type above, so just check
4226 the domain type. Flexible array members have a null domain. */
4227 return TYPE_DOMAIN (type) ?
4228 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4230 case RECORD_TYPE:
4231 case UNION_TYPE:
4232 case QUAL_UNION_TYPE:
4234 tree field;
4236 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4237 if (TREE_CODE (field) == FIELD_DECL
4238 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4239 || (TREE_CODE (type) == QUAL_UNION_TYPE
4240 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4241 || type_contains_placeholder_p (TREE_TYPE (field))))
4242 return true;
4244 return false;
4247 default:
4248 gcc_unreachable ();
4252 /* Wrapper around the above function, used to cache its result. */
4254 bool
4255 type_contains_placeholder_p (tree type)
4257 bool result;
4259 /* If the contains_placeholder_bits field has been initialized,
4260 then we know the answer. */
4261 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4262 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4264 /* Indicate that we've seen this type node, and the answer is false.
4265 This is what we want to return if we run into recursion via fields. */
4266 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4268 /* Compute the real value. */
4269 result = type_contains_placeholder_1 (type);
4271 /* Store the real value. */
4272 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4274 return result;
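/* The encoding used by the cache above, spelled out (illustrative note,
   not original source):

     TYPE_CONTAINS_PLACEHOLDER_INTERNAL (t) == 0   not yet computed
     TYPE_CONTAINS_PLACEHOLDER_INTERNAL (t) == 1   computed, answer is false
     TYPE_CONTAINS_PLACEHOLDER_INTERNAL (t) == 2   computed, answer is true

   so "value - 1" recovers the cached boolean, and pre-seeding the field
   with 1 makes recursive queries on the same type return false while the
   real answer is still being computed.  */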
4277 /* Push tree EXP onto vector QUEUE if it is not already present. */
4279 static void
4280 push_without_duplicates (tree exp, vec<tree> *queue)
4282 unsigned int i;
4283 tree iter;
4285 FOR_EACH_VEC_ELT (*queue, i, iter)
4286 if (simple_cst_equal (iter, exp) == 1)
4287 break;
4289 if (!iter)
4290 queue->safe_push (exp);
4293 /* Given a tree EXP, find all occurrences of references to fields
4294 in a PLACEHOLDER_EXPR and place them in vector REFS without
4295 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4296 we assume here that EXP contains only arithmetic expressions
4297 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4298 argument list. */
4300 void
4301 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4303 enum tree_code code = TREE_CODE (exp);
4304 tree inner;
4305 int i;
4307 /* We handle TREE_LIST and COMPONENT_REF separately. */
4308 if (code == TREE_LIST)
4310 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4311 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4313 else if (code == COMPONENT_REF)
4315 for (inner = TREE_OPERAND (exp, 0);
4316 REFERENCE_CLASS_P (inner);
4317 inner = TREE_OPERAND (inner, 0))
4320 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4321 push_without_duplicates (exp, refs);
4322 else
4323 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4325 else
4326 switch (TREE_CODE_CLASS (code))
4328 case tcc_constant:
4329 break;
4331 case tcc_declaration:
4332 /* Variables allocated to static storage can stay. */
4333 if (!TREE_STATIC (exp))
4334 push_without_duplicates (exp, refs);
4335 break;
4337 case tcc_expression:
4338 /* This is the pattern built in ada/make_aligning_type. */
4339 if (code == ADDR_EXPR
4340 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4342 push_without_duplicates (exp, refs);
4343 break;
4346 /* Fall through. */
4348 case tcc_exceptional:
4349 case tcc_unary:
4350 case tcc_binary:
4351 case tcc_comparison:
4352 case tcc_reference:
4353 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4354 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4355 break;
4357 case tcc_vl_exp:
4358 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4359 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4360 break;
4362 default:
4363 gcc_unreachable ();
4367 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4368 return a tree with all occurrences of references to F in a
4369 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4370 CONST_DECLs. Note that we assume here that EXP contains only
4371 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4372 occurring only in their argument list. */
4374 tree
4375 substitute_in_expr (tree exp, tree f, tree r)
4377 enum tree_code code = TREE_CODE (exp);
4378 tree op0, op1, op2, op3;
4379 tree new_tree;
4381 /* We handle TREE_LIST and COMPONENT_REF separately. */
4382 if (code == TREE_LIST)
4384 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4385 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4386 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4387 return exp;
4389 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4391 else if (code == COMPONENT_REF)
4393 tree inner;
4395 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4396 and it is the right field, replace it with R. */
4397 for (inner = TREE_OPERAND (exp, 0);
4398 REFERENCE_CLASS_P (inner);
4399 inner = TREE_OPERAND (inner, 0))
4402 /* The field. */
4403 op1 = TREE_OPERAND (exp, 1);
4405 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4406 return r;
4408 /* If this expression hasn't been completed yet, leave it alone. */
4409 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4410 return exp;
4412 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4413 if (op0 == TREE_OPERAND (exp, 0))
4414 return exp;
4416 new_tree
4417 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4419 else
4420 switch (TREE_CODE_CLASS (code))
4422 case tcc_constant:
4423 return exp;
4425 case tcc_declaration:
4426 if (exp == f)
4427 return r;
4428 else
4429 return exp;
4431 case tcc_expression:
4432 if (exp == f)
4433 return r;
4435 /* Fall through. */
4437 case tcc_exceptional:
4438 case tcc_unary:
4439 case tcc_binary:
4440 case tcc_comparison:
4441 case tcc_reference:
4442 switch (TREE_CODE_LENGTH (code))
4444 case 0:
4445 return exp;
4447 case 1:
4448 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4449 if (op0 == TREE_OPERAND (exp, 0))
4450 return exp;
4452 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4453 break;
4455 case 2:
4456 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4457 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4459 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4460 return exp;
4462 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4463 break;
4465 case 3:
4466 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4467 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4468 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4470 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4471 && op2 == TREE_OPERAND (exp, 2))
4472 return exp;
4474 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4475 break;
4477 case 4:
4478 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4479 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4480 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4481 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4483 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4484 && op2 == TREE_OPERAND (exp, 2)
4485 && op3 == TREE_OPERAND (exp, 3))
4486 return exp;
4488 new_tree
4489 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4490 break;
4492 default:
4493 gcc_unreachable ();
4495 break;
4497 case tcc_vl_exp:
4499 int i;
4501 new_tree = NULL_TREE;
4503 /* If we are trying to replace F with a constant or with another
4504 instance of one of the arguments of the call, inline back
4505 functions which do nothing else than computing a value from
4506 the arguments they are passed. This makes it possible to
4507 fold partially or entirely the replacement expression. */
4508 if (code == CALL_EXPR)
4510 bool maybe_inline = false;
4511 if (CONSTANT_CLASS_P (r))
4512 maybe_inline = true;
4513 else
4514 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4515 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4517 maybe_inline = true;
4518 break;
4520 if (maybe_inline)
4522 tree t = maybe_inline_call_in_expr (exp);
4523 if (t)
4524 return SUBSTITUTE_IN_EXPR (t, f, r);
4528 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4530 tree op = TREE_OPERAND (exp, i);
4531 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4532 if (new_op != op)
4534 if (!new_tree)
4535 new_tree = copy_node (exp);
4536 TREE_OPERAND (new_tree, i) = new_op;
4540 if (new_tree)
4542 new_tree = fold (new_tree);
4543 if (TREE_CODE (new_tree) == CALL_EXPR)
4544 process_call_operands (new_tree);
4546 else
4547 return exp;
4549 break;
4551 default:
4552 gcc_unreachable ();
4555 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4557 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4558 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4560 return new_tree;
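/* Illustrative sketch (hypothetical field and trees, not original
   source): replacing references to field F of a PLACEHOLDER_EXPR by an
   actual value R, e.g. turning the self-referential bound
   "<PLACEHOLDER_EXPR>.len - 1" into "n - 1":

     tree new_bound = substitute_in_expr (old_bound, len_field, n);

   Subtrees that do not mention F come back pointer-equal to the
   original, so sharing is preserved and nothing is rebuilt needlessly.  */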
4563 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4564 for it within OBJ, a tree that is an object or a chain of references. */
4566 tree
4567 substitute_placeholder_in_expr (tree exp, tree obj)
4569 enum tree_code code = TREE_CODE (exp);
4570 tree op0, op1, op2, op3;
4571 tree new_tree;
4573 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4574 in the chain of OBJ. */
4575 if (code == PLACEHOLDER_EXPR)
4577 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4578 tree elt;
4580 for (elt = obj; elt != 0;
4581 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4582 || TREE_CODE (elt) == COND_EXPR)
4583 ? TREE_OPERAND (elt, 1)
4584 : (REFERENCE_CLASS_P (elt)
4585 || UNARY_CLASS_P (elt)
4586 || BINARY_CLASS_P (elt)
4587 || VL_EXP_CLASS_P (elt)
4588 || EXPRESSION_CLASS_P (elt))
4589 ? TREE_OPERAND (elt, 0) : 0))
4590 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4591 return elt;
4593 for (elt = obj; elt != 0;
4594 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4595 || TREE_CODE (elt) == COND_EXPR)
4596 ? TREE_OPERAND (elt, 1)
4597 : (REFERENCE_CLASS_P (elt)
4598 || UNARY_CLASS_P (elt)
4599 || BINARY_CLASS_P (elt)
4600 || VL_EXP_CLASS_P (elt)
4601 || EXPRESSION_CLASS_P (elt))
4602 ? TREE_OPERAND (elt, 0) : 0))
4603 if (POINTER_TYPE_P (TREE_TYPE (elt))
4604 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4605 == need_type))
4606 return fold_build1 (INDIRECT_REF, need_type, elt);
4608 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4609 survives until RTL generation, there will be an error. */
4610 return exp;
4613 /* TREE_LIST is special because we need to look at TREE_VALUE
4614 and TREE_CHAIN, not TREE_OPERANDS. */
4615 else if (code == TREE_LIST)
4617 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4618 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4619 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4620 return exp;
4622 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4624 else
4625 switch (TREE_CODE_CLASS (code))
4627 case tcc_constant:
4628 case tcc_declaration:
4629 return exp;
4631 case tcc_exceptional:
4632 case tcc_unary:
4633 case tcc_binary:
4634 case tcc_comparison:
4635 case tcc_expression:
4636 case tcc_reference:
4637 case tcc_statement:
4638 switch (TREE_CODE_LENGTH (code))
4640 case 0:
4641 return exp;
4643 case 1:
4644 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4645 if (op0 == TREE_OPERAND (exp, 0))
4646 return exp;
4648 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4649 break;
4651 case 2:
4652 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4653 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4655 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4656 return exp;
4658 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4659 break;
4661 case 3:
4662 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4663 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4664 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4666 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4667 && op2 == TREE_OPERAND (exp, 2))
4668 return exp;
4670 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4671 break;
4673 case 4:
4674 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4675 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4676 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4677 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4679 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4680 && op2 == TREE_OPERAND (exp, 2)
4681 && op3 == TREE_OPERAND (exp, 3))
4682 return exp;
4684 new_tree
4685 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4686 break;
4688 default:
4689 gcc_unreachable ();
4691 break;
4693 case tcc_vl_exp:
4695 int i;
4697 new_tree = NULL_TREE;
4699 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4701 tree op = TREE_OPERAND (exp, i);
4702 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4703 if (new_op != op)
4705 if (!new_tree)
4706 new_tree = copy_node (exp);
4707 TREE_OPERAND (new_tree, i) = new_op;
4711 if (new_tree)
4713 new_tree = fold (new_tree);
4714 if (TREE_CODE (new_tree) == CALL_EXPR)
4715 process_call_operands (new_tree);
4717 else
4718 return exp;
4720 break;
4722 default:
4723 gcc_unreachable ();
4726 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4728 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4729 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4731 return new_tree;
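/* Illustrative sketch (hypothetical trees, not original source): once a
   concrete object OBJ of the self-referential type is known, each
   PLACEHOLDER_EXPR of that type in EXP is replaced either by OBJ itself
   or by an INDIRECT_REF of a pointer to it found along OBJ's reference
   chain, e.g.

     tree size = substitute_placeholder_in_expr (TYPE_SIZE (type), obj);

   If no matching object is found, the PLACEHOLDER_EXPR is left in place
   and will be diagnosed later, as described above.  */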
4735 /* Subroutine of stabilize_reference; this is called for subtrees of
4736 references. Any expression with side-effects must be put in a SAVE_EXPR
4737 to ensure that it is only evaluated once.
4739 We don't put SAVE_EXPR nodes around everything, because assigning very
4740 simple expressions to temporaries causes us to miss good opportunities
4741 for optimizations. Among other things, the opportunity to fold in the
4742 addition of a constant into an addressing mode often gets lost, e.g.
4743 "y[i+1] += x;". In general, we take the approach that we should not make
4744 an assignment unless we are forced into it - i.e., that any non-side effect
4745 operator should be allowed, and that cse should take care of coalescing
4746 multiple utterances of the same expression should that prove fruitful. */
4748 static tree
4749 stabilize_reference_1 (tree e)
4751 tree result;
4752 enum tree_code code = TREE_CODE (e);
4754 /* We cannot ignore const expressions because it might be a reference
4755 to a const array but whose index contains side-effects. But we can
4756 ignore things that are actual constant or that already have been
4757 handled by this function. */
4759 if (tree_invariant_p (e))
4760 return e;
4762 switch (TREE_CODE_CLASS (code))
4764 case tcc_exceptional:
4765 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4766 have side-effects. */
4767 if (code == STATEMENT_LIST)
4768 return save_expr (e);
4769 /* FALLTHRU */
4770 case tcc_type:
4771 case tcc_declaration:
4772 case tcc_comparison:
4773 case tcc_statement:
4774 case tcc_expression:
4775 case tcc_reference:
4776 case tcc_vl_exp:
4777 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4778 so that it will only be evaluated once. */
4779 /* The reference (r) and comparison (<) classes could be handled as
4780 below, but it is generally faster to only evaluate them once. */
4781 if (TREE_SIDE_EFFECTS (e))
4782 return save_expr (e);
4783 return e;
4785 case tcc_constant:
4786 /* Constants need no processing. In fact, we should never reach
4787 here. */
4788 return e;
4790 case tcc_binary:
4791 /* Division is slow and tends to be compiled with jumps,
4792 especially the division by powers of 2 that is often
4793 found inside of an array reference. So do it just once. */
4794 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4795 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4796 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4797 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4798 return save_expr (e);
4799 /* Recursively stabilize each operand. */
4800 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4801 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4802 break;
4804 case tcc_unary:
4805 /* Recursively stabilize each operand. */
4806 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4807 break;
4809 default:
4810 gcc_unreachable ();
4813 TREE_TYPE (result) = TREE_TYPE (e);
4814 TREE_READONLY (result) = TREE_READONLY (e);
4815 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4816 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4818 return result;
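/* Illustrative sketch (hypothetical trees, not original source): for an
   index like "i / 2" the division is wrapped so that it is computed only
   once, while a cheap "i + 1" is merely rebuilt and left for CSE:

     stabilize_reference_1 (i_div_2)    yields SAVE_EXPR <i / 2>
     stabilize_reference_1 (i_plus_1)   yields i + 1, rebuilt but not saved  */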
4821 /* Stabilize a reference so that we can use it any number of times
4822 without causing its operands to be evaluated more than once.
4823 Returns the stabilized reference. This works by means of save_expr,
4824 so see the caveats in the comments about save_expr.
4826 Also allows conversion expressions whose operands are references.
4827 Any other kind of expression is returned unchanged. */
4829 tree
4830 stabilize_reference (tree ref)
4832 tree result;
4833 enum tree_code code = TREE_CODE (ref);
4835 switch (code)
4837 case VAR_DECL:
4838 case PARM_DECL:
4839 case RESULT_DECL:
4840 /* No action is needed in this case. */
4841 return ref;
4843 CASE_CONVERT:
4844 case FLOAT_EXPR:
4845 case FIX_TRUNC_EXPR:
4846 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4847 break;
4849 case INDIRECT_REF:
4850 result = build_nt (INDIRECT_REF,
4851 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4852 break;
4854 case COMPONENT_REF:
4855 result = build_nt (COMPONENT_REF,
4856 stabilize_reference (TREE_OPERAND (ref, 0)),
4857 TREE_OPERAND (ref, 1), NULL_TREE);
4858 break;
4860 case BIT_FIELD_REF:
4861 result = build_nt (BIT_FIELD_REF,
4862 stabilize_reference (TREE_OPERAND (ref, 0)),
4863 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4864 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4865 break;
4867 case ARRAY_REF:
4868 result = build_nt (ARRAY_REF,
4869 stabilize_reference (TREE_OPERAND (ref, 0)),
4870 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4871 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4872 break;
4874 case ARRAY_RANGE_REF:
4875 result = build_nt (ARRAY_RANGE_REF,
4876 stabilize_reference (TREE_OPERAND (ref, 0)),
4877 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4878 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4879 break;
4881 case COMPOUND_EXPR:
4882 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4883 it wouldn't be ignored. This matters when dealing with
4884 volatiles. */
4885 return stabilize_reference_1 (ref);
4887 /* If arg isn't a kind of lvalue we recognize, make no change.
4888 Caller should recognize the error for an invalid lvalue. */
4889 default:
4890 return ref;
4892 case ERROR_MARK:
4893 return error_mark_node;
4896 TREE_TYPE (result) = TREE_TYPE (ref);
4897 TREE_READONLY (result) = TREE_READONLY (ref);
4898 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4899 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4900 protected_set_expr_location (result, EXPR_LOCATION (ref));
4902 return result;
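/* Illustrative sketch (hypothetical trees, not original source): a front
   end expanding something like "a[f ()] += 1" needs to read and write
   the same location, so it first stabilizes the reference:

     tree lhs = stabilize_reference (ref);

   Every use of LHS then names the same element; the side-effecting index
   computation ends up inside a SAVE_EXPR and runs only once.  */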
4905 /* Low-level constructors for expressions. */
4907 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4908 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4910 void
4911 recompute_tree_invariant_for_addr_expr (tree t)
4913 tree node;
4914 bool tc = true, se = false;
4916 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4918 /* We started out assuming this address is both invariant and constant, and
4919 that it has no side effects. Now go down any handled components and see if
4920 any of them involve offsets that are either non-constant or non-invariant.
4921 Also check for side-effects.
4923 ??? Note that this code makes no attempt to deal with the case where
4924 taking the address of something causes a copy due to misalignment. */
4926 #define UPDATE_FLAGS(NODE) \
4927 do { tree _node = (NODE); \
4928 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4929 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4931 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4932 node = TREE_OPERAND (node, 0))
4934 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4935 array reference (probably made temporarily by the G++ front end),
4936 so ignore all the operands. */
4937 if ((TREE_CODE (node) == ARRAY_REF
4938 || TREE_CODE (node) == ARRAY_RANGE_REF)
4939 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4941 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4942 if (TREE_OPERAND (node, 2))
4943 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4944 if (TREE_OPERAND (node, 3))
4945 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4947 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4948 FIELD_DECL, apparently. The G++ front end can put something else
4949 there, at least temporarily. */
4950 else if (TREE_CODE (node) == COMPONENT_REF
4951 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4953 if (TREE_OPERAND (node, 2))
4954 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4958 node = lang_hooks.expr_to_decl (node, &tc, &se);
4960 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4961 the address, since &(*a)->b is a form of addition. If it's a constant, the
4962 address is constant too. If it's a decl, its address is constant if the
4963 decl is static. Everything else is not constant and, furthermore,
4964 taking the address of a volatile variable is not volatile. */
4965 if (INDIRECT_REF_P (node)
4966 || TREE_CODE (node) == MEM_REF)
4967 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4968 else if (CONSTANT_CLASS_P (node))
4970 else if (DECL_P (node))
4971 tc &= (staticp (node) != NULL_TREE);
4972 else
4974 tc = false;
4975 se |= TREE_SIDE_EFFECTS (node);
4979 TREE_CONSTANT (t) = tc;
4980 TREE_SIDE_EFFECTS (t) = se;
4981 #undef UPDATE_FLAGS
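/* Illustrative sketch (hypothetical decls, not original source): the
   flags computed above make the address of a static object constant
   when every offset in the access path is constant:

     build_fold_addr_expr (static_var)           TREE_CONSTANT is set
     build_fold_addr_expr (static_arr_elt_3)     TREE_CONSTANT is set
     build_fold_addr_expr (static_arr_elt_i)     not constant when I is not

   while the address of an INDIRECT_REF simply inherits constancy and
   side effects from the pointer expression itself.  */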
4984 /* Build an expression of code CODE, data type TYPE, and operands as
4985 specified. Expressions and reference nodes can be created this way.
4986 Constants, decls, types and misc nodes cannot be.
4988 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4989 enough for all extant tree codes. */
4991 tree
4992 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4994 tree t;
4996 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4998 t = make_node (code PASS_MEM_STAT);
4999 TREE_TYPE (t) = tt;
5001 return t;
5004 tree
5005 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
5007 int length = sizeof (struct tree_exp);
5008 tree t;
5010 record_node_allocation_statistics (code, length);
5012 gcc_assert (TREE_CODE_LENGTH (code) == 1);
5014 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
5016 memset (t, 0, sizeof (struct tree_common));
5018 TREE_SET_CODE (t, code);
5020 TREE_TYPE (t) = type;
5021 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
5022 TREE_OPERAND (t, 0) = node;
5023 if (node && !TYPE_P (node))
5025 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
5026 TREE_READONLY (t) = TREE_READONLY (node);
5029 if (TREE_CODE_CLASS (code) == tcc_statement)
5031 if (code != DEBUG_BEGIN_STMT)
5032 TREE_SIDE_EFFECTS (t) = 1;
5034 else switch (code)
5036 case VA_ARG_EXPR:
5037 /* All of these have side-effects, no matter what their
5038 operands are. */
5039 TREE_SIDE_EFFECTS (t) = 1;
5040 TREE_READONLY (t) = 0;
5041 break;
5043 case INDIRECT_REF:
5044 /* Whether a dereference is readonly has nothing to do with whether
5045 its operand is readonly. */
5046 TREE_READONLY (t) = 0;
5047 break;
5049 case ADDR_EXPR:
5050 if (node)
5051 recompute_tree_invariant_for_addr_expr (t);
5052 break;
5054 default:
5055 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5056 && node && !TYPE_P (node)
5057 && TREE_CONSTANT (node))
5058 TREE_CONSTANT (t) = 1;
5059 if (TREE_CODE_CLASS (code) == tcc_reference
5060 && node && TREE_THIS_VOLATILE (node))
5061 TREE_THIS_VOLATILE (t) = 1;
5062 break;
5065 return t;
5068 #define PROCESS_ARG(N) \
5069 do { \
5070 TREE_OPERAND (t, N) = arg##N; \
5071 if (arg##N && !TYPE_P (arg##N)) \
5073 if (TREE_SIDE_EFFECTS (arg##N)) \
5074 side_effects = 1; \
5075 if (!TREE_READONLY (arg##N) \
5076 && !CONSTANT_CLASS_P (arg##N)) \
5077 (void) (read_only = 0); \
5078 if (!TREE_CONSTANT (arg##N)) \
5079 (void) (constant = 0); \
5081 } while (0)
5083 tree
5084 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5086 bool constant, read_only, side_effects, div_by_zero;
5087 tree t;
5089 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5091 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5092 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5093 /* When sizetype precision doesn't match that of pointers
5094 we need to be able to build explicit extensions or truncations
5095 of the offset argument. */
5096 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5097 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5098 && TREE_CODE (arg1) == INTEGER_CST);
5100 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5101 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5102 && ptrofftype_p (TREE_TYPE (arg1)));
5104 t = make_node (code PASS_MEM_STAT);
5105 TREE_TYPE (t) = tt;
5107 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5108 result based on those same flags for the arguments. But if the
5109 arguments aren't really even `tree' expressions, we shouldn't be trying
5110 to do this. */
5112 /* Expressions without side effects may be constant if their
5113 arguments are as well. */
5114 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5115 || TREE_CODE_CLASS (code) == tcc_binary);
5116 read_only = 1;
5117 side_effects = TREE_SIDE_EFFECTS (t);
5119 switch (code)
5121 case TRUNC_DIV_EXPR:
5122 case CEIL_DIV_EXPR:
5123 case FLOOR_DIV_EXPR:
5124 case ROUND_DIV_EXPR:
5125 case EXACT_DIV_EXPR:
5126 case CEIL_MOD_EXPR:
5127 case FLOOR_MOD_EXPR:
5128 case ROUND_MOD_EXPR:
5129 case TRUNC_MOD_EXPR:
5130 div_by_zero = integer_zerop (arg1);
5131 break;
5132 default:
5133 div_by_zero = false;
5136 PROCESS_ARG (0);
5137 PROCESS_ARG (1);
5139 TREE_SIDE_EFFECTS (t) = side_effects;
5140 if (code == MEM_REF)
5142 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5144 tree o = TREE_OPERAND (arg0, 0);
5145 TREE_READONLY (t) = TREE_READONLY (o);
5146 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5149 else
5151 TREE_READONLY (t) = read_only;
5152 /* Don't mark X / 0 as constant. */
5153 TREE_CONSTANT (t) = constant && !div_by_zero;
5154 TREE_THIS_VOLATILE (t)
5155 = (TREE_CODE_CLASS (code) == tcc_reference
5156 && arg0 && TREE_THIS_VOLATILE (arg0));
5159 return t;
5163 tree
5164 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5165 tree arg2 MEM_STAT_DECL)
5167 bool constant, read_only, side_effects;
5168 tree t;
5170 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5171 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5173 t = make_node (code PASS_MEM_STAT);
5174 TREE_TYPE (t) = tt;
5176 read_only = 1;
5178 /* As a special exception, if COND_EXPR has NULL branches, we
5179 assume that it is a gimple statement and always consider
5180 it to have side effects. */
5181 if (code == COND_EXPR
5182 && tt == void_type_node
5183 && arg1 == NULL_TREE
5184 && arg2 == NULL_TREE)
5185 side_effects = true;
5186 else
5187 side_effects = TREE_SIDE_EFFECTS (t);
5189 PROCESS_ARG (0);
5190 PROCESS_ARG (1);
5191 PROCESS_ARG (2);
5193 if (code == COND_EXPR)
5194 TREE_READONLY (t) = read_only;
5196 TREE_SIDE_EFFECTS (t) = side_effects;
5197 TREE_THIS_VOLATILE (t)
5198 = (TREE_CODE_CLASS (code) == tcc_reference
5199 && arg0 && TREE_THIS_VOLATILE (arg0));
5201 return t;
5204 tree
5205 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5206 tree arg2, tree arg3 MEM_STAT_DECL)
5208 bool constant, read_only, side_effects;
5209 tree t;
5211 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5213 t = make_node (code PASS_MEM_STAT);
5214 TREE_TYPE (t) = tt;
5216 side_effects = TREE_SIDE_EFFECTS (t);
5218 PROCESS_ARG (0);
5219 PROCESS_ARG (1);
5220 PROCESS_ARG (2);
5221 PROCESS_ARG (3);
5223 TREE_SIDE_EFFECTS (t) = side_effects;
5224 TREE_THIS_VOLATILE (t)
5225 = (TREE_CODE_CLASS (code) == tcc_reference
5226 && arg0 && TREE_THIS_VOLATILE (arg0));
5228 return t;
5231 tree
5232 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5233 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5235 bool constant, read_only, side_effects;
5236 tree t;
5238 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5240 t = make_node (code PASS_MEM_STAT);
5241 TREE_TYPE (t) = tt;
5243 side_effects = TREE_SIDE_EFFECTS (t);
5245 PROCESS_ARG (0);
5246 PROCESS_ARG (1);
5247 PROCESS_ARG (2);
5248 PROCESS_ARG (3);
5249 PROCESS_ARG (4);
5251 TREE_SIDE_EFFECTS (t) = side_effects;
5252 if (code == TARGET_MEM_REF)
5254 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5256 tree o = TREE_OPERAND (arg0, 0);
5257 TREE_READONLY (t) = TREE_READONLY (o);
5258 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5261 else
5262 TREE_THIS_VOLATILE (t)
5263 = (TREE_CODE_CLASS (code) == tcc_reference
5264 && arg0 && TREE_THIS_VOLATILE (arg0));
5266 return t;
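/* Illustrative sketch (not original source): the buildN functions above
   only allocate the node, wire up the operands and derive flags such as
   TREE_SIDE_EFFECTS and TREE_CONSTANT; they do no simplification:

     tree sum = build2 (PLUS_EXPR, integer_type_node,
                        integer_one_node, integer_one_node);

   SUM is the unsimplified expression "1 + 1" (TREE_CONSTANT, no side
   effects); use fold_build2 instead to get the folded constant 2.  */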
5269 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5270 on the pointer PTR. */
5272 tree
5273 build_simple_mem_ref_loc (location_t loc, tree ptr)
5275 poly_int64 offset = 0;
5276 tree ptype = TREE_TYPE (ptr);
5277 tree tem;
5278 /* For convenience allow addresses that collapse to a simple base
5279 and offset. */
5280 if (TREE_CODE (ptr) == ADDR_EXPR
5281 && (handled_component_p (TREE_OPERAND (ptr, 0))
5282 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5284 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5285 gcc_assert (ptr);
5286 if (TREE_CODE (ptr) == MEM_REF)
5288 offset += mem_ref_offset (ptr).force_shwi ();
5289 ptr = TREE_OPERAND (ptr, 0);
5291 else
5292 ptr = build_fold_addr_expr (ptr);
5293 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5295 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5296 ptr, build_int_cst (ptype, offset));
5297 SET_EXPR_LOCATION (tem, loc);
5298 return tem;
5301 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5303 poly_offset_int
5304 mem_ref_offset (const_tree t)
5306 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5307 SIGNED);
5310 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5311 offsetted by OFFSET units. */
5313 tree
5314 build_invariant_address (tree type, tree base, poly_int64 offset)
5316 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5317 build_fold_addr_expr (base),
5318 build_int_cst (ptr_type_node, offset));
5319 tree addr = build1 (ADDR_EXPR, type, ref);
5320 recompute_tree_invariant_for_addr_expr (addr);
5321 return addr;
5324 /* Similar except don't specify the TREE_TYPE
5325 and leave the TREE_SIDE_EFFECTS as 0.
5326 It is permissible for arguments to be null,
5327 or even garbage if their values do not matter. */
5329 tree
5330 build_nt (enum tree_code code, ...)
5332 tree t;
5333 int length;
5334 int i;
5335 va_list p;
5337 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5339 va_start (p, code);
5341 t = make_node (code);
5342 length = TREE_CODE_LENGTH (code);
5344 for (i = 0; i < length; i++)
5345 TREE_OPERAND (t, i) = va_arg (p, tree);
5347 va_end (p);
5348 return t;
5351 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5352 tree vec. */
5354 tree
5355 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5357 tree ret, t;
5358 unsigned int ix;
5360 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5361 CALL_EXPR_FN (ret) = fn;
5362 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5363 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5364 CALL_EXPR_ARG (ret, ix) = t;
5365 return ret;
5368 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5369 and data type TYPE.
5370 We do NOT enter this node in any sort of symbol table.
5372 LOC is the location of the decl.
5374 layout_decl is used to set up the decl's storage layout.
5375 Other slots are initialized to 0 or null pointers. */
5377 tree
5378 build_decl (location_t loc, enum tree_code code, tree name,
5379 tree type MEM_STAT_DECL)
5381 tree t;
5383 t = make_node (code PASS_MEM_STAT);
5384 DECL_SOURCE_LOCATION (t) = loc;
5386 /* if (type == error_mark_node)
5387 type = integer_type_node; */
5388 /* That is not done, deliberately, so that having error_mark_node
5389 as the type can suppress useless errors in the use of this variable. */
5391 DECL_NAME (t) = name;
5392 TREE_TYPE (t) = type;
5394 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5395 layout_decl (t, 0);
5397 return t;
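/* Illustrative sketch (hypothetical name, not original source): creating
   an artificial temporary variable of type integer_type_node:

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
     DECL_ARTIFICIAL (var) = 1;

   Because the code is VAR_DECL, layout_decl has already filled in the
   decl's size, mode and alignment from its type.  */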
5400 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5402 tree
5403 build_debug_expr_decl (tree type)
5405 tree vexpr = make_node (DEBUG_EXPR_DECL);
5406 DECL_ARTIFICIAL (vexpr) = 1;
5407 TREE_TYPE (vexpr) = type;
5408 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5409 return vexpr;
5412 /* Builds and returns function declaration with NAME and TYPE. */
5414 tree
5415 build_fn_decl (const char *name, tree type)
5417 tree id = get_identifier (name);
5418 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5420 DECL_EXTERNAL (decl) = 1;
5421 TREE_PUBLIC (decl) = 1;
5422 DECL_ARTIFICIAL (decl) = 1;
5423 TREE_NOTHROW (decl) = 1;
5425 return decl;
5428 vec<tree, va_gc> *all_translation_units;
5430 /* Builds a new translation-unit decl with name NAME, queues it in the
5431 global list of translation-unit decls and returns it. */
5433 tree
5434 build_translation_unit_decl (tree name)
5436 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5437 name, NULL_TREE);
5438 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5439 vec_safe_push (all_translation_units, tu);
5440 return tu;
5444 /* BLOCK nodes are used to represent the structure of binding contours
5445 and declarations, once those contours have been exited and their contents
5446 compiled. This information is used for outputting debugging info. */
5448 tree
5449 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5451 tree block = make_node (BLOCK);
5453 BLOCK_VARS (block) = vars;
5454 BLOCK_SUBBLOCKS (block) = subblocks;
5455 BLOCK_SUPERCONTEXT (block) = supercontext;
5456 BLOCK_CHAIN (block) = chain;
5457 return block;
5461 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5463 LOC is the location to use in tree T. */
5465 void
5466 protected_set_expr_location (tree t, location_t loc)
5468 if (CAN_HAVE_LOCATION_P (t))
5469 SET_EXPR_LOCATION (t, loc);
5470 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5472 t = expr_single (t);
5473 if (t && CAN_HAVE_LOCATION_P (t))
5474 SET_EXPR_LOCATION (t, loc);
5478 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5479 UNKNOWN_LOCATION. */
5481 void
5482 protected_set_expr_location_if_unset (tree t, location_t loc)
5484 t = expr_single (t);
5485 if (t && !EXPR_HAS_LOCATION (t))
5486 protected_set_expr_location (t, loc);
5489 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5490 of the various TYPE_QUAL values. */
5492 static void
5493 set_type_quals (tree type, int type_quals)
5495 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5496 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5497 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5498 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5499 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5502 /* Returns true iff CAND and BASE have equivalent language-specific
5503 qualifiers. */
5505 bool
5506 check_lang_type (const_tree cand, const_tree base)
5508 if (lang_hooks.types.type_hash_eq == NULL)
5509 return true;
5510 /* type_hash_eq currently only applies to these types. */
5511 if (TREE_CODE (cand) != FUNCTION_TYPE
5512 && TREE_CODE (cand) != METHOD_TYPE)
5513 return true;
5514 return lang_hooks.types.type_hash_eq (cand, base);
5517 /* This function checks to see if TYPE matches the size of one of the built-in
5518 atomic types, and returns that core atomic type. */
5520 static tree
5521 find_atomic_core_type (const_tree type)
5523 tree base_atomic_type;
5525 /* Only handle complete types. */
5526 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5527 return NULL_TREE;
5529 switch (tree_to_uhwi (TYPE_SIZE (type)))
5531 case 8:
5532 base_atomic_type = atomicQI_type_node;
5533 break;
5535 case 16:
5536 base_atomic_type = atomicHI_type_node;
5537 break;
5539 case 32:
5540 base_atomic_type = atomicSI_type_node;
5541 break;
5543 case 64:
5544 base_atomic_type = atomicDI_type_node;
5545 break;
5547 case 128:
5548 base_atomic_type = atomicTI_type_node;
5549 break;
5551 default:
5552 base_atomic_type = NULL_TREE;
5555 return base_atomic_type;
5558 /* Returns true iff unqualified CAND and BASE are equivalent. */
5560 bool
5561 check_base_type (const_tree cand, const_tree base)
5563 if (TYPE_NAME (cand) != TYPE_NAME (base)
5564 /* Apparently this is needed for Objective-C. */
5565 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5566 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5567 TYPE_ATTRIBUTES (base)))
5568 return false;
5569 /* Check alignment. */
5570 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5571 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5572 return true;
5573 /* Atomic types increase minimal alignment. We must do so as well
5574 or we get duplicated canonical types. See PR88686. */
5575 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5577 /* See if this object can map to a basic atomic type. */
5578 tree atomic_type = find_atomic_core_type (cand);
5579 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5580 return true;
5582 return false;
5585 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5587 bool
5588 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5590 return (TYPE_QUALS (cand) == type_quals
5591 && check_base_type (cand, base)
5592 && check_lang_type (cand, base));
5595 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5597 static bool
5598 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5600 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5601 && TYPE_NAME (cand) == TYPE_NAME (base)
5602 /* Apparently this is needed for Objective-C. */
5603 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5604 /* Check alignment. */
5605 && TYPE_ALIGN (cand) == align
5606 /* Check this is a user-aligned type as build_aligned_type
5607 would create. */
5608 && TYPE_USER_ALIGN (cand)
5609 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5610 TYPE_ATTRIBUTES (base))
5611 && check_lang_type (cand, base));
5614 /* Return a version of the TYPE, qualified as indicated by the
5615 TYPE_QUALS, if one exists. If no qualified version exists yet,
5616 return NULL_TREE. */
5618 tree
5619 get_qualified_type (tree type, int type_quals)
5621 if (TYPE_QUALS (type) == type_quals)
5622 return type;
5624 tree mv = TYPE_MAIN_VARIANT (type);
5625 if (check_qualified_type (mv, type, type_quals))
5626 return mv;
5628 /* Search the chain of variants to see if there is already one there just
5629 like the one we need to have. If so, use that existing one. We must
5630 preserve the TYPE_NAME, since there is code that depends on this. */
5631 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5632 if (check_qualified_type (*tp, type, type_quals))
5634 /* Put the found variant at the head of the variant list so
5635 frequently searched variants get found faster. The C++ FE
5636 benefits greatly from this. */
5637 tree t = *tp;
5638 *tp = TYPE_NEXT_VARIANT (t);
5639 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5640 TYPE_NEXT_VARIANT (mv) = t;
5641 return t;
5644 return NULL_TREE;
5647 /* Like get_qualified_type, but creates the type if it does not
5648 exist. This function never returns NULL_TREE. */
5650 tree
5651 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5653 tree t;
5655 /* See if we already have the appropriate qualified variant. */
5656 t = get_qualified_type (type, type_quals);
5658 /* If not, build it. */
5659 if (!t)
5661 t = build_variant_type_copy (type PASS_MEM_STAT);
5662 set_type_quals (t, type_quals);
5664 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5666 /* See if this object can map to a basic atomic type. */
5667 tree atomic_type = find_atomic_core_type (type);
5668 if (atomic_type)
5670 /* Ensure the alignment of this type is compatible with
5671 the required alignment of the atomic type. */
5672 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5673 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5677 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5678 /* Propagate structural equality. */
5679 SET_TYPE_STRUCTURAL_EQUALITY (t);
5680 else if (TYPE_CANONICAL (type) != type)
5681 /* Build the underlying canonical type, since it is different
5682 from TYPE. */
5684 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5685 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5687 else
5688 /* T is its own canonical type. */
5689 TYPE_CANONICAL (t) = t;
5693 return t;
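/* Illustrative sketch (not original source): asking for "const volatile
   int" reuses an existing variant of integer_type_node if one has
   already been built, and otherwise creates a new one and chains it on
   the variant list:

     tree cvint = build_qualified_type (integer_type_node,
                                        TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   TYPE_MAIN_VARIANT (cvint) remains integer_type_node.  */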
5696 /* Create a variant of type T with alignment ALIGN. */
5698 tree
5699 build_aligned_type (tree type, unsigned int align)
5701 tree t;
5703 if (TYPE_PACKED (type)
5704 || TYPE_ALIGN (type) == align)
5705 return type;
5707 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5708 if (check_aligned_type (t, type, align))
5709 return t;
5711 t = build_variant_type_copy (type);
5712 SET_TYPE_ALIGN (t, align);
5713 TYPE_USER_ALIGN (t) = 1;
5715 return t;
5718 /* Create a new distinct copy of TYPE. The new type is made its own
5719 MAIN_VARIANT. If TYPE requires structural equality checks, the
5720 resulting type requires structural equality checks; otherwise, its
5721 TYPE_CANONICAL points to itself. */
5723 tree
5724 build_distinct_type_copy (tree type MEM_STAT_DECL)
5726 tree t = copy_node (type PASS_MEM_STAT);
5728 TYPE_POINTER_TO (t) = 0;
5729 TYPE_REFERENCE_TO (t) = 0;
5731 /* Set the canonical type either to a new equivalence class, or
5732 propagate the need for structural equality checks. */
5733 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5734 SET_TYPE_STRUCTURAL_EQUALITY (t);
5735 else
5736 TYPE_CANONICAL (t) = t;
5738 /* Make it its own variant. */
5739 TYPE_MAIN_VARIANT (t) = t;
5740 TYPE_NEXT_VARIANT (t) = 0;
5742 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5743 whose TREE_TYPE is not t. This can also happen in the Ada
5744 frontend when using subtypes. */
5746 return t;
5749 /* Create a new variant of TYPE, equivalent but distinct. This is so
5750 the caller can modify it. TYPE_CANONICAL for the return type will
5751 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5752 are considered equal by the language itself (or that both types
5753 require structural equality checks). */
5755 tree
5756 build_variant_type_copy (tree type MEM_STAT_DECL)
5758 tree t, m = TYPE_MAIN_VARIANT (type);
5760 t = build_distinct_type_copy (type PASS_MEM_STAT);
5762 /* Since we're building a variant, assume that it is a non-semantic
5763 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5764 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5765 /* Type variants have no alias set defined. */
5766 TYPE_ALIAS_SET (t) = -1;
5768 /* Add the new type to the chain of variants of TYPE. */
5769 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5770 TYPE_NEXT_VARIANT (m) = t;
5771 TYPE_MAIN_VARIANT (t) = m;
5773 return t;
5776 /* Return true if the from trees in both tree maps are equal. */
5779 tree_map_base_eq (const void *va, const void *vb)
5781 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5782 *const b = (const struct tree_map_base *) vb;
5783 return (a->from == b->from);
5786 /* Hash a from tree in a tree_map_base. */
5788 unsigned int
5789 tree_map_base_hash (const void *item)
5791 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5794 /* Return true if this tree map structure is marked for garbage collection
5795 purposes. We simply return true if the from tree is marked, so that this
5796 structure goes away when the from tree goes away. */
5798 bool
5799 tree_map_base_marked_p (const void *p)
5801 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5804 /* Hash a from tree in a tree_map. */
5806 unsigned int
5807 tree_map_hash (const void *item)
5809 return (((const struct tree_map *) item)->hash);
5812 /* Hash a from tree in a tree_decl_map. */
5814 unsigned int
5815 tree_decl_map_hash (const void *item)
5817 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5820 /* Return the initialization priority for DECL. */
5822 priority_type
5823 decl_init_priority_lookup (tree decl)
5825 symtab_node *snode = symtab_node::get (decl);
5827 if (!snode)
5828 return DEFAULT_INIT_PRIORITY;
5829 return
5830 snode->get_init_priority ();
5833 /* Return the finalization priority for DECL. */
5835 priority_type
5836 decl_fini_priority_lookup (tree decl)
5838 cgraph_node *node = cgraph_node::get (decl);
5840 if (!node)
5841 return DEFAULT_INIT_PRIORITY;
5842 return
5843 node->get_fini_priority ();
5846 /* Set the initialization priority for DECL to PRIORITY. */
5848 void
5849 decl_init_priority_insert (tree decl, priority_type priority)
5851 struct symtab_node *snode;
5853 if (priority == DEFAULT_INIT_PRIORITY)
5855 snode = symtab_node::get (decl);
5856 if (!snode)
5857 return;
5859 else if (VAR_P (decl))
5860 snode = varpool_node::get_create (decl);
5861 else
5862 snode = cgraph_node::get_create (decl);
5863 snode->set_init_priority (priority);
5866 /* Set the finalization priority for DECL to PRIORITY. */
5868 void
5869 decl_fini_priority_insert (tree decl, priority_type priority)
5871 struct cgraph_node *node;
5873 if (priority == DEFAULT_INIT_PRIORITY)
5875 node = cgraph_node::get (decl);
5876 if (!node)
5877 return;
5879 else
5880 node = cgraph_node::get_create (decl);
5881 node->set_fini_priority (priority);
5884 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5886 static void
5887 print_debug_expr_statistics (void)
5889 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5890 (long) debug_expr_for_decl->size (),
5891 (long) debug_expr_for_decl->elements (),
5892 debug_expr_for_decl->collisions ());
5895 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5897 static void
5898 print_value_expr_statistics (void)
5900 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5901 (long) value_expr_for_decl->size (),
5902 (long) value_expr_for_decl->elements (),
5903 value_expr_for_decl->collisions ());
5906 /* Lookup a debug expression for FROM, and return it if we find one. */
5908 tree
5909 decl_debug_expr_lookup (tree from)
5911 struct tree_decl_map *h, in;
5912 in.base.from = from;
5914 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5915 if (h)
5916 return h->to;
5917 return NULL_TREE;
5920 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5922 void
5923 decl_debug_expr_insert (tree from, tree to)
5925 struct tree_decl_map *h;
5927 h = ggc_alloc<tree_decl_map> ();
5928 h->base.from = from;
5929 h->to = to;
5930 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5933 /* Lookup a value expression for FROM, and return it if we find one. */
5935 tree
5936 decl_value_expr_lookup (tree from)
5938 struct tree_decl_map *h, in;
5939 in.base.from = from;
5941 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5942 if (h)
5943 return h->to;
5944 return NULL_TREE;
5947 /* Insert a mapping FROM->TO in the value expression hashtable. */
5949 void
5950 decl_value_expr_insert (tree from, tree to)
5952 struct tree_decl_map *h;
5954 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5955 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5957 h = ggc_alloc<tree_decl_map> ();
5958 h->base.from = from;
5959 h->to = to;
5960 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5963 /* Lookup a vector of debug arguments for FROM, and return it if we
5964 find one. */
5966 vec<tree, va_gc> **
5967 decl_debug_args_lookup (tree from)
5969 struct tree_vec_map *h, in;
5971 if (!DECL_HAS_DEBUG_ARGS_P (from))
5972 return NULL;
5973 gcc_checking_assert (debug_args_for_decl != NULL);
5974 in.base.from = from;
5975 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5976 if (h)
5977 return &h->to;
5978 return NULL;
5981 /* Insert a mapping FROM->empty vector of debug arguments in the value
5982 expression hashtable. */
5984 vec<tree, va_gc> **
5985 decl_debug_args_insert (tree from)
5987 struct tree_vec_map *h;
5988 tree_vec_map **loc;
5990 if (DECL_HAS_DEBUG_ARGS_P (from))
5991 return decl_debug_args_lookup (from);
5992 if (debug_args_for_decl == NULL)
5993 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5994 h = ggc_alloc<tree_vec_map> ();
5995 h->base.from = from;
5996 h->to = NULL;
5997 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5998 *loc = h;
5999 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6000 return &h->to;
6003 /* Hashing of types so that we don't make duplicates.
6004 The entry point is `type_hash_canon'. */
6006 /* Generate the default hash code for TYPE. This is designed for
6007 speed, rather than maximum entropy. */
6009 hashval_t
6010 type_hash_canon_hash (tree type)
6012 inchash::hash hstate;
6014 hstate.add_int (TREE_CODE (type));
6016 if (TREE_TYPE (type))
6017 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6019 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6020 /* Just the identifier is adequate to distinguish. */
6021 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6023 switch (TREE_CODE (type))
6025 case METHOD_TYPE:
6026 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6027 /* FALLTHROUGH. */
6028 case FUNCTION_TYPE:
6029 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6030 if (TREE_VALUE (t) != error_mark_node)
6031 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6032 break;
6034 case OFFSET_TYPE:
6035 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6036 break;
6038 case ARRAY_TYPE:
6040 if (TYPE_DOMAIN (type))
6041 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6042 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6044 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6045 hstate.add_object (typeless);
6048 break;
6050 case INTEGER_TYPE:
6052 tree t = TYPE_MAX_VALUE (type);
6053 if (!t)
6054 t = TYPE_MIN_VALUE (type);
6055 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6056 hstate.add_object (TREE_INT_CST_ELT (t, i));
6057 break;
6060 case BITINT_TYPE:
6062 unsigned prec = TYPE_PRECISION (type);
6063 unsigned uns = TYPE_UNSIGNED (type);
6064 hstate.add_object (prec);
6065 hstate.add_int (uns);
6066 break;
6069 case REAL_TYPE:
6070 case FIXED_POINT_TYPE:
6072 unsigned prec = TYPE_PRECISION (type);
6073 hstate.add_object (prec);
6074 break;
6077 case VECTOR_TYPE:
6078 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6079 break;
6081 default:
6082 break;
6085 return hstate.end ();
6088 /* These are the Hashtable callback functions. */
6090 /* Returns true iff the types are equivalent. */
6092 bool
6093 type_cache_hasher::equal (type_hash *a, type_hash *b)
6095 /* First test the things that are the same for all types. */
6096 if (a->hash != b->hash
6097 || TREE_CODE (a->type) != TREE_CODE (b->type)
6098 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6099 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6100 TYPE_ATTRIBUTES (b->type))
6101 || (TREE_CODE (a->type) != COMPLEX_TYPE
6102 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6103 return false;
6105 /* Be careful about comparing arrays before and after the element type
6106 has been completed; don't compare TYPE_ALIGN unless both types are
6107 complete. */
6108 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6109 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6110 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6111 return false;
6113 switch (TREE_CODE (a->type))
6115 case VOID_TYPE:
6116 case OPAQUE_TYPE:
6117 case COMPLEX_TYPE:
6118 case POINTER_TYPE:
6119 case REFERENCE_TYPE:
6120 case NULLPTR_TYPE:
6121 return true;
6123 case VECTOR_TYPE:
6124 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6125 TYPE_VECTOR_SUBPARTS (b->type));
6127 case ENUMERAL_TYPE:
6128 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6129 && !(TYPE_VALUES (a->type)
6130 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6131 && TYPE_VALUES (b->type)
6132 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6133 && type_list_equal (TYPE_VALUES (a->type),
6134 TYPE_VALUES (b->type))))
6135 return false;
6137 /* fall through */
6139 case INTEGER_TYPE:
6140 case REAL_TYPE:
6141 case BOOLEAN_TYPE:
6142 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6143 return false;
6144 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6145 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6146 TYPE_MAX_VALUE (b->type)))
6147 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6148 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6149 TYPE_MIN_VALUE (b->type))));
6151 case BITINT_TYPE:
6152 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6153 return false;
6154 return TYPE_UNSIGNED (a->type) == TYPE_UNSIGNED (b->type);
6156 case FIXED_POINT_TYPE:
6157 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6159 case OFFSET_TYPE:
6160 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6162 case METHOD_TYPE:
6163 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6164 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6165 || (TYPE_ARG_TYPES (a->type)
6166 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6167 && TYPE_ARG_TYPES (b->type)
6168 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6169 && type_list_equal (TYPE_ARG_TYPES (a->type),
6170 TYPE_ARG_TYPES (b->type)))))
6171 break;
6172 return false;
6173 case ARRAY_TYPE:
6174 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6175 where the flag should be inherited from the element type
6176 and can change after ARRAY_TYPEs are created; on non-aggregates
6177 compare it and hash it, since scalars will never have that flag set
6178 and we need to differentiate between arrays created by different
6179 front ends and middle-end-created arrays. */
6180 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6181 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6182 || (TYPE_TYPELESS_STORAGE (a->type)
6183 == TYPE_TYPELESS_STORAGE (b->type))));
6185 case RECORD_TYPE:
6186 case UNION_TYPE:
6187 case QUAL_UNION_TYPE:
6188 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6189 || (TYPE_FIELDS (a->type)
6190 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6191 && TYPE_FIELDS (b->type)
6192 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6193 && type_list_equal (TYPE_FIELDS (a->type),
6194 TYPE_FIELDS (b->type))));
6196 case FUNCTION_TYPE:
6197 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6198 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6199 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6200 || (TYPE_ARG_TYPES (a->type)
6201 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6202 && TYPE_ARG_TYPES (b->type)
6203 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6204 && type_list_equal (TYPE_ARG_TYPES (a->type),
6205 TYPE_ARG_TYPES (b->type))))
6206 break;
6207 return false;
6209 default:
6210 return false;
6213 if (lang_hooks.types.type_hash_eq != NULL)
6214 return lang_hooks.types.type_hash_eq (a->type, b->type);
6216 return true;
6219 /* Given TYPE, and HASHCODE its hash code, return the canonical
6220 object for an identical type if one already exists.
6221 Otherwise, return TYPE, and record it as the canonical object.
6223 To use this function, first create a type of the sort you want.
6224 Then compute its hash code from the fields of the type that
6225 make it different from other similar types.
6226 Then call this function and use the value. */
6228 tree
6229 type_hash_canon (unsigned int hashcode, tree type)
6231 type_hash in;
6232 type_hash **loc;
6234 /* The hash table only contains main variants, so ensure that's what we're
6235 being passed. */
6236 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6238 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6239 must call that routine before comparing TYPE_ALIGNs. */
6240 layout_type (type);
6242 in.hash = hashcode;
6243 in.type = type;
6245 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6246 if (*loc)
6248 tree t1 = ((type_hash *) *loc)->type;
6249 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6250 && t1 != type);
6251 if (TYPE_UID (type) + 1 == next_type_uid)
6252 --next_type_uid;
6253 /* Also free the min/max values and the cache for integer
6254 types. This can't be done in free_node, as LTO frees
6255 those on its own. */
6256 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == BITINT_TYPE)
6258 if (TYPE_MIN_VALUE (type)
6259 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6261 /* Zero is always in TYPE_CACHED_VALUES. */
6262 if (! TYPE_UNSIGNED (type))
6263 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6264 ggc_free (TYPE_MIN_VALUE (type));
6266 if (TYPE_MAX_VALUE (type)
6267 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6269 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6270 ggc_free (TYPE_MAX_VALUE (type));
6272 if (TYPE_CACHED_VALUES_P (type))
6273 ggc_free (TYPE_CACHED_VALUES (type));
6275 free_node (type);
6276 return t1;
6278 else
6280 struct type_hash *h;
6282 h = ggc_alloc<type_hash> ();
6283 h->hash = hashcode;
6284 h->type = type;
6285 *loc = h;
6287 return type;
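/* Illustrative sketch (not from the original source): the typical calling
   pattern for type_hash_canon, as used by the type constructors later in
   this file.  Build a candidate node, hash it, and let the canonicalizer
   either return an existing equivalent node or register the new one.
   make_my_offset_type below is a hypothetical name; the real constructor
   with this shape is build_offset_type further down.

     tree
     make_my_offset_type (tree basetype, tree type)
     {
       tree t = make_node (OFFSET_TYPE);
       TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
       TREE_TYPE (t) = type;
       hashval_t hash = type_hash_canon_hash (t);
       return type_hash_canon (hash, t);
     }

   Note that type_hash_canon may free T and hand back a previously
   registered equivalent node, so T must not be used afterwards.  */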
6291 static void
6292 print_type_hash_statistics (void)
6294 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6295 (long) type_hash_table->size (),
6296 (long) type_hash_table->elements (),
6297 type_hash_table->collisions ());
6300 /* Given two lists of types
6301 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6302 return 1 if the lists contain the same types in the same order.
6303 Also, the TREE_PURPOSEs must match. */
6305 bool
6306 type_list_equal (const_tree l1, const_tree l2)
6308 const_tree t1, t2;
6310 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6311 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6312 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6313 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6314 && (TREE_TYPE (TREE_PURPOSE (t1))
6315 == TREE_TYPE (TREE_PURPOSE (t2))))))
6316 return false;
6318 return t1 == t2;
6321 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6322 given by FNTYPE. If the argument list accepts variable arguments,
6323 then this function counts only the ordinary arguments. */
6326 type_num_arguments (const_tree fntype)
6328 int i = 0;
6330 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6331 /* If the function does not take a variable number of arguments,
6332 the last element in the list will have type `void'. */
6333 if (VOID_TYPE_P (TREE_VALUE (t)))
6334 break;
6335 else
6336 ++i;
6338 return i;
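/* Illustrative example (assuming a C front end has built the types): for
   the prototype "int f (int, double, ...)" the TYPE_ARG_TYPES list is
   (int, double) with no trailing void, so type_num_arguments returns 2;
   for "int g (int)" the list is (int, void) and the result is 1.  */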
6341 /* Return the type of the function TYPE's argument ARGNO if known.
6342 For vararg functions, where ARGNO refers to one of the variadic
6343 arguments, return null. Otherwise, return void_type_node for
6344 out-of-bounds ARGNO. */
6346 tree
6347 type_argument_type (const_tree fntype, unsigned argno)
6349 /* Treat zero the same as an out-of-bounds argument number. */
6350 if (!argno)
6351 return void_type_node;
6353 function_args_iterator iter;
6355 tree argtype;
6356 unsigned i = 1;
6357 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6359 /* A vararg function's argument list ends in a null. Otherwise,
6360 an ordinary function's argument list ends with void. Return
6361 null if ARGNO refers to a vararg argument, void_type_node if
6362 it's out of bounds, and the formal argument type otherwise. */
6363 if (!argtype)
6364 break;
6366 if (i == argno || VOID_TYPE_P (argtype))
6367 return argtype;
6369 ++i;
6372 return NULL_TREE;
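/* Illustrative example, continuing the "int f (int, double, ...)" case
   above: type_argument_type (fntype, 2) returns double_type_node,
   type_argument_type (fntype, 3) returns NULL_TREE (a variadic slot),
   and type_argument_type (fntype, 0) returns void_type_node.  */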
6375 /* True if integer constants T1 and T2
6376 represent the same constant value. */
6378 bool
6379 tree_int_cst_equal (const_tree t1, const_tree t2)
6381 if (t1 == t2)
6382 return true;
6384 if (t1 == 0 || t2 == 0)
6385 return false;
6387 STRIP_ANY_LOCATION_WRAPPER (t1);
6388 STRIP_ANY_LOCATION_WRAPPER (t2);
6390 if (TREE_CODE (t1) == INTEGER_CST
6391 && TREE_CODE (t2) == INTEGER_CST
6392 && wi::to_widest (t1) == wi::to_widest (t2))
6393 return true;
6395 return false;
6398 /* Return true if T is an INTEGER_CST whose numerical value (extended
6399 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6401 bool
6402 tree_fits_shwi_p (const_tree t)
6404 return (t != NULL_TREE
6405 && TREE_CODE (t) == INTEGER_CST
6406 && wi::fits_shwi_p (wi::to_widest (t)));
6409 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6410 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6412 bool
6413 tree_fits_poly_int64_p (const_tree t)
6415 if (t == NULL_TREE)
6416 return false;
6417 if (POLY_INT_CST_P (t))
6419 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6420 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6421 return false;
6422 return true;
6424 return (TREE_CODE (t) == INTEGER_CST
6425 && wi::fits_shwi_p (wi::to_widest (t)));
6428 /* Return true if T is an INTEGER_CST whose numerical value (extended
6429 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6431 bool
6432 tree_fits_uhwi_p (const_tree t)
6434 return (t != NULL_TREE
6435 && TREE_CODE (t) == INTEGER_CST
6436 && wi::fits_uhwi_p (wi::to_widest (t)));
6439 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6440 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6442 bool
6443 tree_fits_poly_uint64_p (const_tree t)
6445 if (t == NULL_TREE)
6446 return false;
6447 if (POLY_INT_CST_P (t))
6449 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6450 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6451 return false;
6452 return true;
6454 return (TREE_CODE (t) == INTEGER_CST
6455 && wi::fits_uhwi_p (wi::to_widest (t)));
6458 /* T is an INTEGER_CST whose numerical value (extended according to
6459 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6460 HOST_WIDE_INT. */
6462 HOST_WIDE_INT
6463 tree_to_shwi (const_tree t)
6465 gcc_assert (tree_fits_shwi_p (t));
6466 return TREE_INT_CST_LOW (t);
6469 /* T is an INTEGER_CST whose numerical value (extended according to
6470 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6471 HOST_WIDE_INT. */
6473 unsigned HOST_WIDE_INT
6474 tree_to_uhwi (const_tree t)
6476 gcc_assert (tree_fits_uhwi_p (t));
6477 return TREE_INT_CST_LOW (t);
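/* Typical usage sketch (illustrative): callers are expected to test the
   predicate before converting, e.g.

     if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
       {
         unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
         ...
       }

   Calling tree_to_uhwi on a value that does not fit trips the assert
   above.  */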
6480 /* Return the most significant (sign) bit of T. */
6483 tree_int_cst_sign_bit (const_tree t)
6485 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6487 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6490 /* Return an indication of the sign of the integer constant T.
6491 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6492 Note that -1 will never be returned if T's type is unsigned. */
6495 tree_int_cst_sgn (const_tree t)
6497 if (wi::to_wide (t) == 0)
6498 return 0;
6499 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6500 return 1;
6501 else if (wi::neg_p (wi::to_wide (t)))
6502 return -1;
6503 else
6504 return 1;
6507 /* Return the minimum number of bits needed to represent VALUE in a
6508 signed or unsigned type; SGN says which. */
6510 unsigned int
6511 tree_int_cst_min_precision (tree value, signop sgn)
6513 /* If the value is negative, compute its negative minus 1. The latter
6514 adjustment is because the absolute value of the largest negative value
6515 is one larger than the largest positive value. This is equivalent to
6516 a bit-wise negation, so use that operation instead. */
6518 if (tree_int_cst_sgn (value) < 0)
6519 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6521 /* Return the number of bits needed, taking into account the fact
6522 that we need one more bit for a signed than unsigned type.
6523 If VALUE is 0 or -1, the minimum precision is 1 no matter
6524 whether SGN is SIGNED or UNSIGNED. */
6526 if (integer_zerop (value))
6527 return 1;
6528 else
6529 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
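/* Worked example (illustrative): for VALUE == 5 (binary 101),
   tree_floor_log2 (5) == 2, so the result is 3 bits for UNSIGNED and 4
   for SIGNED (one extra bit for the sign).  For VALUE == -3 the value is
   first bit-negated to 2, giving 2 bits plus the sign bit, i.e. 3.  */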
6532 /* Return truthvalue of whether T1 is the same tree structure as T2.
6533 Return 1 if they are the same.
6534 Return 0 if they are understandably different.
6535 Return -1 if either contains tree structure not understood by
6536 this function. */
6539 simple_cst_equal (const_tree t1, const_tree t2)
6541 enum tree_code code1, code2;
6542 int cmp;
6543 int i;
6545 if (t1 == t2)
6546 return 1;
6547 if (t1 == 0 || t2 == 0)
6548 return 0;
6550 /* For location wrappers to be the same, they must be at the same
6551 source location (and wrap the same thing). */
6552 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6554 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6555 return 0;
6556 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6559 code1 = TREE_CODE (t1);
6560 code2 = TREE_CODE (t2);
6562 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6564 if (CONVERT_EXPR_CODE_P (code2)
6565 || code2 == NON_LVALUE_EXPR)
6566 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6567 else
6568 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6571 else if (CONVERT_EXPR_CODE_P (code2)
6572 || code2 == NON_LVALUE_EXPR)
6573 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6575 if (code1 != code2)
6576 return 0;
6578 switch (code1)
6580 case INTEGER_CST:
6581 return wi::to_widest (t1) == wi::to_widest (t2);
6583 case REAL_CST:
6584 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6586 case FIXED_CST:
6587 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6589 case STRING_CST:
6590 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6591 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6592 TREE_STRING_LENGTH (t1)));
6594 case CONSTRUCTOR:
6596 unsigned HOST_WIDE_INT idx;
6597 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6598 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6600 if (vec_safe_length (v1) != vec_safe_length (v2))
6601 return false;
6603 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6604 /* ??? Should we handle also fields here? */
6605 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6606 return false;
6607 return true;
6610 case SAVE_EXPR:
6611 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6613 case CALL_EXPR:
6614 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6615 if (cmp <= 0)
6616 return cmp;
6617 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6618 return 0;
6620 const_tree arg1, arg2;
6621 const_call_expr_arg_iterator iter1, iter2;
6622 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6623 arg2 = first_const_call_expr_arg (t2, &iter2);
6624 arg1 && arg2;
6625 arg1 = next_const_call_expr_arg (&iter1),
6626 arg2 = next_const_call_expr_arg (&iter2))
6628 cmp = simple_cst_equal (arg1, arg2);
6629 if (cmp <= 0)
6630 return cmp;
6632 return arg1 == arg2;
6635 case TARGET_EXPR:
6636 /* Special case: if either target is an unallocated VAR_DECL,
6637 it means that it's going to be unified with whatever the
6638 TARGET_EXPR is really supposed to initialize, so treat it
6639 as being equivalent to anything. */
6640 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6641 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6642 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6643 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6644 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6645 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6646 cmp = 1;
6647 else
6648 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6650 if (cmp <= 0)
6651 return cmp;
6653 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6655 case WITH_CLEANUP_EXPR:
6656 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6657 if (cmp <= 0)
6658 return cmp;
6660 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6662 case COMPONENT_REF:
6663 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6664 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6666 return 0;
6668 case VAR_DECL:
6669 case PARM_DECL:
6670 case CONST_DECL:
6671 case FUNCTION_DECL:
6672 return 0;
6674 default:
6675 if (POLY_INT_CST_P (t1))
6676 /* A false return means maybe_ne rather than known_ne. */
6677 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6678 TYPE_SIGN (TREE_TYPE (t1))),
6679 poly_widest_int::from (poly_int_cst_value (t2),
6680 TYPE_SIGN (TREE_TYPE (t2))));
6681 break;
6684 /* This general rule works for most tree codes. All exceptions should be
6685 handled above. If this is a language-specific tree code, we can't
6686 trust what might be in the operand, so say we don't know
6687 the situation. */
6688 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6689 return -1;
6691 switch (TREE_CODE_CLASS (code1))
6693 case tcc_unary:
6694 case tcc_binary:
6695 case tcc_comparison:
6696 case tcc_expression:
6697 case tcc_reference:
6698 case tcc_statement:
6699 cmp = 1;
6700 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6702 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6703 if (cmp <= 0)
6704 return cmp;
6707 return cmp;
6709 default:
6710 return -1;
6714 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6715 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6716 than U, respectively. */
6719 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6721 if (tree_int_cst_sgn (t) < 0)
6722 return -1;
6723 else if (!tree_fits_uhwi_p (t))
6724 return 1;
6725 else if (TREE_INT_CST_LOW (t) == u)
6726 return 0;
6727 else if (TREE_INT_CST_LOW (t) < u)
6728 return -1;
6729 else
6730 return 1;
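/* Illustrative example: compare_tree_int (size_int (16), 8) returns 1,
   compare_tree_int (size_int (8), 8) returns 0, and any negative
   INTEGER_CST compares as less than every unsigned U, so -1 is
   returned.  */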
6733 /* Return true if SIZE represents a constant size that is in bounds of
6734 what the middle-end and the backend accepts (covering not more than
6735 half of the address-space).
6736 When PERR is non-null, set *PERR on failure to the description of
6737 why SIZE is not valid. */
6739 bool
6740 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6742 if (POLY_INT_CST_P (size))
6744 if (TREE_OVERFLOW (size))
6745 return false;
6746 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6747 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6748 return false;
6749 return true;
6752 cst_size_error error;
6753 if (!perr)
6754 perr = &error;
6756 if (TREE_CODE (size) != INTEGER_CST)
6758 *perr = cst_size_not_constant;
6759 return false;
6762 if (TREE_OVERFLOW_P (size))
6764 *perr = cst_size_overflow;
6765 return false;
6768 if (tree_int_cst_sgn (size) < 0)
6770 *perr = cst_size_negative;
6771 return false;
6773 if (!tree_fits_uhwi_p (size)
6774 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6775 < wi::to_widest (size) * 2))
6777 *perr = cst_size_too_big;
6778 return false;
6781 return true;
6784 /* Return the precision of the type, or for a complex or vector type the
6785 precision of the type of its elements. */
6787 unsigned int
6788 element_precision (const_tree type)
6790 if (!TYPE_P (type))
6791 type = TREE_TYPE (type);
6792 enum tree_code code = TREE_CODE (type);
6793 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6794 type = TREE_TYPE (type);
6796 return TYPE_PRECISION (type);
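/* Illustrative example: element_precision applied to a complex double or
   to a vector of floats returns the precision of the element type (64
   and 32 bits respectively on typical targets), while for a plain
   integer type or expression it is just TYPE_PRECISION of that type.  */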
6799 /* Return true if CODE represents an associative tree code. Otherwise
6800 return false. */
6801 bool
6802 associative_tree_code (enum tree_code code)
6804 switch (code)
6806 case BIT_IOR_EXPR:
6807 case BIT_AND_EXPR:
6808 case BIT_XOR_EXPR:
6809 case PLUS_EXPR:
6810 case MULT_EXPR:
6811 case MIN_EXPR:
6812 case MAX_EXPR:
6813 return true;
6815 default:
6816 break;
6818 return false;
6821 /* Return true if CODE represents a commutative tree code. Otherwise
6822 return false. */
6823 bool
6824 commutative_tree_code (enum tree_code code)
6826 switch (code)
6828 case PLUS_EXPR:
6829 case MULT_EXPR:
6830 case MULT_HIGHPART_EXPR:
6831 case MIN_EXPR:
6832 case MAX_EXPR:
6833 case BIT_IOR_EXPR:
6834 case BIT_XOR_EXPR:
6835 case BIT_AND_EXPR:
6836 case NE_EXPR:
6837 case EQ_EXPR:
6838 case UNORDERED_EXPR:
6839 case ORDERED_EXPR:
6840 case UNEQ_EXPR:
6841 case LTGT_EXPR:
6842 case TRUTH_AND_EXPR:
6843 case TRUTH_XOR_EXPR:
6844 case TRUTH_OR_EXPR:
6845 case WIDEN_MULT_EXPR:
6846 case VEC_WIDEN_MULT_HI_EXPR:
6847 case VEC_WIDEN_MULT_LO_EXPR:
6848 case VEC_WIDEN_MULT_EVEN_EXPR:
6849 case VEC_WIDEN_MULT_ODD_EXPR:
6850 return true;
6852 default:
6853 break;
6855 return false;
6858 /* Return true if CODE represents a ternary tree code for which the
6859 first two operands are commutative. Otherwise return false. */
6860 bool
6861 commutative_ternary_tree_code (enum tree_code code)
6863 switch (code)
6865 case WIDEN_MULT_PLUS_EXPR:
6866 case WIDEN_MULT_MINUS_EXPR:
6867 case DOT_PROD_EXPR:
6868 return true;
6870 default:
6871 break;
6873 return false;
6876 /* Returns true if CODE can overflow. */
6878 bool
6879 operation_can_overflow (enum tree_code code)
6881 switch (code)
6883 case PLUS_EXPR:
6884 case MINUS_EXPR:
6885 case MULT_EXPR:
6886 case LSHIFT_EXPR:
6887 /* Can overflow in various ways. */
6888 return true;
6889 case TRUNC_DIV_EXPR:
6890 case EXACT_DIV_EXPR:
6891 case FLOOR_DIV_EXPR:
6892 case CEIL_DIV_EXPR:
6893 /* For INT_MIN / -1. */
6894 return true;
6895 case NEGATE_EXPR:
6896 case ABS_EXPR:
6897 /* For -INT_MIN. */
6898 return true;
6899 default:
6900 /* These operators cannot overflow. */
6901 return false;
6905 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6906 -ftrapv doesn't generate trapping insns for CODE. */
6908 bool
6909 operation_no_trapping_overflow (tree type, enum tree_code code)
6911 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6913 /* We don't generate instructions that trap on overflow for complex or vector
6914 types. */
6915 if (!INTEGRAL_TYPE_P (type))
6916 return true;
6918 if (!TYPE_OVERFLOW_TRAPS (type))
6919 return true;
6921 switch (code)
6923 case PLUS_EXPR:
6924 case MINUS_EXPR:
6925 case MULT_EXPR:
6926 case NEGATE_EXPR:
6927 case ABS_EXPR:
6928 /* These operators can overflow, and -ftrapv generates trapping code for
6929 these. */
6930 return false;
6931 case TRUNC_DIV_EXPR:
6932 case EXACT_DIV_EXPR:
6933 case FLOOR_DIV_EXPR:
6934 case CEIL_DIV_EXPR:
6935 case LSHIFT_EXPR:
6936 /* These operators can overflow, but -ftrapv does not generate trapping
6937 code for these. */
6938 return true;
6939 default:
6940 /* These operators cannot overflow. */
6941 return true;
6945 /* Constructors for pointer, array and function types.
6946 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6947 constructed by language-dependent code, not here.) */
6949 /* Construct, lay out and return the type of pointers to TO_TYPE with
6950 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6951 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6952 indicate this type can reference all of memory. If such a type has
6953 already been constructed, reuse it. */
6955 tree
6956 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6957 bool can_alias_all)
6959 tree t;
6960 bool could_alias = can_alias_all;
6962 if (to_type == error_mark_node)
6963 return error_mark_node;
6965 if (mode == VOIDmode)
6967 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6968 mode = targetm.addr_space.pointer_mode (as);
6971 /* If the pointed-to type has the may_alias attribute set, force
6972 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6973 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6974 can_alias_all = true;
6976 /* In some cases, languages will have things that aren't a POINTER_TYPE
6977 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6978 In that case, return that type without regard to the rest of our
6979 operands.
6981 ??? This is a kludge, but consistent with the way this function has
6982 always operated and there doesn't seem to be a good way to avoid this
6983 at the moment. */
6984 if (TYPE_POINTER_TO (to_type) != 0
6985 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6986 return TYPE_POINTER_TO (to_type);
6988 /* First, if we already have a type for pointers to TO_TYPE and it's
6989 the proper mode, use it. */
6990 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6991 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6992 return t;
6994 t = make_node (POINTER_TYPE);
6996 TREE_TYPE (t) = to_type;
6997 SET_TYPE_MODE (t, mode);
6998 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6999 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7000 TYPE_POINTER_TO (to_type) = t;
7002 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7003 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7004 SET_TYPE_STRUCTURAL_EQUALITY (t);
7005 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7006 TYPE_CANONICAL (t)
7007 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7008 mode, false);
7010 /* Lay out the type. This function has many callers that are concerned
7011 with expression-construction, and this simplifies them all. */
7012 layout_type (t);
7014 return t;
7017 /* By default build pointers in ptr_mode. */
7019 tree
7020 build_pointer_type (tree to_type)
7022 return build_pointer_type_for_mode (to_type, VOIDmode, false);
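/* Usage sketch (illustrative): build_pointer_type (integer_type_node)
   yields the "int *" type, and a second call returns the same node
   because the result is chained off TYPE_POINTER_TO of the pointed-to
   type.  A specific pointer mode can also be requested directly, e.g.

     tree p = build_pointer_type_for_mode (char_type_node, ptr_mode, false);
*/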
7025 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7027 tree
7028 build_reference_type_for_mode (tree to_type, machine_mode mode,
7029 bool can_alias_all)
7031 tree t;
7032 bool could_alias = can_alias_all;
7034 if (to_type == error_mark_node)
7035 return error_mark_node;
7037 if (mode == VOIDmode)
7039 addr_space_t as = TYPE_ADDR_SPACE (to_type);
7040 mode = targetm.addr_space.pointer_mode (as);
7043 /* If the pointed-to type has the may_alias attribute set, force
7044 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7045 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7046 can_alias_all = true;
7048 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7049 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7050 In that case, return that type without regard to the rest of our
7051 operands.
7053 ??? This is a kludge, but consistent with the way this function has
7054 always operated and there doesn't seem to be a good way to avoid this
7055 at the moment. */
7056 if (TYPE_REFERENCE_TO (to_type) != 0
7057 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7058 return TYPE_REFERENCE_TO (to_type);
7060 /* First, if we already have a type for pointers to TO_TYPE and it's
7061 the proper mode, use it. */
7062 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7063 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7064 return t;
7066 t = make_node (REFERENCE_TYPE);
7068 TREE_TYPE (t) = to_type;
7069 SET_TYPE_MODE (t, mode);
7070 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7071 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7072 TYPE_REFERENCE_TO (to_type) = t;
7074 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7075 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7076 SET_TYPE_STRUCTURAL_EQUALITY (t);
7077 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7078 TYPE_CANONICAL (t)
7079 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7080 mode, false);
7082 layout_type (t);
7084 return t;
7088 /* Build the node for the type of references-to-TO_TYPE by default
7089 in ptr_mode. */
7091 tree
7092 build_reference_type (tree to_type)
7094 return build_reference_type_for_mode (to_type, VOIDmode, false);
7097 #define MAX_INT_CACHED_PREC \
7098 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7099 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7101 static void
7102 clear_nonstandard_integer_type_cache (void)
7104 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7106 nonstandard_integer_type_cache[i] = NULL;
7110 /* Builds a signed or unsigned integer type of precision PRECISION.
7111 Used for C bitfields whose precision does not match that of
7112 built-in target types. */
7113 tree
7114 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7115 int unsignedp)
7117 tree itype, ret;
7119 if (unsignedp)
7120 unsignedp = MAX_INT_CACHED_PREC + 1;
7122 if (precision <= MAX_INT_CACHED_PREC)
7124 itype = nonstandard_integer_type_cache[precision + unsignedp];
7125 if (itype)
7126 return itype;
7129 itype = make_node (INTEGER_TYPE);
7130 TYPE_PRECISION (itype) = precision;
7132 if (unsignedp)
7133 fixup_unsigned_type (itype);
7134 else
7135 fixup_signed_type (itype);
7137 inchash::hash hstate;
7138 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7139 ret = type_hash_canon (hstate.end (), itype);
7140 if (precision <= MAX_INT_CACHED_PREC)
7141 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7143 return ret;
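/* Illustrative example: build_nonstandard_integer_type (24, 1) returns an
   unsigned 24-bit INTEGER_TYPE, e.g. for a C bit-field declared as
   "unsigned x : 24".  Results with precision up to MAX_INT_CACHED_PREC
   are cached and shared between callers.  */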
7146 #define MAX_BOOL_CACHED_PREC \
7147 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7148 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7150 /* Builds a boolean type of precision PRECISION.
7151 Used for boolean vectors to choose proper vector element size. */
7152 tree
7153 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7155 tree type;
7157 if (precision <= MAX_BOOL_CACHED_PREC)
7159 type = nonstandard_boolean_type_cache[precision];
7160 if (type)
7161 return type;
7164 type = make_node (BOOLEAN_TYPE);
7165 TYPE_PRECISION (type) = precision;
7166 fixup_signed_type (type);
7168 if (precision <= MAX_BOOL_CACHED_PREC)
7169 nonstandard_boolean_type_cache[precision] = type;
7171 return type;
7174 static GTY(()) vec<tree, va_gc> *bitint_type_cache;
7176 /* Builds a signed or unsigned _BitInt(PRECISION) type. */
7177 tree
7178 build_bitint_type (unsigned HOST_WIDE_INT precision, int unsignedp)
7180 tree itype, ret;
7182 gcc_checking_assert (precision >= 1 + !unsignedp);
7184 if (unsignedp)
7185 unsignedp = MAX_INT_CACHED_PREC + 1;
7187 if (bitint_type_cache == NULL)
7188 vec_safe_grow_cleared (bitint_type_cache, 2 * MAX_INT_CACHED_PREC + 2);
7190 if (precision <= MAX_INT_CACHED_PREC)
7192 itype = (*bitint_type_cache)[precision + unsignedp];
7193 if (itype)
7194 return itype;
7197 itype = make_node (BITINT_TYPE);
7198 TYPE_PRECISION (itype) = precision;
7200 if (unsignedp)
7201 fixup_unsigned_type (itype);
7202 else
7203 fixup_signed_type (itype);
7205 inchash::hash hstate;
7206 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7207 ret = type_hash_canon (hstate.end (), itype);
7208 if (precision <= MAX_INT_CACHED_PREC)
7209 (*bitint_type_cache)[precision + unsignedp] = ret;
7211 return ret;
7214 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7215 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7216 is true, reuse such a type that has already been constructed. */
7218 static tree
7219 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7221 tree itype = make_node (INTEGER_TYPE);
7223 TREE_TYPE (itype) = type;
7225 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7226 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7228 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7229 SET_TYPE_MODE (itype, TYPE_MODE (type));
7230 TYPE_SIZE (itype) = TYPE_SIZE (type);
7231 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7232 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7233 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7234 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7236 if (!shared)
7237 return itype;
7239 if ((TYPE_MIN_VALUE (itype)
7240 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7241 || (TYPE_MAX_VALUE (itype)
7242 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7244 /* Since we cannot reliably merge this type, we need to compare it using
7245 structural equality checks. */
7246 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7247 return itype;
7250 hashval_t hash = type_hash_canon_hash (itype);
7251 itype = type_hash_canon (hash, itype);
7253 return itype;
7256 /* Wrapper around build_range_type_1 with SHARED set to true. */
7258 tree
7259 build_range_type (tree type, tree lowval, tree highval)
7261 return build_range_type_1 (type, lowval, highval, true);
7264 /* Wrapper around build_range_type_1 with SHARED set to false. */
7266 tree
7267 build_nonshared_range_type (tree type, tree lowval, tree highval)
7269 return build_range_type_1 (type, lowval, highval, false);
7272 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7273 MAXVAL should be the maximum value in the domain
7274 (one less than the length of the array).
7276 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7277 We don't enforce this limit; that is up to the caller (e.g. a language front end).
7278 The limit exists because the result is a signed type and we don't handle
7279 sizes that use more than one HOST_WIDE_INT. */
7281 tree
7282 build_index_type (tree maxval)
7284 return build_range_type (sizetype, size_zero_node, maxval);
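/* Illustrative example: the TYPE_DOMAIN of the C array type "int a[10]"
   is build_index_type (size_int (9)), i.e. the sizetype range 0..9.  */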
7287 /* Return true if the debug information for TYPE, a subtype, should be emitted
7288 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7289 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7290 debug info and doesn't reflect the source code. */
7292 bool
7293 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7295 tree base_type = TREE_TYPE (type), low, high;
7297 /* Subrange types have a base type which is an integral type. */
7298 if (!INTEGRAL_TYPE_P (base_type))
7299 return false;
7301 /* Get the real bounds of the subtype. */
7302 if (lang_hooks.types.get_subrange_bounds)
7303 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7304 else
7306 low = TYPE_MIN_VALUE (type);
7307 high = TYPE_MAX_VALUE (type);
7310 /* If the type and its base type have the same representation and the same
7311 name, then the type is not a subrange but a copy of the base type. */
7312 if ((TREE_CODE (base_type) == INTEGER_TYPE
7313 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7314 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7315 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7316 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7317 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7318 return false;
7320 if (lowval)
7321 *lowval = low;
7322 if (highval)
7323 *highval = high;
7324 return true;
7327 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7328 and number of elements specified by the range of values of INDEX_TYPE.
7329 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7330 If SHARED is true, reuse such a type that has already been constructed.
7331 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7333 tree
7334 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7335 bool shared, bool set_canonical)
7337 tree t;
7339 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7341 error ("arrays of functions are not meaningful");
7342 elt_type = integer_type_node;
7345 t = make_node (ARRAY_TYPE);
7346 TREE_TYPE (t) = elt_type;
7347 TYPE_DOMAIN (t) = index_type;
7348 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7349 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7350 layout_type (t);
7352 if (shared)
7354 hashval_t hash = type_hash_canon_hash (t);
7355 t = type_hash_canon (hash, t);
7358 if (TYPE_CANONICAL (t) == t && set_canonical)
7360 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7361 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7362 || in_lto_p)
7363 SET_TYPE_STRUCTURAL_EQUALITY (t);
7364 else if (TYPE_CANONICAL (elt_type) != elt_type
7365 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7366 TYPE_CANONICAL (t)
7367 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7368 index_type
7369 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7370 typeless_storage, shared, set_canonical);
7373 return t;
7376 /* Wrapper around build_array_type_1 with SHARED set to true. */
7378 tree
7379 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7381 return
7382 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7385 /* Wrapper around build_array_type_1 with SHARED set to false. */
7387 tree
7388 build_nonshared_array_type (tree elt_type, tree index_type)
7390 return build_array_type_1 (elt_type, index_type, false, false, true);
7393 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7394 sizetype. */
7396 tree
7397 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7399 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
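/* Usage sketch (illustrative): an "int[10]" array type can be built
   either as

     build_array_type (integer_type_node, build_index_type (size_int (9)))

   or, equivalently, as build_array_type_nelts (integer_type_node, 10).  */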
7402 /* Computes the canonical argument types from the argument type list
7403 ARGTYPES.
7405 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7406 on entry to this function, or if any of the ARGTYPES are
7407 structural.
7409 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7410 true on entry to this function, or if any of the ARGTYPES are
7411 non-canonical.
7413 Returns a canonical argument list, which may be ARGTYPES when the
7414 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7415 true) or would not differ from ARGTYPES. */
7417 static tree
7418 maybe_canonicalize_argtypes (tree argtypes,
7419 bool *any_structural_p,
7420 bool *any_noncanonical_p)
7422 tree arg;
7423 bool any_noncanonical_argtypes_p = false;
7425 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7427 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7428 /* Fail gracefully by stating that the type is structural. */
7429 *any_structural_p = true;
7430 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7431 *any_structural_p = true;
7432 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7433 || TREE_PURPOSE (arg))
7434 /* If the argument has a default argument, we consider it
7435 non-canonical even though the type itself is canonical.
7436 That way, different variants of function and method types
7437 with default arguments will all point to the variant with
7438 no defaults as their canonical type. */
7439 any_noncanonical_argtypes_p = true;
7442 if (*any_structural_p)
7443 return argtypes;
7445 if (any_noncanonical_argtypes_p)
7447 /* Build the canonical list of argument types. */
7448 tree canon_argtypes = NULL_TREE;
7449 bool is_void = false;
7451 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7453 if (arg == void_list_node)
7454 is_void = true;
7455 else
7456 canon_argtypes = tree_cons (NULL_TREE,
7457 TYPE_CANONICAL (TREE_VALUE (arg)),
7458 canon_argtypes);
7461 canon_argtypes = nreverse (canon_argtypes);
7462 if (is_void)
7463 canon_argtypes = chainon (canon_argtypes, void_list_node);
7465 /* There is a non-canonical type. */
7466 *any_noncanonical_p = true;
7467 return canon_argtypes;
7470 /* The canonical argument types are the same as ARGTYPES. */
7471 return argtypes;
7474 /* Construct, lay out and return
7475 the type of functions returning type VALUE_TYPE
7476 given arguments of types ARG_TYPES.
7477 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7478 are data type nodes for the arguments of the function.
7479 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7480 variable-arguments function with (...) prototype (no named arguments).
7481 If such a type has already been constructed, reuse it. */
7483 tree
7484 build_function_type (tree value_type, tree arg_types,
7485 bool no_named_args_stdarg_p)
7487 tree t;
7488 inchash::hash hstate;
7489 bool any_structural_p, any_noncanonical_p;
7490 tree canon_argtypes;
7492 gcc_assert (arg_types != error_mark_node);
7494 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7496 error ("function return type cannot be function");
7497 value_type = integer_type_node;
7500 /* Make a node of the sort we want. */
7501 t = make_node (FUNCTION_TYPE);
7502 TREE_TYPE (t) = value_type;
7503 TYPE_ARG_TYPES (t) = arg_types;
7504 if (no_named_args_stdarg_p)
7506 gcc_assert (arg_types == NULL_TREE);
7507 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7510 /* If we already have such a type, use the old one. */
7511 hashval_t hash = type_hash_canon_hash (t);
7512 t = type_hash_canon (hash, t);
7514 /* Set up the canonical type. */
7515 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7516 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7517 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7518 &any_structural_p,
7519 &any_noncanonical_p);
7520 if (any_structural_p)
7521 SET_TYPE_STRUCTURAL_EQUALITY (t);
7522 else if (any_noncanonical_p)
7523 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7524 canon_argtypes);
7526 if (!COMPLETE_TYPE_P (t))
7527 layout_type (t);
7528 return t;
7531 /* Build a function type. The RETURN_TYPE is the type returned by the
7532 function. If VAARGS is set, no void_type_node is appended to the
7533 list. ARGP must always be terminated by a NULL_TREE. */
7535 static tree
7536 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7538 tree t, args, last;
7540 t = va_arg (argp, tree);
7541 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7542 args = tree_cons (NULL_TREE, t, args);
7544 if (vaargs)
7546 last = args;
7547 if (args != NULL_TREE)
7548 args = nreverse (args);
7549 gcc_assert (last != void_list_node);
7551 else if (args == NULL_TREE)
7552 args = void_list_node;
7553 else
7555 last = args;
7556 args = nreverse (args);
7557 TREE_CHAIN (last) = void_list_node;
7559 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7561 return args;
7564 /* Build a function type. The RETURN_TYPE is the type returned by the
7565 function. If additional arguments are provided, they are
7566 additional argument types. The list of argument types must always
7567 be terminated by NULL_TREE. */
7569 tree
7570 build_function_type_list (tree return_type, ...)
7572 tree args;
7573 va_list p;
7575 va_start (p, return_type);
7576 args = build_function_type_list_1 (false, return_type, p);
7577 va_end (p);
7578 return args;
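/* Illustrative example: the type of "void f (void *, int)" is

     build_function_type_list (void_type_node, ptr_type_node,
                               integer_type_node, NULL_TREE);

   a variadic signature such as "int g (const char *, ...)" would instead
   use build_varargs_function_type_list below.  */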
7581 /* Build a variable argument function type. The RETURN_TYPE is the
7582 type returned by the function. If additional arguments are provided,
7583 they are additional argument types. The list of argument types must
7584 always be terminated by NULL_TREE. */
7586 tree
7587 build_varargs_function_type_list (tree return_type, ...)
7589 tree args;
7590 va_list p;
7592 va_start (p, return_type);
7593 args = build_function_type_list_1 (true, return_type, p);
7594 va_end (p);
7596 return args;
7599 /* Build a function type. RETURN_TYPE is the type returned by the
7600 function; VAARGS indicates whether the function takes varargs. The
7601 function takes N named arguments, the types of which are provided in
7602 ARG_TYPES. */
7604 static tree
7605 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7606 tree *arg_types)
7608 int i;
7609 tree t = vaargs ? NULL_TREE : void_list_node;
7611 for (i = n - 1; i >= 0; i--)
7612 t = tree_cons (NULL_TREE, arg_types[i], t);
7614 return build_function_type (return_type, t, vaargs && n == 0);
7617 /* Build a function type. RETURN_TYPE is the type returned by the
7618 function. The function takes N named arguments, the types of which
7619 are provided in ARG_TYPES. */
7621 tree
7622 build_function_type_array (tree return_type, int n, tree *arg_types)
7624 return build_function_type_array_1 (false, return_type, n, arg_types);
7627 /* Build a variable argument function type. RETURN_TYPE is the type
7628 returned by the function. The function takes N named arguments, the
7629 types of which are provided in ARG_TYPES. */
7631 tree
7632 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7634 return build_function_type_array_1 (true, return_type, n, arg_types);
7637 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7638 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7639 for the method. An implicit additional parameter (of type
7640 pointer-to-BASETYPE) is added to the ARGTYPES. */
7642 tree
7643 build_method_type_directly (tree basetype,
7644 tree rettype,
7645 tree argtypes)
7647 tree t;
7648 tree ptype;
7649 bool any_structural_p, any_noncanonical_p;
7650 tree canon_argtypes;
7652 /* Make a node of the sort we want. */
7653 t = make_node (METHOD_TYPE);
7655 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7656 TREE_TYPE (t) = rettype;
7657 ptype = build_pointer_type (basetype);
7659 /* The actual arglist for this function includes a "hidden" argument
7660 which is "this". Put it into the list of argument types. */
7661 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7662 TYPE_ARG_TYPES (t) = argtypes;
7664 /* If we already have such a type, use the old one. */
7665 hashval_t hash = type_hash_canon_hash (t);
7666 t = type_hash_canon (hash, t);
7668 /* Set up the canonical type. */
7669 any_structural_p
7670 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7671 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7672 any_noncanonical_p
7673 = (TYPE_CANONICAL (basetype) != basetype
7674 || TYPE_CANONICAL (rettype) != rettype);
7675 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7676 &any_structural_p,
7677 &any_noncanonical_p);
7678 if (any_structural_p)
7679 SET_TYPE_STRUCTURAL_EQUALITY (t);
7680 else if (any_noncanonical_p)
7681 TYPE_CANONICAL (t)
7682 = build_method_type_directly (TYPE_CANONICAL (basetype),
7683 TYPE_CANONICAL (rettype),
7684 canon_argtypes);
7685 if (!COMPLETE_TYPE_P (t))
7686 layout_type (t);
7688 return t;
7691 /* Construct, lay out and return the type of methods belonging to class
7692 BASETYPE and whose arguments and values are described by TYPE.
7693 If that type exists already, reuse it.
7694 TYPE must be a FUNCTION_TYPE node. */
7696 tree
7697 build_method_type (tree basetype, tree type)
7699 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7701 return build_method_type_directly (basetype,
7702 TREE_TYPE (type),
7703 TYPE_ARG_TYPES (type));
7706 /* Construct, lay out and return the type of offsets to a value
7707 of type TYPE, within an object of type BASETYPE.
7708 If a suitable offset type exists already, reuse it. */
7710 tree
7711 build_offset_type (tree basetype, tree type)
7713 tree t;
7715 /* Make a node of the sort we want. */
7716 t = make_node (OFFSET_TYPE);
7718 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7719 TREE_TYPE (t) = type;
7721 /* If we already have such a type, use the old one. */
7722 hashval_t hash = type_hash_canon_hash (t);
7723 t = type_hash_canon (hash, t);
7725 if (!COMPLETE_TYPE_P (t))
7726 layout_type (t);
7728 if (TYPE_CANONICAL (t) == t)
7730 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7731 || TYPE_STRUCTURAL_EQUALITY_P (type))
7732 SET_TYPE_STRUCTURAL_EQUALITY (t);
7733 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7734 || TYPE_CANONICAL (type) != type)
7735 TYPE_CANONICAL (t)
7736 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7737 TYPE_CANONICAL (type));
7740 return t;
7743 /* Create a complex type whose components are COMPONENT_TYPE.
7745 If NAMED is true, the type is given a TYPE_NAME. We do not always
7746 do so because this creates a DECL node and thus makes the DECL_UIDs
7747 dependent on the type canonicalization hashtable, which is GC-ed,
7748 so the DECL_UIDs would not be stable wrt garbage collection. */
7750 tree
7751 build_complex_type (tree component_type, bool named)
7753 gcc_assert (INTEGRAL_TYPE_P (component_type)
7754 || SCALAR_FLOAT_TYPE_P (component_type)
7755 || FIXED_POINT_TYPE_P (component_type));
7757 /* Make a node of the sort we want. */
7758 tree probe = make_node (COMPLEX_TYPE);
7760 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7762 /* If we already have such a type, use the old one. */
7763 hashval_t hash = type_hash_canon_hash (probe);
7764 tree t = type_hash_canon (hash, probe);
7766 if (t == probe)
7768 /* We created a new type. The hash insertion will have laid
7769 out the type. We need to check the canonicalization and
7770 maybe set the name. */
7771 gcc_checking_assert (COMPLETE_TYPE_P (t)
7772 && !TYPE_NAME (t)
7773 && TYPE_CANONICAL (t) == t);
7775 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7776 SET_TYPE_STRUCTURAL_EQUALITY (t);
7777 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7778 TYPE_CANONICAL (t)
7779 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7781 /* We need to create a name, since complex is a fundamental type. */
7782 if (named)
7784 const char *name = NULL;
7786 if (TREE_TYPE (t) == char_type_node)
7787 name = "complex char";
7788 else if (TREE_TYPE (t) == signed_char_type_node)
7789 name = "complex signed char";
7790 else if (TREE_TYPE (t) == unsigned_char_type_node)
7791 name = "complex unsigned char";
7792 else if (TREE_TYPE (t) == short_integer_type_node)
7793 name = "complex short int";
7794 else if (TREE_TYPE (t) == short_unsigned_type_node)
7795 name = "complex short unsigned int";
7796 else if (TREE_TYPE (t) == integer_type_node)
7797 name = "complex int";
7798 else if (TREE_TYPE (t) == unsigned_type_node)
7799 name = "complex unsigned int";
7800 else if (TREE_TYPE (t) == long_integer_type_node)
7801 name = "complex long int";
7802 else if (TREE_TYPE (t) == long_unsigned_type_node)
7803 name = "complex long unsigned int";
7804 else if (TREE_TYPE (t) == long_long_integer_type_node)
7805 name = "complex long long int";
7806 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7807 name = "complex long long unsigned int";
7809 if (name != NULL)
7810 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7811 get_identifier (name), t);
7815 return build_qualified_type (t, TYPE_QUALS (component_type));
7818 /* If TYPE is a real or complex floating-point type and the target
7819 does not directly support arithmetic on TYPE then return the wider
7820 type to be used for arithmetic on TYPE. Otherwise, return
7821 NULL_TREE. */
7823 tree
7824 excess_precision_type (tree type)
7826 /* The target can give two different responses to the question of
7827 which excess precision mode it would like depending on whether we
7828 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7830 enum excess_precision_type requested_type
7831 = (flag_excess_precision == EXCESS_PRECISION_FAST
7832 ? EXCESS_PRECISION_TYPE_FAST
7833 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7834 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7836 enum flt_eval_method target_flt_eval_method
7837 = targetm.c.excess_precision (requested_type);
7839 /* The target should not ask for unpredictable float evaluation (though
7840 it might implicitly advertise that the evaluation is unpredictable,
7841 but we don't care about that here; it will have been reported
7842 elsewhere). If it does ask for unpredictable evaluation, we have
7843 nothing to do here. */
7844 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7846 /* Nothing to do. The target has asked for all types we know about
7847 to be computed with their native precision and range. */
7848 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7849 return NULL_TREE;
7851 /* The target will promote this type in a target-dependent way, so excess
7852 precision ought to leave it alone. */
7853 if (targetm.promoted_type (type) != NULL_TREE)
7854 return NULL_TREE;
7856 machine_mode float16_type_mode = (float16_type_node
7857 ? TYPE_MODE (float16_type_node)
7858 : VOIDmode);
7859 machine_mode bfloat16_type_mode = (bfloat16_type_node
7860 ? TYPE_MODE (bfloat16_type_node)
7861 : VOIDmode);
7862 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7863 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7865 switch (TREE_CODE (type))
7867 case REAL_TYPE:
7869 machine_mode type_mode = TYPE_MODE (type);
7870 switch (target_flt_eval_method)
7872 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7873 if (type_mode == float16_type_mode
7874 || type_mode == bfloat16_type_mode)
7875 return float_type_node;
7876 break;
7877 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7878 if (type_mode == float16_type_mode
7879 || type_mode == bfloat16_type_mode
7880 || type_mode == float_type_mode)
7881 return double_type_node;
7882 break;
7883 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7884 if (type_mode == float16_type_mode
7885 || type_mode == bfloat16_type_mode
7886 || type_mode == float_type_mode
7887 || type_mode == double_type_mode)
7888 return long_double_type_node;
7889 break;
7890 default:
7891 gcc_unreachable ();
7893 break;
7895 case COMPLEX_TYPE:
7897 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7898 return NULL_TREE;
7899 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7900 switch (target_flt_eval_method)
7902 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7903 if (type_mode == float16_type_mode
7904 || type_mode == bfloat16_type_mode)
7905 return complex_float_type_node;
7906 break;
7907 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7908 if (type_mode == float16_type_mode
7909 || type_mode == bfloat16_type_mode
7910 || type_mode == float_type_mode)
7911 return complex_double_type_node;
7912 break;
7913 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7914 if (type_mode == float16_type_mode
7915 || type_mode == bfloat16_type_mode
7916 || type_mode == float_type_mode
7917 || type_mode == double_type_mode)
7918 return complex_long_double_type_node;
7919 break;
7920 default:
7921 gcc_unreachable ();
7923 break;
7925 default:
7926 break;
7929 return NULL_TREE;
7932 /* Return OP, stripped of any conversions to wider types as much as is safe.
7933 Converting the value back to OP's type makes a value equivalent to OP.
7935 If FOR_TYPE is nonzero, we return a value which, if converted to
7936 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7938 OP must have integer, real or enumeral type. Pointers are not allowed!
7940 There are some cases where the obvious value we could return
7941 would regenerate to OP if converted to OP's type,
7942 but would not extend like OP to wider types.
7943 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7944 For example, if OP is (unsigned short)(signed char)-1,
7945 we avoid returning (signed char)-1 if FOR_TYPE is int,
7946 even though extending that to an unsigned short would regenerate OP,
7947 since the result of extending (signed char)-1 to (int)
7948 is different from (int) OP. */
7950 tree
7951 get_unwidened (tree op, tree for_type)
7953 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7954 tree type = TREE_TYPE (op);
7955 unsigned final_prec
7956 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7957 int uns
7958 = (for_type != 0 && for_type != type
7959 && final_prec > TYPE_PRECISION (type)
7960 && TYPE_UNSIGNED (type));
7961 tree win = op;
7963 while (CONVERT_EXPR_P (op))
7965 int bitschange;
7967 /* TYPE_PRECISION on vector types has different meaning
7968 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7969 so avoid them here. */
7970 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7971 break;
7973 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7974 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7976 /* Truncations are many-to-one and so cannot be removed,
7977 unless we are later going to truncate down even further. */
7978 if (bitschange < 0
7979 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7980 break;
7982 /* See what's inside this conversion. If we decide to strip it,
7983 we will set WIN. */
7984 op = TREE_OPERAND (op, 0);
7986 /* If we have not stripped any zero-extensions (uns is 0),
7987 we can strip any kind of extension.
7988 If we have previously stripped a zero-extension,
7989 only zero-extensions can safely be stripped.
7990 Any extension can be stripped if the bits it would produce
7991 are all going to be discarded later by truncating to FOR_TYPE. */
7993 if (bitschange > 0)
7995 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7996 win = op;
7997 /* TYPE_UNSIGNED says whether this is a zero-extension.
7998 Let's avoid computing it if it does not affect WIN
7999 and if UNS will not be needed again. */
8000 if ((uns
8001 || CONVERT_EXPR_P (op))
8002 && TYPE_UNSIGNED (TREE_TYPE (op)))
8004 uns = 1;
8005 win = op;
8010 /* If we finally reach a constant, see if it fits in something smaller
8011 and in that case convert it. */
8012 if (TREE_CODE (win) == INTEGER_CST)
8014 tree wtype = TREE_TYPE (win);
8015 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8016 if (for_type)
8017 prec = MAX (prec, final_prec);
8018 if (prec < TYPE_PRECISION (wtype))
8020 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8021 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8022 win = fold_convert (t, win);
8026 return win;
8029 /* Return OP or a simpler expression for a narrower value
8030 which can be sign-extended or zero-extended to give back OP.
8031 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8032 or 0 if the value should be sign-extended. */
8034 tree
8035 get_narrower (tree op, int *unsignedp_ptr)
8037 int uns = 0;
8038 bool first = true;
8039 tree win = op;
8040 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8042 if (TREE_CODE (op) == COMPOUND_EXPR)
8045 op = TREE_OPERAND (op, 1);
8046 while (TREE_CODE (op) == COMPOUND_EXPR);
8047 tree ret = get_narrower (op, unsignedp_ptr);
8048 if (ret == op)
8049 return win;
8050 auto_vec <tree, 16> v;
8051 unsigned int i;
8052 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8053 op = TREE_OPERAND (op, 1))
8054 v.safe_push (op);
8055 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8056 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8057 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8058 ret);
8059 return ret;
8061 while (TREE_CODE (op) == NOP_EXPR)
8063 int bitschange
8064 = (TYPE_PRECISION (TREE_TYPE (op))
8065 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8067 /* Truncations are many-one so cannot be removed. */
8068 if (bitschange < 0)
8069 break;
8071 /* See what's inside this conversion. If we decide to strip it,
8072 we will set WIN. */
8074 if (bitschange > 0)
8076 op = TREE_OPERAND (op, 0);
8077 /* An extension: the outermost one can be stripped,
8078 but remember whether it is zero or sign extension. */
8079 if (first)
8080 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8081 /* Otherwise, if a sign extension has been stripped,
8082 only sign extensions can now be stripped;
8083 if a zero extension has been stripped, only zero-extensions. */
8084 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8085 break;
8086 first = false;
8088 else /* bitschange == 0 */
8090 /* A change in nominal type can always be stripped, but we must
8091 preserve the unsignedness. */
8092 if (first)
8093 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8094 first = false;
8095 op = TREE_OPERAND (op, 0);
8096 /* Keep trying to narrow, but don't assign op to win if it
8097 would turn an integral type into something else. */
8098 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8099 continue;
8102 win = op;
8105 if (TREE_CODE (op) == COMPONENT_REF
8106 /* Since type_for_size always gives an integer type. */
8107 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8108 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8109 /* Ensure field is laid out already. */
8110 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8111 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8113 unsigned HOST_WIDE_INT innerprec
8114 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8115 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8116 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8117 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8119 /* We can get this structure field in a narrower type that fits it,
8120 but the resulting extension to its nominal type (a fullword type)
8121 must satisfy the same conditions as for other extensions.
8123 Do this only for fields that are aligned (not bit-fields),
8124 because there is no advantage in doing this when bit-field
8125 insns will be used. */
8127 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8128 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8129 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8130 && type != 0)
8132 if (first)
8133 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8134 win = fold_convert (type, op);
8138 *unsignedp_ptr = uns;
8139 return win;
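A hedged usage sketch (GCC-internal context assumed, so not standalone): callers typically compare the returned tree against the original operand and consult the signedness flag before deciding whether to work in the narrower type.

/* Hypothetical caller -- illustrative only.  */
int unsignedp;
tree narrow = get_narrower (exp, &unsignedp);
if (narrow != exp)
  {
    /* EXP is an extension of NARROW; UNSIGNEDP is 1 if re-extending
       NARROW should zero-extend, 0 if it should sign-extend.  */
  }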
8142 /* Return true if integer constant C has a value that is permissible
8143 for TYPE, an integral type. */
8145 bool
8146 int_fits_type_p (const_tree c, const_tree type)
8148 tree type_low_bound, type_high_bound;
8149 bool ok_for_low_bound, ok_for_high_bound;
8150 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8152 /* Non-standard boolean types can have arbitrary precision but various
8153 transformations assume that they can only take values 0 and +/-1. */
8154 if (TREE_CODE (type) == BOOLEAN_TYPE)
8155 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8157 retry:
8158 type_low_bound = TYPE_MIN_VALUE (type);
8159 type_high_bound = TYPE_MAX_VALUE (type);
8161 /* If at least one bound of the type is a constant integer, we can check
8162 ourselves and maybe make a decision. If no such decision is possible, but
8163 this type is a subtype, try checking against that. Otherwise, use
8164 fits_to_tree_p, which checks against the precision.
8166 Compute the status for each possibly constant bound, and return false as
8167 soon as we see a constant bound that C fails. Use ok_for_xxx_bound to
8168 record whether C is known to satisfy that bound; it stays false when the
8169 bound is not a constant we can check against. */
8171 /* Check if c >= type_low_bound. */
8172 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8174 if (tree_int_cst_lt (c, type_low_bound))
8175 return false;
8176 ok_for_low_bound = true;
8178 else
8179 ok_for_low_bound = false;
8181 /* Check if c <= type_high_bound. */
8182 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8184 if (tree_int_cst_lt (type_high_bound, c))
8185 return false;
8186 ok_for_high_bound = true;
8188 else
8189 ok_for_high_bound = false;
8191 /* If the constant fits both bounds, the result is known. */
8192 if (ok_for_low_bound && ok_for_high_bound)
8193 return true;
8195 /* Perform some generic filtering which may allow making a decision
8196 even if the bounds are not constant. First, negative integers
8197 never fit in unsigned types. */
8198 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8199 return false;
8201 /* Second, narrower types always fit in wider ones. */
8202 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8203 return true;
8205 /* Third, unsigned integers with top bit set never fit signed types. */
8206 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8208 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8209 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8211 /* When a tree_cst is converted to a wide-int, the precision
8212 is taken from the type. However, if the precision of the
8213 mode underneath the type is smaller than that, it is
8214 possible that the value will not fit. The test below
8215 fails if any bit is set between the sign bit of the
8216 underlying mode and the top bit of the type. */
8217 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8218 return false;
8220 else if (wi::neg_p (wi::to_wide (c)))
8221 return false;
8224 /* If we haven't been able to decide at this point, there is nothing more we
8225 can check ourselves here. Look at the base type if we have one and it
8226 has the same precision. */
8227 if (TREE_CODE (type) == INTEGER_TYPE
8228 && TREE_TYPE (type) != 0
8229 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8231 type = TREE_TYPE (type);
8232 goto retry;
8235 /* Or to fits_to_tree_p, if nothing else. */
8236 return wi::fits_to_tree_p (wi::to_wide (c), type);
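The shortcut rules used above (negative values never fit unsigned types, narrower types always fit wider ones, and a same-width unsigned value with its top bit set never fits a signed type) all follow from the range definition of "fits".  A standalone model of that definition, illustrative only and restricted to small precisions so plain 64-bit arithmetic suffices:

/* Illustrative model only -- not part of tree.cc.  */
#include <cassert>

static bool
fits_p (long long value, unsigned prec, bool unsigned_p)
{
  /* Restricted to 1 <= PREC <= 62 so the shifts below stay defined.  */
  assert (prec >= 1 && prec <= 62);
  if (unsigned_p)
    return value >= 0 && value < (1LL << prec);
  return value >= -(1LL << (prec - 1)) && value < (1LL << (prec - 1));
}

int
main ()
{
  assert (!fits_p (-1, 32, true));    /* negative never fits an unsigned type */
  assert (fits_p (255, 16, false));   /* a narrower value fits a wider type */
  assert (!fits_p (200, 8, false));   /* top bit set: does not fit signed char */
  return 0;
}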
8239 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8240 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8241 represented (assuming two's-complement arithmetic) within the bit
8242 precision of the type are returned instead. */
8244 void
8245 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8247 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8248 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8249 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8250 else
8252 if (TYPE_UNSIGNED (type))
8253 mpz_set_ui (min, 0);
8254 else
8256 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8257 wi::to_mpz (mn, min, SIGNED);
8261 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8262 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8263 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8264 else
8266 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8267 wi::to_mpz (mn, max, TYPE_SIGN (type));
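A minimal usage sketch, assuming the usual GMP conventions (the caller initializes and clears the mpz_t values); the surrounding GCC-internal context is assumed rather than shown:

/* Hypothetical caller -- illustrative only.  */
mpz_t lo, hi;
mpz_init (lo);
mpz_init (hi);
get_type_static_bounds (type, lo, hi);
/* ... use LO and HI, e.g. compare a candidate value with mpz_cmp ...  */
mpz_clear (lo);
mpz_clear (hi);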
8271 /* Return true if VAR is an automatic variable. */
8273 bool
8274 auto_var_p (const_tree var)
8276 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8277 || TREE_CODE (var) == PARM_DECL)
8278 && ! TREE_STATIC (var))
8279 || TREE_CODE (var) == RESULT_DECL);
8282 /* Return true if VAR is an automatic variable defined in function FN. */
8284 bool
8285 auto_var_in_fn_p (const_tree var, const_tree fn)
8287 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8288 && (auto_var_p (var)
8289 || TREE_CODE (var) == LABEL_DECL));
8292 /* Subprogram of following function. Called by walk_tree.
8294 Return *TP if it is an automatic variable or parameter of the
8295 function passed in as DATA. */
8297 static tree
8298 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8300 tree fn = (tree) data;
8302 if (TYPE_P (*tp))
8303 *walk_subtrees = 0;
8305 else if (DECL_P (*tp)
8306 && auto_var_in_fn_p (*tp, fn))
8307 return *tp;
8309 return NULL_TREE;
8312 /* Returns true if T is, contains, or refers to a type with variable
8313 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8314 arguments, but not the return type. If FN is nonzero, only return
8315 true if a modifier of the type or position of FN is a variable or
8316 parameter inside FN.
8318 This concept is more general than that of C99 'variably modified types':
8319 in C99, a struct type is never variably modified because a VLA may not
8320 appear as a structure member. However, in GNU C, code like:
8322 struct S { int i[f()]; };
8324 is valid, and other languages may define similar constructs. */
8326 bool
8327 variably_modified_type_p (tree type, tree fn)
8329 tree t;
8331 /* Test if T is either variable (if FN is zero) or an expression containing
8332 a variable in FN. If TYPE isn't gimplified, return true also if
8333 gimplify_one_sizepos would gimplify the expression into a local
8334 variable. */
8335 #define RETURN_TRUE_IF_VAR(T) \
8336 do { tree _t = (T); \
8337 if (_t != NULL_TREE \
8338 && _t != error_mark_node \
8339 && !CONSTANT_CLASS_P (_t) \
8340 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8341 && (!fn \
8342 || (!TYPE_SIZES_GIMPLIFIED (type) \
8343 && (TREE_CODE (_t) != VAR_DECL \
8344 && !CONTAINS_PLACEHOLDER_P (_t))) \
8345 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8346 return true; } while (0)
8348 if (type == error_mark_node)
8349 return false;
8351 /* If TYPE itself has variable size, it is variably modified. */
8352 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8353 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8355 switch (TREE_CODE (type))
8357 case POINTER_TYPE:
8358 case REFERENCE_TYPE:
8359 case VECTOR_TYPE:
8360 /* Ada can have pointer types referring to themselves indirectly. */
8361 if (TREE_VISITED (type))
8362 return false;
8363 TREE_VISITED (type) = true;
8364 if (variably_modified_type_p (TREE_TYPE (type), fn))
8366 TREE_VISITED (type) = false;
8367 return true;
8369 TREE_VISITED (type) = false;
8370 break;
8372 case FUNCTION_TYPE:
8373 case METHOD_TYPE:
8374 /* If TYPE is a function type, it is variably modified if the
8375 return type is variably modified. */
8376 if (variably_modified_type_p (TREE_TYPE (type), fn))
8377 return true;
8378 break;
8380 case INTEGER_TYPE:
8381 case REAL_TYPE:
8382 case FIXED_POINT_TYPE:
8383 case ENUMERAL_TYPE:
8384 case BOOLEAN_TYPE:
8385 /* Scalar types are variably modified if their end points
8386 aren't constant. */
8387 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8388 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8389 break;
8391 case RECORD_TYPE:
8392 case UNION_TYPE:
8393 case QUAL_UNION_TYPE:
8394 /* We can't see if any of the fields are variably-modified by the
8395 definition we normally use, since that would produce infinite
8396 recursion via pointers. */
8397 /* This is variably modified if some field's type is. */
8398 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8399 if (TREE_CODE (t) == FIELD_DECL)
8401 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8402 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8403 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8405 /* If the type is a qualified union, then the DECL_QUALIFIER
8406 of fields can also be an expression containing a variable. */
8407 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8408 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8410 /* If the field is a qualified union, then it's only a container
8411 for what's inside so we look into it. That's necessary in LTO
8412 mode because the sizes of the field tested above have been set
8413 to PLACEHOLDER_EXPRs by free_lang_data. */
8414 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8415 && variably_modified_type_p (TREE_TYPE (t), fn))
8416 return true;
8418 break;
8420 case ARRAY_TYPE:
8421 /* Do not call ourselves to avoid infinite recursion. This is
8422 variably modified if the element type is. */
8423 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8424 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8425 break;
8427 default:
8428 break;
8431 /* The current language may have other cases to check, but in general,
8432 all other types are not variably modified. */
8433 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8435 #undef RETURN_TRUE_IF_VAR
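For concreteness, here are C declarations (GNU C for the structure-member case, as in the comment above) whose types this predicate reports as variably modified; illustrative only, not part of tree.cc:

/* Illustrative GNU C input, as seen by the C front end.  */
void
f (int n)
{
  int vla[n];                    /* size depends on a parameter */
  int (*ptr_to_vla)[n];          /* pointer to a variably modified type */
  struct s { int a[n]; } gnu;    /* GNU C: VLA as a structure member */
  (void) vla; (void) ptr_to_vla; (void) gnu;
}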
8438 /* Given a DECL or TYPE, return the scope in which it was declared, or
8439 NULL_TREE if there is no containing scope. */
8441 tree
8442 get_containing_scope (const_tree t)
8444 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8447 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8449 const_tree
8450 get_ultimate_context (const_tree decl)
8452 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8454 if (TREE_CODE (decl) == BLOCK)
8455 decl = BLOCK_SUPERCONTEXT (decl);
8456 else
8457 decl = get_containing_scope (decl);
8459 return decl;
8462 /* Return the innermost context enclosing DECL that is
8463 a FUNCTION_DECL, or zero if none. */
8465 tree
8466 decl_function_context (const_tree decl)
8468 tree context;
8470 if (TREE_CODE (decl) == ERROR_MARK)
8471 return 0;
8473 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8474 where we look up the function at runtime. Such functions always take
8475 a first argument of type 'pointer to real context'.
8477 C++ should really be fixed to use DECL_CONTEXT for the real context,
8478 and use something else for the "virtual context". */
8479 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8480 context
8481 = TYPE_MAIN_VARIANT
8482 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8483 else
8484 context = DECL_CONTEXT (decl);
8486 while (context && TREE_CODE (context) != FUNCTION_DECL)
8488 if (TREE_CODE (context) == BLOCK)
8489 context = BLOCK_SUPERCONTEXT (context);
8490 else
8491 context = get_containing_scope (context);
8494 return context;
8497 /* Return the innermost context enclosing DECL that is
8498 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8499 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8501 tree
8502 decl_type_context (const_tree decl)
8504 tree context = DECL_CONTEXT (decl);
8506 while (context)
8507 switch (TREE_CODE (context))
8509 case NAMESPACE_DECL:
8510 case TRANSLATION_UNIT_DECL:
8511 return NULL_TREE;
8513 case RECORD_TYPE:
8514 case UNION_TYPE:
8515 case QUAL_UNION_TYPE:
8516 return context;
8518 case TYPE_DECL:
8519 case FUNCTION_DECL:
8520 context = DECL_CONTEXT (context);
8521 break;
8523 case BLOCK:
8524 context = BLOCK_SUPERCONTEXT (context);
8525 break;
8527 default:
8528 gcc_unreachable ();
8531 return NULL_TREE;
8534 /* CALL is a CALL_EXPR. Return the declaration for the function
8535 called, or NULL_TREE if the called function cannot be
8536 determined. */
8538 tree
8539 get_callee_fndecl (const_tree call)
8541 tree addr;
8543 if (call == error_mark_node)
8544 return error_mark_node;
8546 /* It's invalid to call this function with anything but a
8547 CALL_EXPR. */
8548 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8550 /* The first operand to the CALL is the address of the function
8551 called. */
8552 addr = CALL_EXPR_FN (call);
8554 /* If there is no function, return early. */
8555 if (addr == NULL_TREE)
8556 return NULL_TREE;
8558 STRIP_NOPS (addr);
8560 /* If this is a readonly function pointer, extract its initial value. */
8561 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8562 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8563 && DECL_INITIAL (addr))
8564 addr = DECL_INITIAL (addr);
8566 /* If the address is just `&f' for some function `f', then we know
8567 that `f' is being called. */
8568 if (TREE_CODE (addr) == ADDR_EXPR
8569 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8570 return TREE_OPERAND (addr, 0);
8572 /* We couldn't figure out what was being called. */
8573 return NULL_TREE;
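A minimal usage sketch (GCC-internal context assumed; the helper name is invented for illustration): a call whose address folds to `&f' yields the FUNCTION_DECL, while a call through an unknown function pointer yields NULL_TREE, so the result must always be checked.

/* Hypothetical helper -- illustrative only.  */
static bool
call_is_direct_memcpy_p (const_tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl != NULL_TREE && fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY);
}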
8576 /* Return true when CALL's arguments and return value match those of FNDECL,
8577 a decl of a builtin function. */
8579 static bool
8580 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8582 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8584 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8585 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8586 fndecl = decl;
8588 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8589 if (gimple_form
8590 ? !useless_type_conversion_p (TREE_TYPE (call),
8591 TREE_TYPE (TREE_TYPE (fndecl)))
8592 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8593 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8594 return false;
8596 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8597 unsigned nargs = call_expr_nargs (call);
8598 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8600 /* Variadic args follow. */
8601 if (!targs)
8602 return true;
8603 tree arg = CALL_EXPR_ARG (call, i);
8604 tree type = TREE_VALUE (targs);
8605 if (gimple_form
8606 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8607 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8609 /* For pointer arguments be more forgiving, e.g. due to
8610 FILE * vs. fileptr_type_node, or say char * vs. const char *
8611 differences etc. */
8612 if (!gimple_form
8613 && POINTER_TYPE_P (type)
8614 && POINTER_TYPE_P (TREE_TYPE (arg))
8615 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8616 continue;
8617 /* char/short integral arguments are promoted to int
8618 by several frontends if targetm.calls.promote_prototypes
8619 is true. Allow such promotion too. */
8620 if (INTEGRAL_TYPE_P (type)
8621 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8622 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8623 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8624 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8625 && (gimple_form
8626 ? useless_type_conversion_p (integer_type_node,
8627 TREE_TYPE (arg))
8628 : tree_nop_conversion_p (integer_type_node,
8629 TREE_TYPE (arg))))
8630 continue;
8631 return false;
8634 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8635 return false;
8636 return true;
8639 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8640 return the associated function code, otherwise return CFN_LAST. */
8642 combined_fn
8643 get_call_combined_fn (const_tree call)
8645 /* It's invalid to call this function with anything but a CALL_EXPR. */
8646 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8648 if (!CALL_EXPR_FN (call))
8649 return as_combined_fn (CALL_EXPR_IFN (call));
8651 tree fndecl = get_callee_fndecl (call);
8652 if (fndecl
8653 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8654 && tree_builtin_call_types_compatible_p (call, fndecl))
8655 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8657 return CFN_LAST;
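Since a combined_fn merges the normal built-in codes with the internal function codes, callers can dispatch on the result with a single switch; a hedged sketch (GCC-internal context assumed):

/* Hypothetical caller -- illustrative only.  */
switch (get_call_combined_fn (expr))
  {
  case CFN_BUILT_IN_MEMCPY:
    /* A type-compatible call to the memcpy built-in.  */
    break;
  case CFN_LAST:
    /* Neither a recognized built-in nor an internal function call.  */
    break;
  default:
    break;
  }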
8660 /* Comparator of indices based on tree_node_counts. */
8662 static int
8663 tree_nodes_cmp (const void *p1, const void *p2)
8665 const unsigned *n1 = (const unsigned *)p1;
8666 const unsigned *n2 = (const unsigned *)p2;
8668 return tree_node_counts[*n1] - tree_node_counts[*n2];
8671 /* Comparator of indices based on tree_code_counts. */
8673 static int
8674 tree_codes_cmp (const void *p1, const void *p2)
8676 const unsigned *n1 = (const unsigned *)p1;
8677 const unsigned *n2 = (const unsigned *)p2;
8679 return tree_code_counts[*n1] - tree_code_counts[*n2];
8682 #define TREE_MEM_USAGE_SPACES 40
8684 /* Print debugging information about tree nodes generated during the compile,
8685 and any language-specific information. */
8687 void
8688 dump_tree_statistics (void)
8690 if (GATHER_STATISTICS)
8692 uint64_t total_nodes, total_bytes;
8693 fprintf (stderr, "\nKind Nodes Bytes\n");
8694 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8695 total_nodes = total_bytes = 0;
8698 auto_vec<unsigned> indices (all_kinds);
8699 for (unsigned i = 0; i < all_kinds; i++)
8700 indices.quick_push (i);
8701 indices.qsort (tree_nodes_cmp);
8703 for (unsigned i = 0; i < (int) all_kinds; i++)
8705 unsigned j = indices[i];
8706 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8707 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8708 SIZE_AMOUNT (tree_node_sizes[j]));
8709 total_nodes += tree_node_counts[j];
8710 total_bytes += tree_node_sizes[j];
8712 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8713 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8714 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8715 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8719 fprintf (stderr, "Code Nodes\n");
8720 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8722 auto_vec<unsigned> indices (MAX_TREE_CODES);
8723 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8724 indices.quick_push (i);
8725 indices.qsort (tree_codes_cmp);
8727 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8729 unsigned j = indices[i];
8730 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8731 get_tree_code_name ((enum tree_code) j),
8732 SIZE_AMOUNT (tree_code_counts[j]));
8734 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8735 fprintf (stderr, "\n");
8736 ssanames_print_statistics ();
8737 fprintf (stderr, "\n");
8738 phinodes_print_statistics ();
8739 fprintf (stderr, "\n");
8742 else
8743 fprintf (stderr, "(No per-node statistics)\n");
8745 print_type_hash_statistics ();
8746 print_debug_expr_statistics ();
8747 print_value_expr_statistics ();
8748 lang_hooks.print_statistics ();
8751 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8753 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8755 unsigned
8756 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8758 /* This relies on the raw feedback's top 4 bits being zero. */
8759 #define FEEDBACK(X) ((X) * 0x04c11db7)
8760 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8761 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8762 static const unsigned syndromes[16] =
8764 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8765 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8766 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8767 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8769 #undef FEEDBACK
8770 #undef SYNDROME
8772 value <<= (32 - bytes * 8);
8773 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8775 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8777 chksum = (chksum << 4) ^ feedback;
8780 return chksum;
8783 /* Generate a crc32 of a string. */
8785 unsigned
8786 crc32_string (unsigned chksum, const char *string)
8789 chksum = crc32_byte (chksum, *string);
8790 while (*string++);
8791 return chksum;
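The SYNDROME table above is a 16-entry lookup for a most-significant-bit-first CRC-32 with polynomial 0x04c11db7, consumed a nibble at a time (no reflection, no initial or final XOR).  A standalone sketch of the same computation, illustrative only:

/* Illustrative only -- not part of tree.cc.  Mirrors crc32_unsigned_n.  */
#include <cstdio>

static unsigned
crc32_sketch (unsigned chksum, unsigned value, unsigned bytes)
{
  const unsigned poly = 0x04c11db7u;
  value <<= 32 - bytes * 8;
  for (unsigned ix = bytes * 2; ix--; value <<= 4)
    {
      unsigned nibble = ((value ^ chksum) >> 28) & 0xf;
      unsigned feedback = 0;
      for (unsigned bit = 0; bit < 4; bit++)
	if (nibble & (1u << bit))
	  feedback ^= poly << bit;	/* same as the SYNDROME table entry */
      chksum = (chksum << 4) ^ feedback;
    }
  return chksum;
}

int
main ()
{
  /* Mirror crc32_string: include the terminating NUL in the checksum.  */
  unsigned chksum = 0;
  const char *s = "main";
  do
    chksum = crc32_sketch (chksum, (unsigned char) *s, 1);
  while (*s++);
  printf ("%08x\n", chksum);
  return 0;
}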
8794 /* P is a string that will be used in a symbol. Mask out any characters
8795 that are not valid in that context. */
8797 void
8798 clean_symbol_name (char *p)
8800 for (; *p; p++)
8801 if (! (ISALNUM (*p)
8802 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8803 || *p == '$'
8804 #endif
8805 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8806 || *p == '.'
8807 #endif
8809 *p = '_';
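For example (hypothetical input; whether '$' and '.' survive depends on the NO_DOLLAR_IN_LABEL and NO_DOT_IN_LABEL configuration macros):

/* Hypothetical caller -- illustrative only.  */
char buf[] = "foo-bar.c";
clean_symbol_name (buf);
/* BUF is now "foo_bar.c" on targets that allow '.' in labels,
   "foo_bar_c" otherwise; the '-' always becomes '_'.  */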
8812 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8814 /* Create a unique anonymous identifier. The identifier is still a
8815 valid assembly label. */
8817 tree
8818 make_anon_name ()
8820 const char *fmt =
8821 #if !defined (NO_DOT_IN_LABEL)
8823 #elif !defined (NO_DOLLAR_IN_LABEL)
8825 #else
8827 #endif
8828 "_anon_%d";
8830 char buf[24];
8831 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8832 gcc_checking_assert (len < int (sizeof (buf)));
8834 tree id = get_identifier_with_length (buf, len);
8835 IDENTIFIER_ANON_P (id) = true;
8837 return id;
8840 /* Generate a name for a special-purpose function.
8841 The generated name may need to be unique across the whole link.
8842 Changes to this function may also require corresponding changes to
8843 xstrdup_mask_random.
8844 TYPE is some string to identify the purpose of this function to the
8845 linker or collect2; it must start with an uppercase letter,
8846 one of:
8847 I - for constructors
8848 D - for destructors
8849 N - for C++ anonymous namespaces
8850 F - for DWARF unwind frame information. */
8852 tree
8853 get_file_function_name (const char *type)
8855 char *buf;
8856 const char *p;
8857 char *q;
8859 /* If we already have a name we know to be unique, just use that. */
8860 if (first_global_object_name)
8861 p = q = ASTRDUP (first_global_object_name);
8862 /* If the target is handling the constructors/destructors, they
8863 will be local to this file and the name is only necessary for
8864 debugging purposes.
8865 We also assign sub_I and sub_D suffixes to constructors called from
8866 the global static constructors. These are always local. */
8867 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8868 || (startswith (type, "sub_")
8869 && (type[4] == 'I' || type[4] == 'D')))
8871 const char *file = main_input_filename;
8872 if (! file)
8873 file = LOCATION_FILE (input_location);
8874 /* Just use the file's basename, because the full pathname
8875 might be quite long. */
8876 p = q = ASTRDUP (lbasename (file));
8878 else
8880 /* Otherwise, the name must be unique across the entire link.
8881 We don't have anything that we know to be unique to this translation
8882 unit, so use what we do have and throw in some randomness. */
8883 unsigned len;
8884 const char *name = weak_global_object_name;
8885 const char *file = main_input_filename;
8887 if (! name)
8888 name = "";
8889 if (! file)
8890 file = LOCATION_FILE (input_location);
8892 len = strlen (file);
8893 q = (char *) alloca (9 + 19 + len + 1);
8894 memcpy (q, file, len + 1);
8896 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8897 crc32_string (0, name), get_random_seed (false));
8899 p = q;
8902 clean_symbol_name (q);
8903 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8904 + strlen (type));
8906 /* Set up the name of the file-level functions we may need.
8907 Use a global object (which is already required to be unique over
8908 the program) rather than the file name (which imposes extra
8909 constraints). */
8910 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8912 return get_identifier (buf);
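Putting the pieces together, and assuming for illustration that the unique string chosen above was "foo": FILE_FUNCTION_FORMAT then yields the familiar constructor/destructor symbols, as this standalone sketch shows.

/* Illustrative only -- not part of tree.cc.  */
#include <cstdio>

int
main ()
{
  char buf[64];
  snprintf (buf, sizeof buf, "_GLOBAL__%s_%s", "I", "foo");
  printf ("%s\n", buf);   /* prints _GLOBAL__I_foo */
  return 0;
}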
8915 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8917 /* Complain that the tree code of NODE does not match the expected 0
8918 terminated list of trailing codes. The trailing code list can be
8919 empty, for a more vague error message. FILE, LINE, and FUNCTION
8920 are of the caller. */
8922 void
8923 tree_check_failed (const_tree node, const char *file,
8924 int line, const char *function, ...)
8926 va_list args;
8927 const char *buffer;
8928 unsigned length = 0;
8929 enum tree_code code;
8931 va_start (args, function);
8932 while ((code = (enum tree_code) va_arg (args, int)))
8933 length += 4 + strlen (get_tree_code_name (code));
8934 va_end (args);
8935 if (length)
8937 char *tmp;
8938 va_start (args, function);
8939 length += strlen ("expected ");
8940 buffer = tmp = (char *) alloca (length);
8941 length = 0;
8942 while ((code = (enum tree_code) va_arg (args, int)))
8944 const char *prefix = length ? " or " : "expected ";
8946 strcpy (tmp + length, prefix);
8947 length += strlen (prefix);
8948 strcpy (tmp + length, get_tree_code_name (code));
8949 length += strlen (get_tree_code_name (code));
8951 va_end (args);
8953 else
8954 buffer = "unexpected node";
8956 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8957 buffer, get_tree_code_name (TREE_CODE (node)),
8958 function, trim_filename (file), line);
8961 /* Complain that the tree code of NODE does match the expected 0
8962 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8963 the caller. */
8965 void
8966 tree_not_check_failed (const_tree node, const char *file,
8967 int line, const char *function, ...)
8969 va_list args;
8970 char *buffer;
8971 unsigned length = 0;
8972 enum tree_code code;
8974 va_start (args, function);
8975 while ((code = (enum tree_code) va_arg (args, int)))
8976 length += 4 + strlen (get_tree_code_name (code));
8977 va_end (args);
8978 va_start (args, function);
8979 buffer = (char *) alloca (length);
8980 length = 0;
8981 while ((code = (enum tree_code) va_arg (args, int)))
8983 if (length)
8985 strcpy (buffer + length, " or ");
8986 length += 4;
8988 strcpy (buffer + length, get_tree_code_name (code));
8989 length += strlen (get_tree_code_name (code));
8991 va_end (args);
8993 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8994 buffer, get_tree_code_name (TREE_CODE (node)),
8995 function, trim_filename (file), line);
8998 /* Similar to tree_check_failed, except that we check for a class of tree
8999 code, given in CL. */
9001 void
9002 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9003 const char *file, int line, const char *function)
9005 internal_error
9006 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9007 TREE_CODE_CLASS_STRING (cl),
9008 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9009 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9012 /* Similar to tree_check_failed, except that instead of specifying a
9013 dozen codes, use the knowledge that they're all sequential. */
9015 void
9016 tree_range_check_failed (const_tree node, const char *file, int line,
9017 const char *function, enum tree_code c1,
9018 enum tree_code c2)
9020 char *buffer;
9021 unsigned length = 0;
9022 unsigned int c;
9024 for (c = c1; c <= c2; ++c)
9025 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9027 length += strlen ("expected ");
9028 buffer = (char *) alloca (length);
9029 length = 0;
9031 for (c = c1; c <= c2; ++c)
9033 const char *prefix = length ? " or " : "expected ";
9035 strcpy (buffer + length, prefix);
9036 length += strlen (prefix);
9037 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9038 length += strlen (get_tree_code_name ((enum tree_code) c));
9041 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9042 buffer, get_tree_code_name (TREE_CODE (node)),
9043 function, trim_filename (file), line);
9047 /* Similar to tree_check_failed, except that we check that a tree does
9048 not belong to the specified class of tree codes, given in CL. */
9050 void
9051 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9052 const char *file, int line, const char *function)
9054 internal_error
9055 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9056 TREE_CODE_CLASS_STRING (cl),
9057 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9058 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9062 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9064 void
9065 omp_clause_check_failed (const_tree node, const char *file, int line,
9066 const char *function, enum omp_clause_code code)
9068 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9069 "in %s, at %s:%d",
9070 omp_clause_code_name[code],
9071 get_tree_code_name (TREE_CODE (node)),
9072 function, trim_filename (file), line);
9076 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9078 void
9079 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9080 const char *function, enum omp_clause_code c1,
9081 enum omp_clause_code c2)
9083 char *buffer;
9084 unsigned length = 0;
9085 unsigned int c;
9087 for (c = c1; c <= c2; ++c)
9088 length += 4 + strlen (omp_clause_code_name[c]);
9090 length += strlen ("expected ");
9091 buffer = (char *) alloca (length);
9092 length = 0;
9094 for (c = c1; c <= c2; ++c)
9096 const char *prefix = length ? " or " : "expected ";
9098 strcpy (buffer + length, prefix);
9099 length += strlen (prefix);
9100 strcpy (buffer + length, omp_clause_code_name[c]);
9101 length += strlen (omp_clause_code_name[c]);
9104 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9105 buffer, omp_clause_code_name[TREE_CODE (node)],
9106 function, trim_filename (file), line);
9110 #undef DEFTREESTRUCT
9111 #define DEFTREESTRUCT(VAL, NAME) NAME,
9113 static const char *ts_enum_names[] = {
9114 #include "treestruct.def"
9116 #undef DEFTREESTRUCT
9118 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9120 /* Similar to tree_class_check_failed, except that we check whether
9121 the code of NODE contains the tree structure identified by EN. */
9123 void
9124 tree_contains_struct_check_failed (const_tree node,
9125 const enum tree_node_structure_enum en,
9126 const char *file, int line,
9127 const char *function)
9129 internal_error
9130 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9131 TS_ENUM_NAME (en),
9132 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9136 /* Similar to above, except that the check is for the bounds of a
9137 TREE_INT_CST's (dynamically sized) array of elements. */
9139 void
9140 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9141 const char *function)
9143 internal_error
9144 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9145 "at %s:%d",
9146 idx + 1, len, function, trim_filename (file), line);
9149 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9150 (dynamically sized) vector. */
9152 void
9153 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9154 const char *function)
9156 internal_error
9157 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9158 idx + 1, len, function, trim_filename (file), line);
9161 /* Similar to above, except that the check is for the bounds of the operand
9162 vector of an expression node EXP. */
9164 void
9165 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9166 int line, const char *function)
9168 enum tree_code code = TREE_CODE (exp);
9169 internal_error
9170 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9171 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9172 function, trim_filename (file), line);
9175 /* Similar to above, except that the check is for the number of
9176 operands of an OMP_CLAUSE node. */
9178 void
9179 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9180 int line, const char *function)
9182 internal_error
9183 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9184 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9185 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9186 trim_filename (file), line);
9188 #endif /* ENABLE_TREE_CHECKING */
9190 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9191 and mapped to the machine mode MODE. Initialize its fields and build
9192 the information necessary for debugging output. */
9194 static tree
9195 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9197 tree t;
9198 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9200 t = make_node (VECTOR_TYPE);
9201 TREE_TYPE (t) = mv_innertype;
9202 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9203 SET_TYPE_MODE (t, mode);
9205 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9206 SET_TYPE_STRUCTURAL_EQUALITY (t);
9207 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9208 || mode != VOIDmode)
9209 && !VECTOR_BOOLEAN_TYPE_P (t))
9210 TYPE_CANONICAL (t)
9211 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9213 layout_type (t);
9215 hashval_t hash = type_hash_canon_hash (t);
9216 t = type_hash_canon (hash, t);
9218 /* We have built a main variant, based on the main variant of the
9219 inner type. Use it to build the variant we return. */
9220 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9221 && TREE_TYPE (t) != innertype)
9222 return build_type_attribute_qual_variant (t,
9223 TYPE_ATTRIBUTES (innertype),
9224 TYPE_QUALS (innertype));
9226 return t;
9229 static tree
9230 make_or_reuse_type (unsigned size, int unsignedp)
9232 int i;
9234 if (size == INT_TYPE_SIZE)
9235 return unsignedp ? unsigned_type_node : integer_type_node;
9236 if (size == CHAR_TYPE_SIZE)
9237 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9238 if (size == SHORT_TYPE_SIZE)
9239 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9240 if (size == LONG_TYPE_SIZE)
9241 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9242 if (size == LONG_LONG_TYPE_SIZE)
9243 return (unsignedp ? long_long_unsigned_type_node
9244 : long_long_integer_type_node);
9246 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9247 if (size == int_n_data[i].bitsize
9248 && int_n_enabled_p[i])
9249 return (unsignedp ? int_n_trees[i].unsigned_type
9250 : int_n_trees[i].signed_type);
9252 if (unsignedp)
9253 return make_unsigned_type (size);
9254 else
9255 return make_signed_type (size);
9258 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9260 static tree
9261 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9263 if (satp)
9265 if (size == SHORT_FRACT_TYPE_SIZE)
9266 return unsignedp ? sat_unsigned_short_fract_type_node
9267 : sat_short_fract_type_node;
9268 if (size == FRACT_TYPE_SIZE)
9269 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9270 if (size == LONG_FRACT_TYPE_SIZE)
9271 return unsignedp ? sat_unsigned_long_fract_type_node
9272 : sat_long_fract_type_node;
9273 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9274 return unsignedp ? sat_unsigned_long_long_fract_type_node
9275 : sat_long_long_fract_type_node;
9277 else
9279 if (size == SHORT_FRACT_TYPE_SIZE)
9280 return unsignedp ? unsigned_short_fract_type_node
9281 : short_fract_type_node;
9282 if (size == FRACT_TYPE_SIZE)
9283 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9284 if (size == LONG_FRACT_TYPE_SIZE)
9285 return unsignedp ? unsigned_long_fract_type_node
9286 : long_fract_type_node;
9287 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9288 return unsignedp ? unsigned_long_long_fract_type_node
9289 : long_long_fract_type_node;
9292 return make_fract_type (size, unsignedp, satp);
9295 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9297 static tree
9298 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9300 if (satp)
9302 if (size == SHORT_ACCUM_TYPE_SIZE)
9303 return unsignedp ? sat_unsigned_short_accum_type_node
9304 : sat_short_accum_type_node;
9305 if (size == ACCUM_TYPE_SIZE)
9306 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9307 if (size == LONG_ACCUM_TYPE_SIZE)
9308 return unsignedp ? sat_unsigned_long_accum_type_node
9309 : sat_long_accum_type_node;
9310 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9311 return unsignedp ? sat_unsigned_long_long_accum_type_node
9312 : sat_long_long_accum_type_node;
9314 else
9316 if (size == SHORT_ACCUM_TYPE_SIZE)
9317 return unsignedp ? unsigned_short_accum_type_node
9318 : short_accum_type_node;
9319 if (size == ACCUM_TYPE_SIZE)
9320 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9321 if (size == LONG_ACCUM_TYPE_SIZE)
9322 return unsignedp ? unsigned_long_accum_type_node
9323 : long_accum_type_node;
9324 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9325 return unsignedp ? unsigned_long_long_accum_type_node
9326 : long_long_accum_type_node;
9329 return make_accum_type (size, unsignedp, satp);
9333 /* Create an atomic variant node for TYPE. This routine is called
9334 during initialization of data types to create the 5 basic atomic
9335 types. The generic build_variant_type function requires these to
9336 already be set up in order to function properly, so cannot be
9337 called from there. If ALIGN is non-zero, then ensure alignment is
9338 overridden to this value. */
9340 static tree
9341 build_atomic_base (tree type, unsigned int align)
9343 tree t;
9345 /* Make sure it's not already registered. */
9346 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9347 return t;
9349 t = build_variant_type_copy (type);
9350 set_type_quals (t, TYPE_QUAL_ATOMIC);
9352 if (align)
9353 SET_TYPE_ALIGN (t, align);
9355 return t;
9358 /* Information about the _FloatN and _FloatNx types. This must be in
9359 the same order as the corresponding TI_* enum values. */
9360 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9362 { 16, false },
9363 { 32, false },
9364 { 64, false },
9365 { 128, false },
9366 { 32, true },
9367 { 64, true },
9368 { 128, true },
9372 /* Create nodes for all integer types (and error_mark_node) using the sizes
9373 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9375 void
9376 build_common_tree_nodes (bool signed_char)
9378 int i;
9380 error_mark_node = make_node (ERROR_MARK);
9381 TREE_TYPE (error_mark_node) = error_mark_node;
9383 initialize_sizetypes ();
9385 /* Define both `signed char' and `unsigned char'. */
9386 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9387 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9388 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9389 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9391 /* Define `char', which is like either `signed char' or `unsigned char'
9392 but not the same as either. */
9393 char_type_node
9394 = (signed_char
9395 ? make_signed_type (CHAR_TYPE_SIZE)
9396 : make_unsigned_type (CHAR_TYPE_SIZE));
9397 TYPE_STRING_FLAG (char_type_node) = 1;
9399 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9400 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9401 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9402 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9403 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9404 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9405 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9406 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9408 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9410 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9411 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9413 if (int_n_enabled_p[i])
9415 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9416 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9420 /* Define a boolean type. This type only represents boolean values but
9421 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9422 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9423 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9424 TYPE_PRECISION (boolean_type_node) = 1;
9425 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9427 /* Define what type to use for size_t. */
9428 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9429 size_type_node = unsigned_type_node;
9430 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9431 size_type_node = long_unsigned_type_node;
9432 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9433 size_type_node = long_long_unsigned_type_node;
9434 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9435 size_type_node = short_unsigned_type_node;
9436 else
9438 int i;
9440 size_type_node = NULL_TREE;
9441 for (i = 0; i < NUM_INT_N_ENTS; i++)
9442 if (int_n_enabled_p[i])
9444 char name[50], altname[50];
9445 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9446 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9448 if (strcmp (name, SIZE_TYPE) == 0
9449 || strcmp (altname, SIZE_TYPE) == 0)
9451 size_type_node = int_n_trees[i].unsigned_type;
9454 if (size_type_node == NULL_TREE)
9455 gcc_unreachable ();
9458 /* Define what type to use for ptrdiff_t. */
9459 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9460 ptrdiff_type_node = integer_type_node;
9461 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9462 ptrdiff_type_node = long_integer_type_node;
9463 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9464 ptrdiff_type_node = long_long_integer_type_node;
9465 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9466 ptrdiff_type_node = short_integer_type_node;
9467 else
9469 ptrdiff_type_node = NULL_TREE;
9470 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9471 if (int_n_enabled_p[i])
9473 char name[50], altname[50];
9474 sprintf (name, "__int%d", int_n_data[i].bitsize);
9475 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9477 if (strcmp (name, PTRDIFF_TYPE) == 0
9478 || strcmp (altname, PTRDIFF_TYPE) == 0)
9479 ptrdiff_type_node = int_n_trees[i].signed_type;
9481 if (ptrdiff_type_node == NULL_TREE)
9482 gcc_unreachable ();
9485 /* Fill in the rest of the sized types. Reuse existing type nodes
9486 when possible. */
9487 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9488 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9489 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9490 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9491 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9493 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9494 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9495 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9496 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9497 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9499 /* Don't call build_qualified_type for atomics. That routine does
9500 special processing for atomics, and until they are initialized
9501 it's better not to make that call.
9503 Check to see if there is a target override for atomic types. */
9505 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9506 targetm.atomic_align_for_mode (QImode));
9507 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9508 targetm.atomic_align_for_mode (HImode));
9509 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9510 targetm.atomic_align_for_mode (SImode));
9511 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9512 targetm.atomic_align_for_mode (DImode));
9513 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9514 targetm.atomic_align_for_mode (TImode));
9516 access_public_node = get_identifier ("public");
9517 access_protected_node = get_identifier ("protected");
9518 access_private_node = get_identifier ("private");
9520 /* Define these next since types below may use them. */
9521 integer_zero_node = build_int_cst (integer_type_node, 0);
9522 integer_one_node = build_int_cst (integer_type_node, 1);
9523 integer_three_node = build_int_cst (integer_type_node, 3);
9524 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9526 size_zero_node = size_int (0);
9527 size_one_node = size_int (1);
9528 bitsize_zero_node = bitsize_int (0);
9529 bitsize_one_node = bitsize_int (1);
9530 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9532 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9533 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9535 void_type_node = make_node (VOID_TYPE);
9536 layout_type (void_type_node);
9538 /* We are not going to have real types in C with less than byte alignment,
9539 so we might as well not have any types that claim to have it. */
9540 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9541 TYPE_USER_ALIGN (void_type_node) = 0;
9543 void_node = make_node (VOID_CST);
9544 TREE_TYPE (void_node) = void_type_node;
9546 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9548 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9549 layout_type (TREE_TYPE (null_pointer_node));
9551 ptr_type_node = build_pointer_type (void_type_node);
9552 const_ptr_type_node
9553 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9554 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9555 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9557 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9559 float_type_node = make_node (REAL_TYPE);
9560 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9561 layout_type (float_type_node);
9563 double_type_node = make_node (REAL_TYPE);
9564 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9565 layout_type (double_type_node);
9567 long_double_type_node = make_node (REAL_TYPE);
9568 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9569 layout_type (long_double_type_node);
9571 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9573 int n = floatn_nx_types[i].n;
9574 bool extended = floatn_nx_types[i].extended;
9575 scalar_float_mode mode;
9576 if (!targetm.floatn_mode (n, extended).exists (&mode))
9577 continue;
9578 int precision = GET_MODE_PRECISION (mode);
9579 /* Work around the rs6000 KFmode having precision 113 not
9580 128. */
9581 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9582 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9583 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9584 if (!extended)
9585 gcc_assert (min_precision == n);
9586 if (precision < min_precision)
9587 precision = min_precision;
9588 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9589 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9590 layout_type (FLOATN_NX_TYPE_NODE (i));
9591 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9593 float128t_type_node = float128_type_node;
9594 #ifdef HAVE_BFmode
9595 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9596 && targetm.scalar_mode_supported_p (BFmode)
9597 && targetm.libgcc_floating_mode_supported_p (BFmode))
9599 bfloat16_type_node = make_node (REAL_TYPE);
9600 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9601 layout_type (bfloat16_type_node);
9602 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9604 #endif
9606 float_ptr_type_node = build_pointer_type (float_type_node);
9607 double_ptr_type_node = build_pointer_type (double_type_node);
9608 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9609 integer_ptr_type_node = build_pointer_type (integer_type_node);
9611 /* Fixed size integer types. */
9612 uint16_type_node = make_or_reuse_type (16, 1);
9613 uint32_type_node = make_or_reuse_type (32, 1);
9614 uint64_type_node = make_or_reuse_type (64, 1);
9615 if (targetm.scalar_mode_supported_p (TImode))
9616 uint128_type_node = make_or_reuse_type (128, 1);
9618 /* Decimal float types. */
9619 if (targetm.decimal_float_supported_p ())
9621 dfloat32_type_node = make_node (REAL_TYPE);
9622 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9623 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9624 layout_type (dfloat32_type_node);
9626 dfloat64_type_node = make_node (REAL_TYPE);
9627 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9628 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9629 layout_type (dfloat64_type_node);
9631 dfloat128_type_node = make_node (REAL_TYPE);
9632 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9633 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9634 layout_type (dfloat128_type_node);
9637 complex_integer_type_node = build_complex_type (integer_type_node, true);
9638 complex_float_type_node = build_complex_type (float_type_node, true);
9639 complex_double_type_node = build_complex_type (double_type_node, true);
9640 complex_long_double_type_node = build_complex_type (long_double_type_node,
9641 true);
9643 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9645 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9646 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9647 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9650 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9651 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9652 sat_ ## KIND ## _type_node = \
9653 make_sat_signed_ ## KIND ## _type (SIZE); \
9654 sat_unsigned_ ## KIND ## _type_node = \
9655 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9656 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9657 unsigned_ ## KIND ## _type_node = \
9658 make_unsigned_ ## KIND ## _type (SIZE);
9660 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9661 sat_ ## WIDTH ## KIND ## _type_node = \
9662 make_sat_signed_ ## KIND ## _type (SIZE); \
9663 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9664 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9665 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9666 unsigned_ ## WIDTH ## KIND ## _type_node = \
9667 make_unsigned_ ## KIND ## _type (SIZE);
9669 /* Make fixed-point type nodes based on four different widths. */
9670 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9671 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9672 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9673 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9674 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9676 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9677 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9678 NAME ## _type_node = \
9679 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9680 u ## NAME ## _type_node = \
9681 make_or_reuse_unsigned_ ## KIND ## _type \
9682 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9683 sat_ ## NAME ## _type_node = \
9684 make_or_reuse_sat_signed_ ## KIND ## _type \
9685 (GET_MODE_BITSIZE (MODE ## mode)); \
9686 sat_u ## NAME ## _type_node = \
9687 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9688 (GET_MODE_BITSIZE (U ## MODE ## mode));
9690 /* Fixed-point type and mode nodes. */
9691 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9692 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9693 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9694 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9695 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9696 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9697 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9698 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9699 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9700 MAKE_FIXED_MODE_NODE (accum, da, DA)
9701 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9704 tree t = targetm.build_builtin_va_list ();
9706 /* Many back-ends define record types without setting TYPE_NAME.
9707 If we copied the record type here, we'd keep the original
9708 record type without a name. This breaks name mangling. So,
9709 don't copy record types and let c_common_nodes_and_builtins()
9710 declare the type to be __builtin_va_list. */
9711 if (TREE_CODE (t) != RECORD_TYPE)
9712 t = build_variant_type_copy (t);
9714 va_list_type_node = t;
9717 /* SCEV analyzer global shared trees. */
9718 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9719 TREE_TYPE (chrec_dont_know) = void_type_node;
9720 chrec_known = make_node (SCEV_KNOWN);
9721 TREE_TYPE (chrec_known) = void_type_node;
9724 /* Modify DECL for given flags.
9725 TM_PURE attribute is set only on types, so the function will modify
9726 DECL's type when ECF_TM_PURE is used. */
9728 void
9729 set_call_expr_flags (tree decl, int flags)
9731 if (flags & ECF_NOTHROW)
9732 TREE_NOTHROW (decl) = 1;
9733 if (flags & ECF_CONST)
9734 TREE_READONLY (decl) = 1;
9735 if (flags & ECF_PURE)
9736 DECL_PURE_P (decl) = 1;
9737 if (flags & ECF_LOOPING_CONST_OR_PURE)
9738 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9739 if (flags & ECF_NOVOPS)
9740 DECL_IS_NOVOPS (decl) = 1;
9741 if (flags & ECF_NORETURN)
9742 TREE_THIS_VOLATILE (decl) = 1;
9743 if (flags & ECF_MALLOC)
9744 DECL_IS_MALLOC (decl) = 1;
9745 if (flags & ECF_RETURNS_TWICE)
9746 DECL_IS_RETURNS_TWICE (decl) = 1;
9747 if (flags & ECF_LEAF)
9748 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9749 NULL, DECL_ATTRIBUTES (decl));
9750 if (flags & ECF_COLD)
9751 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9752 NULL, DECL_ATTRIBUTES (decl));
9753 if (flags & ECF_RET1)
9754 DECL_ATTRIBUTES (decl)
9755 = tree_cons (get_identifier ("fn spec"),
9756 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9757 DECL_ATTRIBUTES (decl));
9758 if ((flags & ECF_TM_PURE) && flag_tm)
9759 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9760 /* Looping const or pure is implied by noreturn.
9761 There is currently no way to declare looping const or looping pure alone. */
9762 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9763 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9767 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9769 static void
9770 local_define_builtin (const char *name, tree type, enum built_in_function code,
9771 const char *library_name, int ecf_flags)
9773 tree decl;
9775 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9776 library_name, NULL_TREE);
9777 set_call_expr_flags (decl, ecf_flags);
9779 set_builtin_decl (code, decl, true);
9782 /* Call this function after instantiating all builtins that the language
9783 front end cares about. This will build the rest of the builtins
9784 and internal functions that are relied upon by the tree optimizers and
9785 the middle-end. */
9787 void
9788 build_common_builtin_nodes (void)
9790 tree tmp, ftype;
9791 int ecf_flags;
9793 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9795 ftype = build_function_type_list (void_type_node,
9796 ptr_type_node,
9797 ptr_type_node,
9798 integer_type_node,
9799 NULL_TREE);
9800 local_define_builtin ("__builtin_clear_padding", ftype,
9801 BUILT_IN_CLEAR_PADDING,
9802 "__builtin_clear_padding",
9803 ECF_LEAF | ECF_NOTHROW);
9806 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9807 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9808 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9809 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9811 ftype = build_function_type (void_type_node, void_list_node);
9812 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9813 local_define_builtin ("__builtin_unreachable", ftype,
9814 BUILT_IN_UNREACHABLE,
9815 "__builtin_unreachable",
9816 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9817 | ECF_CONST | ECF_COLD);
9818 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9819 local_define_builtin ("__builtin_unreachable trap", ftype,
9820 BUILT_IN_UNREACHABLE_TRAP,
9821 "__builtin_unreachable trap",
9822 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9823 | ECF_CONST | ECF_COLD);
9824 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9825 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9826 "abort",
9827 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9828 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9829 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9830 "__builtin_trap",
9831 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9834 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9835 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9837 ftype = build_function_type_list (ptr_type_node,
9838 ptr_type_node, const_ptr_type_node,
9839 size_type_node, NULL_TREE);
9841 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9842 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9843 "memcpy", ECF_NOTHROW | ECF_LEAF);
9844 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9845 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9846 "memmove", ECF_NOTHROW | ECF_LEAF);
9849 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9851 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9852 const_ptr_type_node, size_type_node,
9853 NULL_TREE);
9854 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9855 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9858 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9860 ftype = build_function_type_list (ptr_type_node,
9861 ptr_type_node, integer_type_node,
9862 size_type_node, NULL_TREE);
9863 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9864 "memset", ECF_NOTHROW | ECF_LEAF);
9867 /* If we're checking the stack, `alloca' can throw. */
9868 const int alloca_flags
9869 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9871 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9873 ftype = build_function_type_list (ptr_type_node,
9874 size_type_node, NULL_TREE);
9875 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9876 "alloca", alloca_flags);
9879 ftype = build_function_type_list (ptr_type_node, size_type_node,
9880 size_type_node, NULL_TREE);
9881 local_define_builtin ("__builtin_alloca_with_align", ftype,
9882 BUILT_IN_ALLOCA_WITH_ALIGN,
9883 "__builtin_alloca_with_align",
9884 alloca_flags);
9886 ftype = build_function_type_list (ptr_type_node, size_type_node,
9887 size_type_node, size_type_node, NULL_TREE);
9888 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9889 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9890 "__builtin_alloca_with_align_and_max",
9891 alloca_flags);
9893 ftype = build_function_type_list (void_type_node,
9894 ptr_type_node, ptr_type_node,
9895 ptr_type_node, NULL_TREE);
9896 local_define_builtin ("__builtin_init_trampoline", ftype,
9897 BUILT_IN_INIT_TRAMPOLINE,
9898 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9899 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9900 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9901 "__builtin_init_heap_trampoline",
9902 ECF_NOTHROW | ECF_LEAF);
9903 local_define_builtin ("__builtin_init_descriptor", ftype,
9904 BUILT_IN_INIT_DESCRIPTOR,
9905 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9907 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9908 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9909 BUILT_IN_ADJUST_TRAMPOLINE,
9910 "__builtin_adjust_trampoline",
9911 ECF_CONST | ECF_NOTHROW);
9912 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9913 BUILT_IN_ADJUST_DESCRIPTOR,
9914 "__builtin_adjust_descriptor",
9915 ECF_CONST | ECF_NOTHROW);
9917 ftype = build_function_type_list (void_type_node,
9918 ptr_type_node, ptr_type_node, NULL_TREE);
9919 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9920 local_define_builtin ("__builtin___clear_cache", ftype,
9921 BUILT_IN_CLEAR_CACHE,
9922 "__clear_cache",
9923 ECF_NOTHROW);
9925 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9926 BUILT_IN_NONLOCAL_GOTO,
9927 "__builtin_nonlocal_goto",
9928 ECF_NORETURN | ECF_NOTHROW);
9930 ftype = build_function_type_list (void_type_node,
9931 ptr_type_node, ptr_type_node, NULL_TREE);
9932 local_define_builtin ("__builtin_setjmp_setup", ftype,
9933 BUILT_IN_SETJMP_SETUP,
9934 "__builtin_setjmp_setup", ECF_NOTHROW);
9936 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9937 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9938 BUILT_IN_SETJMP_RECEIVER,
9939 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9941 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9942 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9943 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9945 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9946 local_define_builtin ("__builtin_stack_restore", ftype,
9947 BUILT_IN_STACK_RESTORE,
9948 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9950 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9951 const_ptr_type_node, size_type_node,
9952 NULL_TREE);
9953 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9954 "__builtin_memcmp_eq",
9955 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9957 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9958 "__builtin_strncmp_eq",
9959 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9961 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9962 "__builtin_strcmp_eq",
9963 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9965 /* If there's a possibility that we might use the ARM EABI, build the
9966 alternate __cxa_end_cleanup node used to resume from C++. */
9967 if (targetm.arm_eabi_unwinder)
9969 ftype = build_function_type_list (void_type_node, NULL_TREE);
9970 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9971 BUILT_IN_CXA_END_CLEANUP,
9972 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9975 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_unwind_resume", ftype,
9977 BUILT_IN_UNWIND_RESUME,
9978 ((targetm_common.except_unwind_info (&global_options)
9979 == UI_SJLJ)
9980 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9981 ECF_NORETURN);
9983 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9985 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9986 NULL_TREE);
9987 local_define_builtin ("__builtin_return_address", ftype,
9988 BUILT_IN_RETURN_ADDRESS,
9989 "__builtin_return_address",
9990 ECF_NOTHROW);
9993 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9994 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9996 ftype = build_function_type_list (void_type_node, ptr_type_node,
9997 ptr_type_node, NULL_TREE);
9998 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9999 local_define_builtin ("__cyg_profile_func_enter", ftype,
10000 BUILT_IN_PROFILE_FUNC_ENTER,
10001 "__cyg_profile_func_enter", 0);
10002 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10003 local_define_builtin ("__cyg_profile_func_exit", ftype,
10004 BUILT_IN_PROFILE_FUNC_EXIT,
10005 "__cyg_profile_func_exit", 0);
10008 /* The exception object and filter values from the runtime. The argument
10009 must be zero before exception lowering, i.e. from the front end. After
10010 exception lowering, it will be the region number for the exception
10011 landing pad. These functions are PURE instead of CONST to prevent
10012 them from being hoisted past the exception edge that will initialize
10013 their values in the landing pad. */
10014 ftype = build_function_type_list (ptr_type_node,
10015 integer_type_node, NULL_TREE);
10016 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10017 /* Only use TM_PURE if we have TM language support. */
10018 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10019 ecf_flags |= ECF_TM_PURE;
10020 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10021 "__builtin_eh_pointer", ecf_flags);
10023 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10024 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10025 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10026 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10028 ftype = build_function_type_list (void_type_node,
10029 integer_type_node, integer_type_node,
10030 NULL_TREE);
10031 local_define_builtin ("__builtin_eh_copy_values", ftype,
10032 BUILT_IN_EH_COPY_VALUES,
10033 "__builtin_eh_copy_values", ECF_NOTHROW);
10035 /* Complex multiplication and division. These are handled as builtins
10036 rather than optabs because emit_library_call_value doesn't support
10037 complex. Further, we can do slightly better with folding these
10038 beasties if the real and imaginary parts of the arguments are separate.
10040 int mode;
10042 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10044 char mode_name_buf[4], *q;
10045 const char *p;
10046 enum built_in_function mcode, dcode;
10047 tree type, inner_type;
10048 const char *prefix = "__";
10050 if (targetm.libfunc_gnu_prefix)
10051 prefix = "__gnu_";
10053 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10054 if (type == NULL)
10055 continue;
10056 inner_type = TREE_TYPE (type);
10058 ftype = build_function_type_list (type, inner_type, inner_type,
10059 inner_type, inner_type, NULL_TREE);
10061 mcode = ((enum built_in_function)
10062 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10063 dcode = ((enum built_in_function)
10064 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10066 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10067 *q = TOLOWER (*p);
10068 *q = '\0';
10070 /* For -ftrapping-math these should throw from a former
10071 -fnon-call-exceptions stmt. */
10072 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10073 NULL);
10074 local_define_builtin (built_in_names[mcode], ftype, mcode,
10075 built_in_names[mcode],
10076 ECF_CONST | ECF_LEAF);
10078 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10079 NULL);
10080 local_define_builtin (built_in_names[dcode], ftype, dcode,
10081 built_in_names[dcode],
10082 ECF_CONST | ECF_LEAF);
10086 init_internal_fns ();
10089 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10090 better way.
10092 If we requested a pointer to a vector, build up the pointers that
10093 we stripped off while looking for the inner type. Similarly for
10094 return values from functions.
10096 The argument TYPE is the top of the chain, and BOTTOM is the
10097 new type which we will point to. */
10099 tree
10100 reconstruct_complex_type (tree type, tree bottom)
10102 tree inner, outer;
10104 if (TREE_CODE (type) == POINTER_TYPE)
10106 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10107 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10108 TYPE_REF_CAN_ALIAS_ALL (type));
10110 else if (TREE_CODE (type) == REFERENCE_TYPE)
10112 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10113 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10114 TYPE_REF_CAN_ALIAS_ALL (type));
10116 else if (TREE_CODE (type) == ARRAY_TYPE)
10118 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10119 outer = build_array_type (inner, TYPE_DOMAIN (type));
10121 else if (TREE_CODE (type) == FUNCTION_TYPE)
10123 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10124 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10125 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10127 else if (TREE_CODE (type) == METHOD_TYPE)
10129 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10130 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10131 so we must compensate by getting rid of it. */
10132 outer
10133 = build_method_type_directly
10134 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10135 inner,
10136 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10138 else if (TREE_CODE (type) == OFFSET_TYPE)
10140 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10141 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10143 else
10144 return bottom;
10146 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10147 TYPE_QUALS (type));
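/* An illustrative example of the reconstruction above (the types named here
   are hypothetical, not taken from any particular caller):

     TYPE   = float **            (what the source requested)
     BOTTOM = vector(4) float     (the inner type we settled on)

   reconstruct_complex_type (TYPE, BOTTOM) then rebuilds the stripped levels
   one at a time around BOTTOM, yielding "vector(4) float **", with each
   level's attributes and qualifiers reapplied by
   build_type_attribute_qual_variant.  */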
10150 /* Returns a vector tree node given a vector or integer mode MODE and the
10151 inner type INNERTYPE. */
10152 tree
10153 build_vector_type_for_mode (tree innertype, machine_mode mode)
10155 poly_int64 nunits;
10156 unsigned int bitsize;
10158 switch (GET_MODE_CLASS (mode))
10160 case MODE_VECTOR_BOOL:
10161 case MODE_VECTOR_INT:
10162 case MODE_VECTOR_FLOAT:
10163 case MODE_VECTOR_FRACT:
10164 case MODE_VECTOR_UFRACT:
10165 case MODE_VECTOR_ACCUM:
10166 case MODE_VECTOR_UACCUM:
10167 nunits = GET_MODE_NUNITS (mode);
10168 break;
10170 case MODE_INT:
10171 /* Check that there are no leftover bits. */
10172 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10173 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10174 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10175 break;
10177 default:
10178 gcc_unreachable ();
10181 return make_vector_type (innertype, nunits, mode);
10184 /* Similarly, but takes the inner type and number of units, which must be
10185 a power of two. */
10187 tree
10188 build_vector_type (tree innertype, poly_int64 nunits)
10190 return make_vector_type (innertype, nunits, VOIDmode);
10193 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10195 tree
10196 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10198 gcc_assert (mask_mode != BLKmode);
10200 unsigned HOST_WIDE_INT esize;
10201 if (VECTOR_MODE_P (mask_mode))
10203 poly_uint64 vsize = GET_MODE_PRECISION (mask_mode);
10204 esize = vector_element_size (vsize, nunits);
10206 else
10207 esize = 1;
10209 tree bool_type = build_nonstandard_boolean_type (esize);
10211 return make_vector_type (bool_type, nunits, mask_mode);
10214 /* Build a vector type that holds one boolean result for each element of
10215 vector type VECTYPE. The public interface for this operation is
10216 truth_type_for. */
10218 static tree
10219 build_truth_vector_type_for (tree vectype)
10221 machine_mode vector_mode = TYPE_MODE (vectype);
10222 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10224 machine_mode mask_mode;
10225 if (VECTOR_MODE_P (vector_mode)
10226 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10227 return build_truth_vector_type_for_mode (nunits, mask_mode);
10229 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10230 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10231 tree bool_type = build_nonstandard_boolean_type (esize);
10233 return make_vector_type (bool_type, nunits, VOIDmode);
10236 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10237 set. */
10239 tree
10240 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10242 tree t = make_vector_type (innertype, nunits, VOIDmode);
10243 tree cand;
10244 /* We always build the non-opaque variant before the opaque one,
10245 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10246 cand = TYPE_NEXT_VARIANT (t);
10247 if (cand
10248 && TYPE_VECTOR_OPAQUE (cand)
10249 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10250 return cand;
10251 /* Otherwise build a variant type and make sure to queue it after
10252 the non-opaque type. */
10253 cand = build_distinct_type_copy (t);
10254 TYPE_VECTOR_OPAQUE (cand) = true;
10255 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10256 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10257 TYPE_NEXT_VARIANT (t) = cand;
10258 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10259 return cand;
10262 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10264 static poly_wide_int
10265 vector_cst_int_elt (const_tree t, unsigned int i)
10267 /* First handle elements that are directly encoded. */
10268 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10269 if (i < encoded_nelts)
10270 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10272 /* Identify the pattern that contains element I and work out the index of
10273 the last encoded element for that pattern. */
10274 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10275 unsigned int pattern = i % npatterns;
10276 unsigned int count = i / npatterns;
10277 unsigned int final_i = encoded_nelts - npatterns + pattern;
10279 /* If there are no steps, the final encoded value is the right one. */
10280 if (!VECTOR_CST_STEPPED_P (t))
10281 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10283 /* Otherwise work out the value from the last two encoded elements. */
10284 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10285 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10286 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10287 return wi::to_poly_wide (v2) + (count - 2) * diff;
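/* A worked example of the stepped case above (the constant is illustrative):
   consider a VECTOR_CST encoded with NPATTERNS == 1, NELTS_PER_PATTERN == 3
   and encoded elements { 1, 4, 7 }.  For I == 5:

     pattern = 5 % 1 = 0,  count = 5 / 1 = 5,  final_i = 3 - 1 + 0 = 2
     v1 = 4,  v2 = 7,  diff = 3
     result = v2 + (count - 2) * diff = 7 + 3 * 3 = 16

   which matches the implied series 1, 4, 7, 10, 13, 16, ...  */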
10290 /* Return the value of element I of VECTOR_CST T. */
10292 tree
10293 vector_cst_elt (const_tree t, unsigned int i)
10295 /* First handle elements that are directly encoded. */
10296 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10297 if (i < encoded_nelts)
10298 return VECTOR_CST_ENCODED_ELT (t, i);
10300 /* If there are no steps, the final encoded value is the right one. */
10301 if (!VECTOR_CST_STEPPED_P (t))
10303 /* Identify the pattern that contains element I and work out the index of
10304 the last encoded element for that pattern. */
10305 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10306 unsigned int pattern = i % npatterns;
10307 unsigned int final_i = encoded_nelts - npatterns + pattern;
10308 return VECTOR_CST_ENCODED_ELT (t, final_i);
10311 /* Otherwise work out the value from the last two encoded elements. */
10312 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10313 vector_cst_int_elt (t, i));
10316 /* Given an initializer INIT, return TRUE if INIT is zero or some
10317 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10318 null, set *NONZERO if and only if INIT is known not to be all
10319 zeros. A return value of false combined with *NONZERO set to false
10320 means that INIT may, but need not, be all zeros. Any other
10321 combination is a definitive answer. */
10323 bool
10324 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10326 bool dummy;
10327 if (!nonzero)
10328 nonzero = &dummy;
10330 /* Conservatively clear NONZERO and set it only if INIT is definitely
10331 not all zero. */
10332 *nonzero = false;
10334 STRIP_NOPS (init);
10336 unsigned HOST_WIDE_INT off = 0;
10338 switch (TREE_CODE (init))
10340 case INTEGER_CST:
10341 if (integer_zerop (init))
10342 return true;
10344 *nonzero = true;
10345 return false;
10347 case REAL_CST:
10348 /* ??? Note that this is not correct for C4X float formats. There,
10349 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10350 negative exponent. */
10351 if (real_zerop (init)
10352 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10353 return true;
10355 *nonzero = true;
10356 return false;
10358 case FIXED_CST:
10359 if (fixed_zerop (init))
10360 return true;
10362 *nonzero = true;
10363 return false;
10365 case COMPLEX_CST:
10366 if (integer_zerop (init)
10367 || (real_zerop (init)
10368 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10369 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10370 return true;
10372 *nonzero = true;
10373 return false;
10375 case VECTOR_CST:
10376 if (VECTOR_CST_NPATTERNS (init) == 1
10377 && VECTOR_CST_DUPLICATE_P (init)
10378 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10379 return true;
10381 *nonzero = true;
10382 return false;
10384 case CONSTRUCTOR:
10386 if (TREE_CLOBBER_P (init))
10387 return false;
10389 unsigned HOST_WIDE_INT idx;
10390 tree elt;
10392 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10393 if (!initializer_zerop (elt, nonzero))
10394 return false;
10396 return true;
10399 case MEM_REF:
10401 tree arg = TREE_OPERAND (init, 0);
10402 if (TREE_CODE (arg) != ADDR_EXPR)
10403 return false;
10404 tree offset = TREE_OPERAND (init, 1);
10405 if (TREE_CODE (offset) != INTEGER_CST
10406 || !tree_fits_uhwi_p (offset))
10407 return false;
10408 off = tree_to_uhwi (offset);
10409 if (INT_MAX < off)
10410 return false;
10411 arg = TREE_OPERAND (arg, 0);
10412 if (TREE_CODE (arg) != STRING_CST)
10413 return false;
10414 init = arg;
10416 /* Fall through. */
10418 case STRING_CST:
10420 gcc_assert (off <= INT_MAX);
10422 int i = off;
10423 int n = TREE_STRING_LENGTH (init);
10424 if (n <= i)
10425 return false;
10427 /* We need to loop through all elements to handle cases like
10428 "\0" and "\0foobar". */
10429 for (i = 0; i < n; ++i)
10430 if (TREE_STRING_POINTER (init)[i] != '\0')
10432 *nonzero = true;
10433 return false;
10436 return true;
10439 default:
10440 return false;
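/* Some illustrative outcomes (the initializers shown are hypothetical):

     { 0, 0.0, { 0, 0 } }      -> true                       (all zeros)
     { 0, 1 }                  -> false, *NONZERO == true    (definitely not)
     { 0, x }, x a VAR_DECL    -> false, *NONZERO == false   (can't tell)

   Only the last combination is a "maybe"; the other answers are
   definitive.  */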
10444 /* Return true if EXPR is an initializer expression in which every element
10445 is a constant that is numerically equal to 0 or 1. The elements do not
10446 need to be equal to each other. */
10448 bool
10449 initializer_each_zero_or_onep (const_tree expr)
10451 STRIP_ANY_LOCATION_WRAPPER (expr);
10453 switch (TREE_CODE (expr))
10455 case INTEGER_CST:
10456 return integer_zerop (expr) || integer_onep (expr);
10458 case REAL_CST:
10459 return real_zerop (expr) || real_onep (expr);
10461 case VECTOR_CST:
10463 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10464 if (VECTOR_CST_STEPPED_P (expr)
10465 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10466 return false;
10468 for (unsigned int i = 0; i < nelts; ++i)
10470 tree elt = vector_cst_elt (expr, i);
10471 if (!initializer_each_zero_or_onep (elt))
10472 return false;
10475 return true;
10478 default:
10479 return false;
10483 /* Check whether vector VEC consists of all equal elements and that the
10484 number of elements corresponds to the type of VEC. The function returns
10485 the first element of the vector, or NULL_TREE if the vector is not
10486 uniform. */
10487 tree
10488 uniform_vector_p (const_tree vec)
10490 tree first, t;
10491 unsigned HOST_WIDE_INT i, nelts;
10493 if (vec == NULL_TREE)
10494 return NULL_TREE;
10496 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10498 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10499 return TREE_OPERAND (vec, 0);
10501 else if (TREE_CODE (vec) == VECTOR_CST)
10503 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10504 return VECTOR_CST_ENCODED_ELT (vec, 0);
10505 return NULL_TREE;
10508 else if (TREE_CODE (vec) == CONSTRUCTOR
10509 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10511 first = error_mark_node;
10513 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10515 if (i == 0)
10517 first = t;
10518 continue;
10520 if (!operand_equal_p (first, t, 0))
10521 return NULL_TREE;
10523 if (i != nelts)
10524 return NULL_TREE;
10526 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10527 return uniform_vector_p (first);
10528 return first;
10531 return NULL_TREE;
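/* For illustration (the vectors are hypothetical):

     uniform_vector_p ({ 7, 7, 7, 7 })          == 7
     uniform_vector_p ({ 1, 2, 3, 4 })          == NULL_TREE
     uniform_vector_p (VEC_DUPLICATE_EXPR <x>)  == x

   For a CONSTRUCTOR the element count must also match TYPE_VECTOR_SUBPARTS,
   since any missing trailing elements would be implicitly zero.  */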
10534 /* If the argument is INTEGER_CST, return it. If the argument is vector
10535 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10536 return NULL_TREE.
10537 Look through location wrappers. */
10539 tree
10540 uniform_integer_cst_p (tree t)
10542 STRIP_ANY_LOCATION_WRAPPER (t);
10544 if (TREE_CODE (t) == INTEGER_CST)
10545 return t;
10547 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10549 t = uniform_vector_p (t);
10550 if (t && TREE_CODE (t) == INTEGER_CST)
10551 return t;
10554 return NULL_TREE;
10557 /* Check whether T is a constant or a constant vector in which each element E
10558 satisfies ~E + 1 == pow2; if so, return the ~E values, otherwise NULL_TREE. */
10560 tree
10561 bitmask_inv_cst_vector_p (tree t)
10564 tree_code code = TREE_CODE (t);
10565 tree type = TREE_TYPE (t);
10567 if (!INTEGRAL_TYPE_P (type)
10568 && !VECTOR_INTEGER_TYPE_P (type))
10569 return NULL_TREE;
10571 unsigned HOST_WIDE_INT nelts = 1;
10572 tree cst;
10573 unsigned int idx = 0;
10574 bool uniform = uniform_integer_cst_p (t);
10575 tree newtype = unsigned_type_for (type);
10576 tree_vector_builder builder;
10577 if (code == INTEGER_CST)
10578 cst = t;
10579 else
10581 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10582 return NULL_TREE;
10584 cst = vector_cst_elt (t, 0);
10585 builder.new_vector (newtype, nelts, 1);
10588 tree ty = unsigned_type_for (TREE_TYPE (cst));
10592 if (idx > 0)
10593 cst = vector_cst_elt (t, idx);
10594 wide_int icst = wi::to_wide (cst);
10595 wide_int inv = wi::bit_not (icst);
10596 icst = wi::add (1, inv);
10597 if (wi::popcount (icst) != 1)
10598 return NULL_TREE;
10600 tree newcst = wide_int_to_tree (ty, inv);
10602 if (uniform)
10603 return build_uniform_cst (newtype, newcst);
10605 builder.quick_push (newcst);
10607 while (++idx < nelts);
10609 return builder.build ();
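/* A worked example of the ~E + 1 test above, for a hypothetical 32-bit
   element E == 0xffffff00:

     ~E     = 0x000000ff
     ~E + 1 = 0x00000100    (popcount == 1, i.e. a power of two)

   so the element is accepted and 0x000000ff is returned in the unsigned
   variant of the element type.  An element such as 0xffffff0f is rejected,
   because ~E + 1 == 0xf1 is not a power of two.  */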
10612 /* If VECTOR_CST T has a single nonzero element, return the index of that
10613 element, otherwise return -1. */
10616 single_nonzero_element (const_tree t)
10618 unsigned HOST_WIDE_INT nelts;
10619 unsigned int repeat_nelts;
10620 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10621 repeat_nelts = nelts;
10622 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10624 nelts = vector_cst_encoded_nelts (t);
10625 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10627 else
10628 return -1;
10630 int res = -1;
10631 for (unsigned int i = 0; i < nelts; ++i)
10633 tree elt = vector_cst_elt (t, i);
10634 if (!integer_zerop (elt) && !real_zerop (elt))
10636 if (res >= 0 || i >= repeat_nelts)
10637 return -1;
10638 res = i;
10641 return res;
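/* For illustration (the constants are hypothetical):

     single_nonzero_element ({ 0, 0, 5, 0 }) == 2
     single_nonzero_element ({ 0, 1, 1, 0 }) == -1   (two nonzero elements)
     single_nonzero_element ({ 0, 0, 0, 0 }) == -1   (no nonzero element)  */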
10644 /* Build an empty statement at location LOC. */
10646 tree
10647 build_empty_stmt (location_t loc)
10649 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10650 SET_EXPR_LOCATION (t, loc);
10651 return t;
10655 /* Build an OMP clause with code CODE. LOC is the location of the
10656 clause. */
10658 tree
10659 build_omp_clause (location_t loc, enum omp_clause_code code)
10661 tree t;
10662 int size, length;
10664 length = omp_clause_num_ops[code];
10665 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10667 record_node_allocation_statistics (OMP_CLAUSE, size);
10669 t = (tree) ggc_internal_alloc (size);
10670 memset (t, 0, size);
10671 TREE_SET_CODE (t, OMP_CLAUSE);
10672 OMP_CLAUSE_SET_CODE (t, code);
10673 OMP_CLAUSE_LOCATION (t) = loc;
10675 return t;
10678 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10679 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10680 Except for the CODE and operand count field, other storage for the
10681 object is initialized to zeros. */
10683 tree
10684 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10686 tree t;
10687 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10689 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10690 gcc_assert (len >= 1);
10692 record_node_allocation_statistics (code, length);
10694 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10696 TREE_SET_CODE (t, code);
10698 /* Can't use TREE_OPERAND to store the length because if checking is
10699 enabled, it will try to check the length before we store it. :-P */
10700 t->exp.operands[0] = build_int_cst (sizetype, len);
10702 return t;
10705 /* Helper function for build_call_* functions; build a CALL_EXPR with
10706 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10707 the argument slots. */
10709 static tree
10710 build_call_1 (tree return_type, tree fn, int nargs)
10712 tree t;
10714 t = build_vl_exp (CALL_EXPR, nargs + 3);
10715 TREE_TYPE (t) = return_type;
10716 CALL_EXPR_FN (t) = fn;
10717 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10719 return t;
10722 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10723 FN and a null static chain slot. NARGS is the number of call arguments
10724 which are specified as "..." arguments. */
10726 tree
10727 build_call_nary (tree return_type, tree fn, int nargs, ...)
10729 tree ret;
10730 va_list args;
10731 va_start (args, nargs);
10732 ret = build_call_valist (return_type, fn, nargs, args);
10733 va_end (args);
10734 return ret;
10737 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10738 FN and a null static chain slot. NARGS is the number of call arguments
10739 which are specified as a va_list ARGS. */
10741 tree
10742 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10744 tree t;
10745 int i;
10747 t = build_call_1 (return_type, fn, nargs);
10748 for (i = 0; i < nargs; i++)
10749 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10750 process_call_operands (t);
10751 return t;
10754 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10755 FN and a null static chain slot. NARGS is the number of call arguments
10756 which are specified as a tree array ARGS. */
10758 tree
10759 build_call_array_loc (location_t loc, tree return_type, tree fn,
10760 int nargs, const tree *args)
10762 tree t;
10763 int i;
10765 t = build_call_1 (return_type, fn, nargs);
10766 for (i = 0; i < nargs; i++)
10767 CALL_EXPR_ARG (t, i) = args[i];
10768 process_call_operands (t);
10769 SET_EXPR_LOCATION (t, loc);
10770 return t;
10773 /* Like build_call_array, but takes a vec. */
10775 tree
10776 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10778 tree ret, t;
10779 unsigned int ix;
10781 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10782 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10783 CALL_EXPR_ARG (ret, ix) = t;
10784 process_call_operands (ret);
10785 return ret;
10788 /* Conveniently construct a function call expression. FNDECL names the
10789 function to be called and N arguments are passed in the array
10790 ARGARRAY. */
10792 tree
10793 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10795 tree fntype = TREE_TYPE (fndecl);
10796 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10798 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10801 /* Conveniently construct a function call expression. FNDECL names the
10802 function to be called and the arguments are passed in the vector
10803 VEC. */
10805 tree
10806 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10808 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10809 vec_safe_address (vec));
10813 /* Conveniently construct a function call expression. FNDECL names the
10814 function to be called, N is the number of arguments, and the "..."
10815 parameters are the argument expressions. */
10817 tree
10818 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10820 va_list ap;
10821 tree *argarray = XALLOCAVEC (tree, n);
10822 int i;
10824 va_start (ap, n);
10825 for (i = 0; i < n; i++)
10826 argarray[i] = va_arg (ap, tree);
10827 va_end (ap);
10828 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10831 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10832 varargs macros aren't supported by all bootstrap compilers. */
10834 tree
10835 build_call_expr (tree fndecl, int n, ...)
10837 va_list ap;
10838 tree *argarray = XALLOCAVEC (tree, n);
10839 int i;
10841 va_start (ap, n);
10842 for (i = 0; i < n; i++)
10843 argarray[i] = va_arg (ap, tree);
10844 va_end (ap);
10845 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
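/* A minimal usage sketch of the helpers above.  It is compiled out; DST,
   SRC and LEN stand for caller-provided trees and are hypothetical.  */
#if 0
  /* Build the equivalent of memcpy (dst, src, len) as a CALL_EXPR, using the
     builtin declaration registered by the front end or by
     build_common_builtin_nodes.  */
  tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
  tree call = build_call_expr (fndecl, 3, dst, src, len);
#endif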
10848 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10849 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10850 It will get gimplified later into an ordinary internal function. */
10852 tree
10853 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10854 tree type, int n, const tree *args)
10856 tree t = build_call_1 (type, NULL_TREE, n);
10857 for (int i = 0; i < n; ++i)
10858 CALL_EXPR_ARG (t, i) = args[i];
10859 SET_EXPR_LOCATION (t, loc);
10860 CALL_EXPR_IFN (t) = ifn;
10861 process_call_operands (t);
10862 return t;
10865 /* Build an internal call expression. This is just like CALL_EXPR, except
10866 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10867 internal function. */
10869 tree
10870 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10871 tree type, int n, ...)
10873 va_list ap;
10874 tree *argarray = XALLOCAVEC (tree, n);
10875 int i;
10877 va_start (ap, n);
10878 for (i = 0; i < n; i++)
10879 argarray[i] = va_arg (ap, tree);
10880 va_end (ap);
10881 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10884 /* Return a function call to FN, if the target is guaranteed to support it,
10885 or null otherwise.
10887 N is the number of arguments, passed in the "...", and TYPE is the
10888 type of the return value. */
10890 tree
10891 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10892 int n, ...)
10894 va_list ap;
10895 tree *argarray = XALLOCAVEC (tree, n);
10896 int i;
10898 va_start (ap, n);
10899 for (i = 0; i < n; i++)
10900 argarray[i] = va_arg (ap, tree);
10901 va_end (ap);
10902 if (internal_fn_p (fn))
10904 internal_fn ifn = as_internal_fn (fn);
10905 if (direct_internal_fn_p (ifn))
10907 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10908 if (!direct_internal_fn_supported_p (ifn, types,
10909 OPTIMIZE_FOR_BOTH))
10910 return NULL_TREE;
10912 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10914 else
10916 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10917 if (!fndecl)
10918 return NULL_TREE;
10919 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10923 /* Return a function call to the appropriate builtin alloca variant.
10925 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10926 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10927 bound for SIZE in case it is not a fixed value. */
10929 tree
10930 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10932 if (max_size >= 0)
10934 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10935 return
10936 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10938 else if (align > 0)
10940 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10941 return build_call_expr (t, 2, size, size_int (align));
10943 else
10945 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10946 return build_call_expr (t, 1, size);
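/* A usage sketch (compiled out; the values are hypothetical): request a
   64-byte allocation with ALIGN == 128, the alignment argument that
   __builtin_alloca_with_align expects (in bits), and no known upper bound
   on the size, which selects the with-align variant above.  */
#if 0
  tree size = size_int (64);
  tree call = build_alloca_call_expr (size, 128, -1);
#endif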
10950 /* The built-in decl to use to mark code points believed to be unreachable.
10951 Typically __builtin_unreachable, but __builtin_trap if
10952 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10953 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10954 appropriate ubsan function. When building a call directly, use
10955 {gimple_,}build_builtin_unreachable instead. */
10957 tree
10958 builtin_decl_unreachable ()
10960 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10962 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10963 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10964 : flag_unreachable_traps)
10965 fncode = BUILT_IN_UNREACHABLE_TRAP;
10966 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10967 in the sanopt pass. */
10969 return builtin_decl_explicit (fncode);
10972 /* Build a call to __builtin_unreachable, possibly rewritten by
10973 -fsanitize=unreachable. Use this rather than the above when practical. */
10975 tree
10976 build_builtin_unreachable (location_t loc)
10978 tree data = NULL_TREE;
10979 tree fn = sanitize_unreachable_fn (&data, loc);
10980 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10983 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10984 if SIZE == -1) and return a tree node representing a char* pointer to
10985 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10986 the STRING_CST value is the LEN bytes at STR (the representation
10987 of the string, which may be wide). Otherwise it's all zeros. */
10989 tree
10990 build_string_literal (unsigned len, const char *str /* = NULL */,
10991 tree eltype /* = char_type_node */,
10992 unsigned HOST_WIDE_INT size /* = -1 */)
10994 tree t = build_string (len, str);
10995 /* Set the maximum valid index based on the string length or SIZE. */
10996 unsigned HOST_WIDE_INT maxidx
10997 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10999 tree index = build_index_type (size_int (maxidx));
11000 eltype = build_type_variant (eltype, 1, 0);
11001 tree type = build_array_type (eltype, index);
11002 TREE_TYPE (t) = type;
11003 TREE_CONSTANT (t) = 1;
11004 TREE_READONLY (t) = 1;
11005 TREE_STATIC (t) = 1;
11007 type = build_pointer_type (eltype);
11008 t = build1 (ADDR_EXPR, type,
11009 build4 (ARRAY_REF, eltype,
11010 t, integer_zero_node, NULL_TREE, NULL_TREE));
11011 return t;
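/* A usage sketch (compiled out): build a "const char *" pointer to the
   constant string "hi".  LEN counts the terminating NUL here, as typical
   callers do, so the array type becomes char[3].  */
#if 0
  tree p = build_string_literal (3, "hi");
#endif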
11016 /* Return true if T (assumed to be a DECL) must be assigned a memory
11017 location. */
11019 bool
11020 needs_to_live_in_memory (const_tree t)
11022 return (TREE_ADDRESSABLE (t)
11023 || is_global_var (t)
11024 || (TREE_CODE (t) == RESULT_DECL
11025 && !DECL_BY_REFERENCE (t)
11026 && aggregate_value_p (t, current_function_decl)));
11029 /* Return the value of constant X, sign-extended according to its precision. */
11031 HOST_WIDE_INT
11032 int_cst_value (const_tree x)
11034 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11035 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11037 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11038 gcc_assert (cst_and_fits_in_hwi (x));
11040 if (bits < HOST_BITS_PER_WIDE_INT)
11042 bool negative = ((val >> (bits - 1)) & 1) != 0;
11043 if (negative)
11044 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11045 else
11046 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11049 return val;
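/* A worked example of the sign extension above: for an INTEGER_CST of a
   hypothetical 8-bit type whose low bits are 0xff,

     bits = 8, val = 0xff, bit 7 is set => negative
     val |= HOST_WIDE_INT_M1U << 7 << 1   (set every bit above bit 7)

   so -1 is returned.  For a value of 0x7f the else branch clears the bits
   above bit 7 instead and 127 is returned.  */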
11052 /* If TYPE is an integral or pointer type, return an integer type with
11053 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11054 if TYPE is already an integer type of signedness UNSIGNEDP.
11055 If TYPE is a floating-point type, return an integer type with the same
11056 bitsize and with the signedness given by UNSIGNEDP; this is useful
11057 when doing bit-level operations on a floating-point value. */
11059 tree
11060 signed_or_unsigned_type_for (int unsignedp, tree type)
11062 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11063 return type;
11065 if (TREE_CODE (type) == VECTOR_TYPE)
11067 tree inner = TREE_TYPE (type);
11068 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11069 if (!inner2)
11070 return NULL_TREE;
11071 if (inner == inner2)
11072 return type;
11073 machine_mode new_mode;
11074 if (VECTOR_MODE_P (TYPE_MODE (type))
11075 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11076 return build_vector_type_for_mode (inner2, new_mode);
11077 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11080 if (TREE_CODE (type) == COMPLEX_TYPE)
11082 tree inner = TREE_TYPE (type);
11083 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11084 if (!inner2)
11085 return NULL_TREE;
11086 if (inner == inner2)
11087 return type;
11088 return build_complex_type (inner2);
11091 unsigned int bits;
11092 if (INTEGRAL_TYPE_P (type)
11093 || POINTER_TYPE_P (type)
11094 || TREE_CODE (type) == OFFSET_TYPE)
11095 bits = TYPE_PRECISION (type);
11096 else if (TREE_CODE (type) == REAL_TYPE)
11097 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11098 else
11099 return NULL_TREE;
11101 if (TREE_CODE (type) == BITINT_TYPE && (unsignedp || bits > 1))
11102 return build_bitint_type (bits, unsignedp);
11103 return build_nonstandard_integer_type (bits, unsignedp);
11106 /* If TYPE is an integral or pointer type, return an integer type with
11107 the same precision which is unsigned, or itself if TYPE is already an
11108 unsigned integer type. If TYPE is a floating-point type, return an
11109 unsigned integer type with the same bitsize as TYPE. */
11111 tree
11112 unsigned_type_for (tree type)
11114 return signed_or_unsigned_type_for (1, type);
11117 /* If TYPE is an integral or pointer type, return an integer type with
11118 the same precision which is signed, or itself if TYPE is already a
11119 signed integer type. If TYPE is a floating-point type, return a
11120 signed integer type with the same bitsize as TYPE. */
11122 tree
11123 signed_type_for (tree type)
11125 return signed_or_unsigned_type_for (0, type);
11128 /* - For VECTOR_TYPEs:
11129 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11130 - The number of elements must match (known_eq).
11131 - targetm.vectorize.get_mask_mode must exist and must return exactly
11132 the same mode as the truth type's mode.
11133 - Otherwise, the truth type must be a BOOLEAN_TYPE, or
11134 useless_type_conversion_p (boolean_type_node, truth_type) must hold. */
11135 bool
11136 is_truth_type_for (tree type, tree truth_type)
11138 machine_mode mask_mode = TYPE_MODE (truth_type);
11139 machine_mode vmode = TYPE_MODE (type);
11140 machine_mode tmask_mode;
11142 if (TREE_CODE (type) == VECTOR_TYPE)
11144 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11145 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11146 TYPE_VECTOR_SUBPARTS (truth_type))
11147 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11148 && tmask_mode == mask_mode)
11149 return true;
11151 return false;
11154 return useless_type_conversion_p (boolean_type_node, truth_type);
11157 /* If TYPE is a vector type, return the corresponding boolean vector type
11158 with the same number of subparts. Otherwise return boolean_type_node. */
11160 tree
11161 truth_type_for (tree type)
11163 if (TREE_CODE (type) == VECTOR_TYPE)
11165 if (VECTOR_BOOLEAN_TYPE_P (type))
11166 return type;
11167 return build_truth_vector_type_for (type);
11169 else
11170 return boolean_type_node;
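/* A usage sketch (compiled out; the vector type built here is hypothetical):
   the truth type is the natural type for a vector comparison result.  */
#if 0
  tree v4sf = build_vector_type (float_type_node, 4);
  tree mask = truth_type_for (v4sf);              /* 4-element boolean vector.  */
  tree b = truth_type_for (integer_type_node);    /* boolean_type_node.  */
#endif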
11173 /* Returns the largest value obtainable by casting something in INNER type to
11174 OUTER type. */
11176 tree
11177 upper_bound_in_type (tree outer, tree inner)
11179 unsigned int det = 0;
11180 unsigned oprec = TYPE_PRECISION (outer);
11181 unsigned iprec = TYPE_PRECISION (inner);
11182 unsigned prec;
11184 /* Compute a unique number for every combination. */
11185 det |= (oprec > iprec) ? 4 : 0;
11186 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11187 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11189 /* Determine the exponent to use. */
11190 switch (det)
11192 case 0:
11193 case 1:
11194 /* oprec <= iprec, outer: signed, inner: don't care. */
11195 prec = oprec - 1;
11196 break;
11197 case 2:
11198 case 3:
11199 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11200 prec = oprec;
11201 break;
11202 case 4:
11203 /* oprec > iprec, outer: signed, inner: signed. */
11204 prec = iprec - 1;
11205 break;
11206 case 5:
11207 /* oprec > iprec, outer: signed, inner: unsigned. */
11208 prec = iprec;
11209 break;
11210 case 6:
11211 /* oprec > iprec, outer: unsigned, inner: signed. */
11212 prec = oprec;
11213 break;
11214 case 7:
11215 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11216 prec = iprec;
11217 break;
11218 default:
11219 gcc_unreachable ();
11222 return wide_int_to_tree (outer,
11223 wi::mask (prec, false, TYPE_PRECISION (outer)));
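/* A worked example of the DET encoding above (the types are illustrative).
   For OUTER = unsigned 16-bit, INNER = signed 8-bit: oprec (16) > iprec (8),
   OUTER unsigned, INNER signed, so det = 4 | 2 | 0 = 6 and prec = oprec = 16;
   casting e.g. (signed char) -1 to OUTER yields 0xffff, the largest
   obtainable value.  For OUTER = signed 32-bit, INNER = unsigned 8-bit:
   det = 4 | 0 | 1 = 5 and prec = iprec = 8, giving an upper bound of 0xff.  */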
11226 /* Returns the smallest value obtainable by casting something in INNER type to
11227 OUTER type. */
11229 tree
11230 lower_bound_in_type (tree outer, tree inner)
11232 unsigned oprec = TYPE_PRECISION (outer);
11233 unsigned iprec = TYPE_PRECISION (inner);
11235 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11236 and obtain 0. */
11237 if (TYPE_UNSIGNED (outer)
11238 /* If we are widening something of an unsigned type, OUTER type
11239 contains all values of INNER type. In particular, both INNER
11240 and OUTER types have zero in common. */
11241 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11242 return build_int_cst (outer, 0);
11243 else
11245 /* If we are widening a signed type to another signed type, we
11246 want to obtain -2^^(iprec-1). If we are keeping the
11247 precision or narrowing to a signed type, we want to obtain
11248 -2^(oprec-1). */
11249 unsigned prec = oprec > iprec ? iprec : oprec;
11250 return wide_int_to_tree (outer,
11251 wi::mask (prec - 1, true,
11252 TYPE_PRECISION (outer)));
11256 /* Return true if two operands that are suitable for PHI nodes are
11257 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11258 SSA_NAME or invariant. Note that this is strictly an optimization.
11259 That is, callers of this function can directly call operand_equal_p
11260 and get the same result, only slower. */
11262 bool
11263 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11265 if (arg0 == arg1)
11266 return true;
11267 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11268 return false;
11269 return operand_equal_p (arg0, arg1, 0);
11272 /* Returns the number of zeros at the end of the binary representation of X. */
11274 tree
11275 num_ending_zeros (const_tree x)
11277 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11281 #define WALK_SUBTREE(NODE) \
11282 do \
11284 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11285 if (result) \
11286 return result; \
11288 while (0)
11290 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11291 to be walked whenever a type is seen in the tree. The rest of the operands
11292 and the return value are as for walk_tree. */
11294 static tree
11295 walk_type_fields (tree type, walk_tree_fn func, void *data,
11296 hash_set<tree> *pset, walk_tree_lh lh)
11298 tree result = NULL_TREE;
11300 switch (TREE_CODE (type))
11302 case POINTER_TYPE:
11303 case REFERENCE_TYPE:
11304 case VECTOR_TYPE:
11305 /* We have to worry about mutually recursive pointers. These can't
11306 be written in C. They can in Ada. It's pathological, but
11307 there's an ACATS test (c38102a) that checks it. Deal with this
11308 by checking if we're pointing to another pointer, that one
11309 points to another pointer, that one does too, and we have no htab.
11310 If so, get a hash table. We check three levels deep to avoid
11311 the cost of the hash table if we don't need one. */
11312 if (POINTER_TYPE_P (TREE_TYPE (type))
11313 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11314 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11315 && !pset)
11317 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11318 func, data);
11319 if (result)
11320 return result;
11322 break;
11325 /* fall through */
11327 case COMPLEX_TYPE:
11328 WALK_SUBTREE (TREE_TYPE (type));
11329 break;
11331 case METHOD_TYPE:
11332 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11334 /* Fall through. */
11336 case FUNCTION_TYPE:
11337 WALK_SUBTREE (TREE_TYPE (type));
11339 tree arg;
11341 /* We never want to walk into default arguments. */
11342 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11343 WALK_SUBTREE (TREE_VALUE (arg));
11345 break;
11347 case ARRAY_TYPE:
11348 /* Don't follow this node's type if it is a pointer, for fear that
11349 we'll have infinite recursion. If we have a PSET, then we
11350 need not fear. */
11351 if (pset
11352 || (!POINTER_TYPE_P (TREE_TYPE (type))
11353 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11354 WALK_SUBTREE (TREE_TYPE (type));
11355 WALK_SUBTREE (TYPE_DOMAIN (type));
11356 break;
11358 case OFFSET_TYPE:
11359 WALK_SUBTREE (TREE_TYPE (type));
11360 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11361 break;
11363 default:
11364 break;
11367 return NULL_TREE;
11370 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11371 called with the DATA and the address of each sub-tree. If FUNC returns a
11372 non-NULL value, the traversal is stopped, and the value returned by FUNC
11373 is returned. If PSET is non-NULL it is used to record the nodes visited,
11374 and to avoid visiting a node more than once. */
11376 tree
11377 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11378 hash_set<tree> *pset, walk_tree_lh lh)
11380 #define WALK_SUBTREE_TAIL(NODE) \
11381 do \
11383 tp = & (NODE); \
11384 goto tail_recurse; \
11386 while (0)
11388 tail_recurse:
11389 /* Skip empty subtrees. */
11390 if (!*tp)
11391 return NULL_TREE;
11393 /* Don't walk the same tree twice, if the user has requested
11394 that we avoid doing so. */
11395 if (pset && pset->add (*tp))
11396 return NULL_TREE;
11398 /* Call the function. */
11399 int walk_subtrees = 1;
11400 tree result = (*func) (tp, &walk_subtrees, data);
11402 /* If we found something, return it. */
11403 if (result)
11404 return result;
11406 tree t = *tp;
11407 tree_code code = TREE_CODE (t);
11409 /* Even if we didn't, FUNC may have decided that there was nothing
11410 interesting below this point in the tree. */
11411 if (!walk_subtrees)
11413 /* But we still need to check our siblings. */
11414 if (code == TREE_LIST)
11415 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11416 else if (code == OMP_CLAUSE)
11417 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11418 else
11419 return NULL_TREE;
11422 if (lh)
11424 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11425 if (result || !walk_subtrees)
11426 return result;
11429 switch (code)
11431 case ERROR_MARK:
11432 case IDENTIFIER_NODE:
11433 case INTEGER_CST:
11434 case REAL_CST:
11435 case FIXED_CST:
11436 case STRING_CST:
11437 case BLOCK:
11438 case PLACEHOLDER_EXPR:
11439 case SSA_NAME:
11440 case FIELD_DECL:
11441 case RESULT_DECL:
11442 /* None of these have subtrees other than those already walked
11443 above. */
11444 break;
11446 case TREE_LIST:
11447 WALK_SUBTREE (TREE_VALUE (t));
11448 WALK_SUBTREE_TAIL (TREE_CHAIN (t));
11450 case TREE_VEC:
11452 int len = TREE_VEC_LENGTH (t);
11454 if (len == 0)
11455 break;
11457 /* Walk all elements but the last. */
11458 for (int i = 0; i < len - 1; ++i)
11459 WALK_SUBTREE (TREE_VEC_ELT (t, i));
11461 /* Now walk the last one as a tail call. */
11462 WALK_SUBTREE_TAIL (TREE_VEC_ELT (t, len - 1));
11465 case VECTOR_CST:
11467 unsigned len = vector_cst_encoded_nelts (t);
11468 if (len == 0)
11469 break;
11470 /* Walk all elements but the last. */
11471 for (unsigned i = 0; i < len - 1; ++i)
11472 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (t, i));
11473 /* Now walk the last one as a tail call. */
11474 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (t, len - 1));
11477 case COMPLEX_CST:
11478 WALK_SUBTREE (TREE_REALPART (t));
11479 WALK_SUBTREE_TAIL (TREE_IMAGPART (t));
11481 case CONSTRUCTOR:
11483 unsigned HOST_WIDE_INT idx;
11484 constructor_elt *ce;
11486 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce);
11487 idx++)
11488 WALK_SUBTREE (ce->value);
11490 break;
11492 case SAVE_EXPR:
11493 WALK_SUBTREE_TAIL (TREE_OPERAND (t, 0));
11495 case BIND_EXPR:
11497 tree decl;
11498 for (decl = BIND_EXPR_VARS (t); decl; decl = DECL_CHAIN (decl))
11500 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11501 into declarations that are just mentioned, rather than
11502 declared; they don't really belong to this part of the tree.
11503 And, we can see cycles: the initializer for a declaration
11504 can refer to the declaration itself. */
11505 WALK_SUBTREE (DECL_INITIAL (decl));
11506 WALK_SUBTREE (DECL_SIZE (decl));
11507 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11509 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (t));
11512 case STATEMENT_LIST:
11514 tree_stmt_iterator i;
11515 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
11516 WALK_SUBTREE (*tsi_stmt_ptr (i));
11518 break;
11520 case OMP_CLAUSE:
11522 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
11523 for (int i = 0; i < len; i++)
11524 WALK_SUBTREE (OMP_CLAUSE_OPERAND (t, i));
11525 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t));
11528 case TARGET_EXPR:
11530 int i, len;
11532 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11533 But, we only want to walk once. */
11534 len = (TREE_OPERAND (t, 3) == TREE_OPERAND (t, 1)) ? 2 : 3;
11535 for (i = 0; i < len; ++i)
11536 WALK_SUBTREE (TREE_OPERAND (t, i));
11537 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len));
11540 case DECL_EXPR:
11541 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11542 defining. We only want to walk into these fields of a type in this
11543 case and not in the general case of a mere reference to the type.
11545 The criterion is as follows: if the field can be an expression, it
11546 must be walked only here. This should be in keeping with the fields
11547 that are directly gimplified in gimplify_type_sizes in order for the
11548 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11549 variable-sized types.
11551 Note that DECLs get walked as part of processing the BIND_EXPR. */
11552 if (TREE_CODE (DECL_EXPR_DECL (t)) == TYPE_DECL)
11554 /* Call the function for the decl so e.g. copy_tree_body_r can
11555 replace it with the remapped one. */
11556 result = (*func) (&DECL_EXPR_DECL (t), &walk_subtrees, data);
11557 if (result || !walk_subtrees)
11558 return result;
11560 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (t));
11561 if (TREE_CODE (*type_p) == ERROR_MARK)
11562 return NULL_TREE;
11564 /* Call the function for the type. See if it returns anything or
11565 doesn't want us to continue. If we are to continue, walk both
11566 the normal fields and those for the declaration case. */
11567 result = (*func) (type_p, &walk_subtrees, data);
11568 if (result || !walk_subtrees)
11569 return result;
11571 tree type = *type_p;
11573 /* But do not walk a pointed-to type since it may itself need to
11574 be walked in the declaration case if it isn't anonymous. */
11575 if (!POINTER_TYPE_P (type))
11577 result = walk_type_fields (type, func, data, pset, lh);
11578 if (result)
11579 return result;
11582 /* If this is a record type, also walk the fields. */
11583 if (RECORD_OR_UNION_TYPE_P (type))
11585 tree field;
11587 for (field = TYPE_FIELDS (type); field;
11588 field = DECL_CHAIN (field))
11590 /* We'd like to look at the type of the field, but we can
11591 easily get infinite recursion. So assume it's pointed
11592 to elsewhere in the tree. Also, ignore things that
11593 aren't fields. */
11594 if (TREE_CODE (field) != FIELD_DECL)
11595 continue;
11597 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11598 WALK_SUBTREE (DECL_SIZE (field));
11599 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11600 if (TREE_CODE (type) == QUAL_UNION_TYPE)
11601 WALK_SUBTREE (DECL_QUALIFIER (field));
11605 /* Same for scalar types. */
11606 else if (TREE_CODE (type) == BOOLEAN_TYPE
11607 || TREE_CODE (type) == ENUMERAL_TYPE
11608 || TREE_CODE (type) == INTEGER_TYPE
11609 || TREE_CODE (type) == FIXED_POINT_TYPE
11610 || TREE_CODE (type) == REAL_TYPE)
11612 WALK_SUBTREE (TYPE_MIN_VALUE (type));
11613 WALK_SUBTREE (TYPE_MAX_VALUE (type));
11616 WALK_SUBTREE (TYPE_SIZE (type));
11617 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (type));
11619 /* FALLTHRU */
11621 default:
11622 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11624 int i, len;
11626 /* Walk over all the sub-trees of this operand. */
11627 len = TREE_OPERAND_LENGTH (t);
11629 /* Go through the subtrees. We need to do this in forward order so
11630 that the scope of a FOR_EXPR is handled properly. */
11631 if (len)
11633 for (i = 0; i < len - 1; ++i)
11634 WALK_SUBTREE (TREE_OPERAND (t, i));
11635 WALK_SUBTREE_TAIL (TREE_OPERAND (t, len - 1));
11638 /* If this is a type, walk the needed fields in the type. */
11639 else if (TYPE_P (t))
11640 return walk_type_fields (t, func, data, pset, lh);
11641 break;
11644 /* We didn't find what we were looking for. */
11645 return NULL_TREE;
11647 #undef WALK_SUBTREE_TAIL
11649 #undef WALK_SUBTREE
11651 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11653 tree
11654 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11655 walk_tree_lh lh)
11657 tree result;
11659 hash_set<tree> pset;
11660 result = walk_tree_1 (tp, func, data, &pset, lh);
11661 return result;
11665 tree
11666 tree_block (tree t)
11668 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11670 if (IS_EXPR_CODE_CLASS (c))
11671 return LOCATION_BLOCK (t->exp.locus);
11672 gcc_unreachable ();
11673 return NULL;
11676 void
11677 tree_set_block (tree t, tree b)
11679 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11681 if (IS_EXPR_CODE_CLASS (c))
11683 t->exp.locus = set_block (t->exp.locus, b);
11685 else
11686 gcc_unreachable ();
11689 /* Create a nameless artificial label and put it in the current
11690 function context. The label has a location of LOC. Returns the
11691 newly created label. */
11693 tree
11694 create_artificial_label (location_t loc)
11696 tree lab = build_decl (loc,
11697 LABEL_DECL, NULL_TREE, void_type_node);
11699 DECL_ARTIFICIAL (lab) = 1;
11700 DECL_IGNORED_P (lab) = 1;
11701 DECL_CONTEXT (lab) = current_function_decl;
11702 return lab;
11705 /* Given a tree, try to return a useful variable name that we can use
11706 to prefix a temporary that is being assigned the value of the tree.
11707 I.e. given <temp> = &A, return A. */
11709 const char *
11710 get_name (tree t)
11712 tree stripped_decl;
11714 stripped_decl = t;
11715 STRIP_NOPS (stripped_decl);
11716 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11717 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11718 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11720 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11721 if (!name)
11722 return NULL;
11723 return IDENTIFIER_POINTER (name);
11725 else
11727 switch (TREE_CODE (stripped_decl))
11729 case ADDR_EXPR:
11730 return get_name (TREE_OPERAND (stripped_decl, 0));
11731 default:
11732 return NULL;
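/* Illustrative sketch (editor's addition, hypothetical trees): for a
   temporary initialized as  tmp = &foo,  calling get_name on the RHS
   strips the ADDR_EXPR via the switch above and yields the name of foo:

     tree addr = build_fold_addr_expr (foo_decl);   // foo_decl: a named VAR_DECL
     const char *n = get_name (addr);               // "foo"

   An anonymous SSA name with no SSA_NAME_IDENTIFIER yields NULL instead.  */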
11737 /* Return true if TYPE has a variable argument list. */
11739 bool
11740 stdarg_p (const_tree fntype)
11742 function_args_iterator args_iter;
11743 tree n = NULL_TREE, t;
11745 if (!fntype)
11746 return false;
11748 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11749 return true;
11751 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11753 n = t;
11756 return n != NULL_TREE && n != void_type_node;
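/* Worked example (editor's note): for a declaration like
     int printf (const char *, ...);
   the argument-type list does not end in void_type_node, so stdarg_p
   returns true; for  int f (void)  the last (and only) entry is
   void_type_node and the function returns false.  A C23-style
   int g (...)  is handled by the TYPE_NO_NAMED_ARGS_STDARG_P check
   above.  */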
11759 /* Return true if TYPE has a prototype. */
11761 bool
11762 prototype_p (const_tree fntype)
11764 tree t;
11766 gcc_assert (fntype != NULL_TREE);
11768 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11769 return true;
11771 t = TYPE_ARG_TYPES (fntype);
11772 return (t != NULL_TREE);
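/* For instance (editor's note): an old-style K&R declaration
     int f ();
   has a null TYPE_ARG_TYPES and is reported as unprototyped, while both
     int g (void);   and   int h (int, ...);
   carry a non-null argument list and count as prototyped.  */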
11775 /* If BLOCK is inlined from an __attribute__((__artificial__))
11776 routine, return a pointer to the location from which it has been
11777 called. */
11778 location_t *
11779 block_nonartificial_location (tree block)
11781 location_t *ret = NULL;
11783 while (block && TREE_CODE (block) == BLOCK
11784 && BLOCK_ABSTRACT_ORIGIN (block))
11786 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11787 if (TREE_CODE (ao) == FUNCTION_DECL)
11789 /* If AO is an artificial inline, point RET to the
11790 call site locus at which it has been inlined and continue
11791 the loop, in case AO's caller is also an artificial
11792 inline. */
11793 if (DECL_DECLARED_INLINE_P (ao)
11794 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11795 ret = &BLOCK_SOURCE_LOCATION (block);
11796 else
11797 break;
11799 else if (TREE_CODE (ao) != BLOCK)
11800 break;
11802 block = BLOCK_SUPERCONTEXT (block);
11804 return ret;
11808 /* If EXP is inlined from an __attribute__((__artificial__))
11809 function, return the location of the original call expression. */
11811 location_t
11812 tree_nonartificial_location (tree exp)
11814 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11816 if (loc)
11817 return *loc;
11818 else
11819 return EXPR_LOCATION (exp);
11822 /* Return the location into which EXP has been inlined. Analogous
11823 to tree_nonartificial_location() above but not limited to artificial
11824 functions declared inline. If SYSTEM_HEADER is true, return
11825 the macro expansion point of the location if it's in a system header. */
11827 location_t
11828 tree_inlined_location (tree exp, bool system_header /* = true */)
11830 location_t loc = UNKNOWN_LOCATION;
11832 tree block = TREE_BLOCK (exp);
11834 while (block && TREE_CODE (block) == BLOCK
11835 && BLOCK_ABSTRACT_ORIGIN (block))
11837 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11838 if (TREE_CODE (ao) == FUNCTION_DECL)
11839 loc = BLOCK_SOURCE_LOCATION (block);
11840 else if (TREE_CODE (ao) != BLOCK)
11841 break;
11843 block = BLOCK_SUPERCONTEXT (block);
11846 if (loc == UNKNOWN_LOCATION)
11848 loc = EXPR_LOCATION (exp);
11849 if (system_header)
11850 /* Only consider macro expansion when the block traversal failed
11851 to find a location. Otherwise it's not relevant. */
11852 return expansion_point_location_if_in_system_header (loc);
11855 return loc;
11858 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11859 nodes. */
11861 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11863 hashval_t
11864 cl_option_hasher::hash (tree x)
11866 const_tree const t = x;
11868 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11869 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11870 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11871 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11872 else
11873 gcc_unreachable ();
11876 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11877 TARGET_OPTION tree node) is the same as that given by *Y, which is
11878 a node of the same kind. */
11880 bool
11881 cl_option_hasher::equal (tree x, tree y)
11883 const_tree const xt = x;
11884 const_tree const yt = y;
11886 if (TREE_CODE (xt) != TREE_CODE (yt))
11887 return false;
11889 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11890 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11891 TREE_OPTIMIZATION (yt));
11892 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11893 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11894 TREE_TARGET_OPTION (yt));
11895 else
11896 gcc_unreachable ();
11899 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11901 tree
11902 build_optimization_node (struct gcc_options *opts,
11903 struct gcc_options *opts_set)
11905 tree t;
11907 /* Use the cache of optimization nodes. */
11909 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11910 opts, opts_set);
11912 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11913 t = *slot;
11914 if (!t)
11916 /* Insert this one into the hash table. */
11917 t = cl_optimization_node;
11918 *slot = t;
11920 /* Make a new node for next time round. */
11921 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11924 return t;
11927 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11929 tree
11930 build_target_option_node (struct gcc_options *opts,
11931 struct gcc_options *opts_set)
11933 tree t;
11935 /* Use the cache of optimization nodes. */
11937 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11938 opts, opts_set);
11940 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11941 t = *slot;
11942 if (!t)
11944 /* Insert this one into the hash table. */
11945 t = cl_target_option_node;
11946 *slot = t;
11948 /* Make a new node for next time round. */
11949 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11952 return t;
11955 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11956 so that they aren't saved during PCH writing. */
11958 void
11959 prepare_target_option_nodes_for_pch (void)
11961 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11962 for (; iter != cl_option_hash_table->end (); ++iter)
11963 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11964 TREE_TARGET_GLOBALS (*iter) = NULL;
11967 /* Determine the "ultimate origin" of a block. */
11969 tree
11970 block_ultimate_origin (const_tree block)
11972 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11974 if (origin == NULL_TREE)
11975 return NULL_TREE;
11976 else
11978 gcc_checking_assert ((DECL_P (origin)
11979 && DECL_ORIGIN (origin) == origin)
11980 || BLOCK_ORIGIN (origin) == origin);
11981 return origin;
11985 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11986 no instruction. */
11988 bool
11989 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11991 /* Do not strip casts into or out of differing address spaces. */
11992 if (POINTER_TYPE_P (outer_type)
11993 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11995 if (!POINTER_TYPE_P (inner_type)
11996 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11997 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11998 return false;
12000 else if (POINTER_TYPE_P (inner_type)
12001 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12003 /* We already know that outer_type is not a pointer with
12004 a non-generic address space. */
12005 return false;
12008 /* Use precision rather than machine mode when we can, which gives
12009 the correct answer even for submode (bit-field) types. */
12010 if ((INTEGRAL_TYPE_P (outer_type)
12011 || POINTER_TYPE_P (outer_type)
12012 || TREE_CODE (outer_type) == OFFSET_TYPE)
12013 && (INTEGRAL_TYPE_P (inner_type)
12014 || POINTER_TYPE_P (inner_type)
12015 || TREE_CODE (inner_type) == OFFSET_TYPE))
12016 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12018 /* Otherwise fall back on comparing machine modes (e.g. for
12019 aggregate types, floats). */
12020 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
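/* Editor's illustration (assuming a typical ILP32/LP64 target): a cast
   between  int  and  unsigned int  compares equal precisions and is a
   no-op conversion, whereas  int -> long  on LP64 differs in precision
   and is not:

     tree_nop_conversion_p (unsigned_type_node, integer_type_node);      // true
     tree_nop_conversion_p (long_integer_type_node, integer_type_node);  // false when long is 64-bit
 */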
12023 /* Return true iff conversion in EXP generates no instruction. Mark
12024 it inline so that we fully inline into the stripping functions even
12025 though we have two uses of this function. */
12027 static inline bool
12028 tree_nop_conversion (const_tree exp)
12030 tree outer_type, inner_type;
12032 if (location_wrapper_p (exp))
12033 return true;
12034 if (!CONVERT_EXPR_P (exp)
12035 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12036 return false;
12038 outer_type = TREE_TYPE (exp);
12039 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12040 if (!inner_type || inner_type == error_mark_node)
12041 return false;
12043 return tree_nop_conversion_p (outer_type, inner_type);
12046 /* Return true iff conversion in EXP generates no instruction. Don't
12047 consider conversions changing the signedness. */
12049 static bool
12050 tree_sign_nop_conversion (const_tree exp)
12052 tree outer_type, inner_type;
12054 if (!tree_nop_conversion (exp))
12055 return false;
12057 outer_type = TREE_TYPE (exp);
12058 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12060 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12061 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12064 /* Strip conversions from EXP according to tree_nop_conversion and
12065 return the resulting expression. */
12067 tree
12068 tree_strip_nop_conversions (tree exp)
12070 while (tree_nop_conversion (exp))
12071 exp = TREE_OPERAND (exp, 0);
12072 return exp;
12075 /* Strip conversions from EXP according to tree_sign_nop_conversion
12076 and return the resulting expression. */
12078 tree
12079 tree_strip_sign_nop_conversions (tree exp)
12081 while (tree_sign_nop_conversion (exp))
12082 exp = TREE_OPERAND (exp, 0);
12083 return exp;
12086 /* Avoid any floating point extensions from EXP. */
12087 tree
12088 strip_float_extensions (tree exp)
12090 tree sub, expt, subt;
12092 /* For floating point constant look up the narrowest type that can hold
12093 it properly and handle it like (type)(narrowest_type)constant.
12094 This way we can optimize for instance a=a*2.0 where "a" is float
12095 but 2.0 is double constant. */
12096 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12098 REAL_VALUE_TYPE orig;
12099 tree type = NULL;
12101 orig = TREE_REAL_CST (exp);
12102 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12103 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12104 type = float_type_node;
12105 else if (TYPE_PRECISION (TREE_TYPE (exp))
12106 > TYPE_PRECISION (double_type_node)
12107 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12108 type = double_type_node;
12109 if (type)
12110 return build_real_truncate (type, orig);
12113 if (!CONVERT_EXPR_P (exp))
12114 return exp;
12116 sub = TREE_OPERAND (exp, 0);
12117 subt = TREE_TYPE (sub);
12118 expt = TREE_TYPE (exp);
12120 if (!FLOAT_TYPE_P (subt))
12121 return exp;
12123 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12124 return exp;
12126 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12127 return exp;
12129 return strip_float_extensions (sub);
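/* Minimal sketch of the effect (editor's addition): in a source
   expression like

     float f;
     double d = (double) f + 2.0;

   the  (double) f  operand is a widening conversion, so
   strip_float_extensions returns the narrower  f  itself, and the
   REAL_CST 2.0, being exactly representable as float, is rebuilt in
   float via build_real_truncate.  */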
12132 /* Strip out all handled components that produce invariant
12133 offsets. */
12135 const_tree
12136 strip_invariant_refs (const_tree op)
12138 while (handled_component_p (op))
12140 switch (TREE_CODE (op))
12142 case ARRAY_REF:
12143 case ARRAY_RANGE_REF:
12144 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12145 || TREE_OPERAND (op, 2) != NULL_TREE
12146 || TREE_OPERAND (op, 3) != NULL_TREE)
12147 return NULL;
12148 break;
12150 case COMPONENT_REF:
12151 if (TREE_OPERAND (op, 2) != NULL_TREE)
12152 return NULL;
12153 break;
12155 default:;
12157 op = TREE_OPERAND (op, 0);
12160 return op;
12163 /* Strip handled components with zero offset from OP. */
12165 tree
12166 strip_zero_offset_components (tree op)
12168 while (TREE_CODE (op) == COMPONENT_REF
12169 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12170 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12171 op = TREE_OPERAND (op, 0);
12172 return op;
12175 static GTY(()) tree gcc_eh_personality_decl;
12177 /* Return the GCC personality function decl. */
12179 tree
12180 lhd_gcc_personality (void)
12182 if (!gcc_eh_personality_decl)
12183 gcc_eh_personality_decl = build_personality_function ("gcc");
12184 return gcc_eh_personality_decl;
12187 /* TARGET is a call target of GIMPLE call statement
12188 (obtained by gimple_call_fn). Return true if it is
12189 OBJ_TYPE_REF representing a virtual call of a C++ method.
12190 (As opposed to OBJ_TYPE_REF representing objc calls
12191 through a cast where middle-end devirtualization machinery
12192 can't apply.) FOR_DUMP_P is true when being called from
12193 the dump routines. */
12195 bool
12196 virtual_method_call_p (const_tree target, bool for_dump_p)
12198 if (TREE_CODE (target) != OBJ_TYPE_REF)
12199 return false;
12200 tree t = TREE_TYPE (target);
12201 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12202 t = TREE_TYPE (t);
12203 if (TREE_CODE (t) == FUNCTION_TYPE)
12204 return false;
12205 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12206 /* If we do not have BINFO associated, it means that type was built
12207 without devirtualization enabled. Do not consider this a virtual
12208 call. */
12209 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12210 return false;
12211 return true;
12214 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12216 static tree
12217 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12219 unsigned int i;
12220 tree base_binfo, b;
12222 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12223 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12224 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12225 return base_binfo;
12226 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12227 return b;
12228 return NULL;
12231 /* Try to find a base info of BINFO that would have its field decl at offset
12232 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12233 found, return it, otherwise return NULL_TREE. */
12235 tree
12236 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12238 tree type = BINFO_TYPE (binfo);
12240 while (true)
12242 HOST_WIDE_INT pos, size;
12243 tree fld;
12244 int i;
12246 if (types_same_for_odr (type, expected_type))
12247 return binfo;
12248 if (maybe_lt (offset, 0))
12249 return NULL_TREE;
12251 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12253 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12254 continue;
12256 pos = int_bit_position (fld);
12257 size = tree_to_uhwi (DECL_SIZE (fld));
12258 if (known_in_range_p (offset, pos, size))
12259 break;
12261 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12262 return NULL_TREE;
12264 /* Offset 0 indicates the primary base, whose vtable contents are
12265 represented in the binfo for the derived class. */
12266 else if (maybe_ne (offset, 0))
12268 tree found_binfo = NULL, base_binfo;
12269 /* Offsets in BINFO are in bytes relative to the whole structure
12270 while POS is in bits relative to the containing field. */
12271 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12272 / BITS_PER_UNIT);
12274 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12275 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12276 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12278 found_binfo = base_binfo;
12279 break;
12281 if (found_binfo)
12282 binfo = found_binfo;
12283 else
12284 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12285 binfo_offset);
12288 type = TREE_TYPE (fld);
12289 offset -= pos;
12293 /* PR 84195: Replace control characters in "unescaped" with their
12294 escaped equivalents. Allow newlines if -fmessage-length has
12295 been set to a non-zero value. This is done here, rather than
12296 where the attribute is recorded as the message length can
12297 change between these two locations. */
12299 void
12300 escaped_string::escape (const char *unescaped)
12302 char *escaped;
12303 size_t i, new_i, len;
12305 if (m_owned)
12306 free (m_str);
12308 m_str = const_cast<char *> (unescaped);
12309 m_owned = false;
12311 if (unescaped == NULL || *unescaped == 0)
12312 return;
12314 len = strlen (unescaped);
12315 escaped = NULL;
12316 new_i = 0;
12318 for (i = 0; i < len; i++)
12320 char c = unescaped[i];
12322 if (!ISCNTRL (c))
12324 if (escaped)
12325 escaped[new_i++] = c;
12326 continue;
12329 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12331 if (escaped == NULL)
12333 /* We only allocate space for a new string if we
12334 actually encounter a control character that
12335 needs replacing. */
12336 escaped = (char *) xmalloc (len * 2 + 1);
12337 strncpy (escaped, unescaped, i);
12338 new_i = i;
12341 escaped[new_i++] = '\\';
12343 switch (c)
12345 case '\a': escaped[new_i++] = 'a'; break;
12346 case '\b': escaped[new_i++] = 'b'; break;
12347 case '\f': escaped[new_i++] = 'f'; break;
12348 case '\n': escaped[new_i++] = 'n'; break;
12349 case '\r': escaped[new_i++] = 'r'; break;
12350 case '\t': escaped[new_i++] = 't'; break;
12351 case '\v': escaped[new_i++] = 'v'; break;
12352 default: escaped[new_i++] = '?'; break;
12355 else if (escaped)
12356 escaped[new_i++] = c;
12359 if (escaped)
12361 escaped[new_i] = 0;
12362 m_str = escaped;
12363 m_owned = true;
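/* Usage sketch (editor's addition): given a deprecation message that
   contains a control character, e.g.

     escaped_string msg;
     msg.escape ("use\tfoo instead");   // stored as "use\\tfoo instead"

   a new buffer is allocated and owned by the object; a message with no
   control characters is kept as a borrowed pointer and never copied.  */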
12367 /* Warn about a use of an identifier which was marked deprecated. Returns
12368 whether a warning was given. */
12370 bool
12371 warn_deprecated_use (tree node, tree attr)
12373 escaped_string msg;
12375 if (node == 0 || !warn_deprecated_decl)
12376 return false;
12378 if (!attr)
12380 if (DECL_P (node))
12381 attr = DECL_ATTRIBUTES (node);
12382 else if (TYPE_P (node))
12384 tree decl = TYPE_STUB_DECL (node);
12385 if (decl)
12386 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12387 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12388 != NULL_TREE)
12390 node = TREE_TYPE (decl);
12391 attr = TYPE_ATTRIBUTES (node);
12396 if (attr)
12397 attr = lookup_attribute ("deprecated", attr);
12399 if (attr)
12400 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12402 bool w = false;
12403 if (DECL_P (node))
12405 auto_diagnostic_group d;
12406 if (msg)
12407 w = warning (OPT_Wdeprecated_declarations,
12408 "%qD is deprecated: %s", node, (const char *) msg);
12409 else
12410 w = warning (OPT_Wdeprecated_declarations,
12411 "%qD is deprecated", node);
12412 if (w)
12413 inform (DECL_SOURCE_LOCATION (node), "declared here");
12415 else if (TYPE_P (node))
12417 tree what = NULL_TREE;
12418 tree decl = TYPE_STUB_DECL (node);
12420 if (TYPE_NAME (node))
12422 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12423 what = TYPE_NAME (node);
12424 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12425 && DECL_NAME (TYPE_NAME (node)))
12426 what = DECL_NAME (TYPE_NAME (node));
12429 auto_diagnostic_group d;
12430 if (what)
12432 if (msg)
12433 w = warning (OPT_Wdeprecated_declarations,
12434 "%qE is deprecated: %s", what, (const char *) msg);
12435 else
12436 w = warning (OPT_Wdeprecated_declarations,
12437 "%qE is deprecated", what);
12439 else
12441 if (msg)
12442 w = warning (OPT_Wdeprecated_declarations,
12443 "type is deprecated: %s", (const char *) msg);
12444 else
12445 w = warning (OPT_Wdeprecated_declarations,
12446 "type is deprecated");
12449 if (w && decl)
12450 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12453 return w;
12456 /* Error out with an identifier which was marked 'unavailable'. */
12457 void
12458 error_unavailable_use (tree node, tree attr)
12460 escaped_string msg;
12462 if (node == 0)
12463 return;
12465 if (!attr)
12467 if (DECL_P (node))
12468 attr = DECL_ATTRIBUTES (node);
12469 else if (TYPE_P (node))
12471 tree decl = TYPE_STUB_DECL (node);
12472 if (decl)
12473 attr = lookup_attribute ("unavailable",
12474 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12478 if (attr)
12479 attr = lookup_attribute ("unavailable", attr);
12481 if (attr)
12482 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12484 if (DECL_P (node))
12486 auto_diagnostic_group d;
12487 if (msg)
12488 error ("%qD is unavailable: %s", node, (const char *) msg);
12489 else
12490 error ("%qD is unavailable", node);
12491 inform (DECL_SOURCE_LOCATION (node), "declared here");
12493 else if (TYPE_P (node))
12495 tree what = NULL_TREE;
12496 tree decl = TYPE_STUB_DECL (node);
12498 if (TYPE_NAME (node))
12500 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12501 what = TYPE_NAME (node);
12502 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12503 && DECL_NAME (TYPE_NAME (node)))
12504 what = DECL_NAME (TYPE_NAME (node));
12507 auto_diagnostic_group d;
12508 if (what)
12510 if (msg)
12511 error ("%qE is unavailable: %s", what, (const char *) msg);
12512 else
12513 error ("%qE is unavailable", what);
12515 else
12517 if (msg)
12518 error ("type is unavailable: %s", (const char *) msg);
12519 else
12520 error ("type is unavailable");
12523 if (decl)
12524 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12528 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12529 somewhere in it. */
12531 bool
12532 contains_bitfld_component_ref_p (const_tree ref)
12534 while (handled_component_p (ref))
12536 if (TREE_CODE (ref) == COMPONENT_REF
12537 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12538 return true;
12539 ref = TREE_OPERAND (ref, 0);
12542 return false;
12545 /* Try to determine whether a TRY_CATCH expression can fall through.
12546 This is a subroutine of block_may_fallthru. */
12548 static bool
12549 try_catch_may_fallthru (const_tree stmt)
12551 tree_stmt_iterator i;
12553 /* If the TRY block can fall through, the whole TRY_CATCH can
12554 fall through. */
12555 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12556 return true;
12558 i = tsi_start (TREE_OPERAND (stmt, 1));
12559 switch (TREE_CODE (tsi_stmt (i)))
12561 case CATCH_EXPR:
12562 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12563 catch expression and a body. The whole TRY_CATCH may fall
12564 through iff any of the catch bodies falls through. */
12565 for (; !tsi_end_p (i); tsi_next (&i))
12567 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12568 return true;
12570 return false;
12572 case EH_FILTER_EXPR:
12573 /* The exception filter expression only matters if there is an
12574 exception. If the exception does not match EH_FILTER_TYPES,
12575 we will execute EH_FILTER_FAILURE, and we will fall through
12576 if that falls through. If the exception does match
12577 EH_FILTER_TYPES, the stack unwinder will continue up the
12578 stack, so we will not fall through. We don't know whether we
12579 will throw an exception which matches EH_FILTER_TYPES or not,
12580 so we just ignore EH_FILTER_TYPES and assume that we might
12581 throw an exception which doesn't match. */
12582 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12584 default:
12585 /* This case represents statements to be executed when an
12586 exception occurs. Those statements are implicitly followed
12587 by a RESX statement to resume execution after the exception.
12588 So in this case the TRY_CATCH never falls through. */
12589 return false;
12593 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12594 need not be 100% accurate; simply be conservative and return true if we
12595 don't know. This is used only to avoid stupidly generating extra code.
12596 If we're wrong, we'll just delete the extra code later. */
12598 bool
12599 block_may_fallthru (const_tree block)
12601 /* This CONST_CAST is okay because expr_last returns its argument
12602 unmodified and we assign it to a const_tree. */
12603 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12605 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12607 case GOTO_EXPR:
12608 case RETURN_EXPR:
12609 /* Easy cases. If the last statement of the block implies
12610 control transfer, then we can't fall through. */
12611 return false;
12613 case SWITCH_EXPR:
12614 /* If there is a default: label or case labels cover all possible
12615 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12616 to some case label in all cases and all we care is whether the
12617 SWITCH_BODY falls through. */
12618 if (SWITCH_ALL_CASES_P (stmt))
12619 return block_may_fallthru (SWITCH_BODY (stmt));
12620 return true;
12622 case COND_EXPR:
12623 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12624 return true;
12625 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12627 case BIND_EXPR:
12628 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12630 case TRY_CATCH_EXPR:
12631 return try_catch_may_fallthru (stmt);
12633 case TRY_FINALLY_EXPR:
12634 /* The finally clause is always executed after the try clause,
12635 so if it does not fall through, then the try-finally will not
12636 fall through. Otherwise, if the try clause does not fall
12637 through, then when the finally clause falls through it will
12638 resume execution wherever the try clause was going. So the
12639 whole try-finally will only fall through if both the try
12640 clause and the finally clause fall through. */
12641 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12642 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12644 case EH_ELSE_EXPR:
12645 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12647 case MODIFY_EXPR:
12648 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12649 stmt = TREE_OPERAND (stmt, 1);
12650 else
12651 return true;
12652 /* FALLTHRU */
12654 case CALL_EXPR:
12655 /* Functions that do not return do not fall through. */
12656 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12658 case CLEANUP_POINT_EXPR:
12659 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12661 case TARGET_EXPR:
12662 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12664 case ERROR_MARK:
12665 return true;
12667 default:
12668 return lang_hooks.block_may_fallthru (stmt);
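/* Editor's examples of the guesses made above (illustrative): a block
   whose last statement is  return x;  cannot fall through; one ending in
   a call to a noreturn function such as  abort ();  cannot either, via
   the ECF_NORETURN check; and a block ending in a plain assignment falls
   through (the conservative "true" answer).  */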
12672 /* True if we are using EH to handle cleanups. */
12673 static bool using_eh_for_cleanups_flag = false;
12675 /* This routine is called from front ends to indicate eh should be used for
12676 cleanups. */
12677 void
12678 using_eh_for_cleanups (void)
12680 using_eh_for_cleanups_flag = true;
12683 /* Query whether EH is used for cleanups. */
12684 bool
12685 using_eh_for_cleanups_p (void)
12687 return using_eh_for_cleanups_flag;
12690 /* Wrapper for tree_code_name to ensure that tree code is valid. */
12691 const char *
12692 get_tree_code_name (enum tree_code code)
12694 const char *invalid = "<invalid tree code>";
12696 /* The tree_code enum promotes to signed, but we could be getting
12697 invalid values, so force an unsigned comparison. */
12698 if (unsigned (code) >= MAX_TREE_CODES)
12700 if ((unsigned)code == 0xa5a5)
12701 return "ggc_freed";
12702 return invalid;
12705 return tree_code_name[code];
12708 /* Drops the TREE_OVERFLOW flag from T. */
12710 tree
12711 drop_tree_overflow (tree t)
12713 gcc_checking_assert (TREE_OVERFLOW (t));
12715 /* For tree codes with a sharing machinery re-build the result. */
12716 if (poly_int_tree_p (t))
12717 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12719 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12720 and canonicalize the result. */
12721 if (TREE_CODE (t) == VECTOR_CST)
12723 tree_vector_builder builder;
12724 builder.new_unary_operation (TREE_TYPE (t), t, true);
12725 unsigned int count = builder.encoded_nelts ();
12726 for (unsigned int i = 0; i < count; ++i)
12728 tree elt = VECTOR_CST_ELT (t, i);
12729 if (TREE_OVERFLOW (elt))
12730 elt = drop_tree_overflow (elt);
12731 builder.quick_push (elt);
12733 return builder.build ();
12736 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12737 and drop the flag. */
12738 t = copy_node (t);
12739 TREE_OVERFLOW (t) = 0;
12741 /* For constants that contain nested constants, drop the flag
12742 from those as well. */
12743 if (TREE_CODE (t) == COMPLEX_CST)
12745 if (TREE_OVERFLOW (TREE_REALPART (t)))
12746 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12747 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12748 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12751 return t;
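/* Sketch (editor's addition): an INTEGER_CST produced by folding an
   overflowing operation, say  INT_MAX + 1  with TREE_OVERFLOW set, is
   rebuilt here through wide_int_to_tree so the canonical, shared
   constant without the flag is returned; a COMPLEX_CST has the flag
   dropped recursively from its real and imaginary parts.  */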
12754 /* Given a memory reference expression T, return its base address.
12755 The base address of a memory reference expression is the main
12756 object being referenced. For instance, the base address for
12757 'array[i].fld[j]' is 'array'. You can think of this as stripping
12758 away the offset part from a memory address.
12760 This function calls handled_component_p to strip away all the inner
12761 parts of the memory reference until it reaches the base object. */
12763 tree
12764 get_base_address (tree t)
12766 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12767 t = TREE_OPERAND (t, 0);
12768 while (handled_component_p (t))
12769 t = TREE_OPERAND (t, 0);
12771 if ((TREE_CODE (t) == MEM_REF
12772 || TREE_CODE (t) == TARGET_MEM_REF)
12773 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12774 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12776 return t;
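/* Illustration (editor's addition, hypothetical expressions): both

     a.b[i].c          and          MEM_REF[&a, 16]

   have base address  a :  handled components are peeled off one by one,
   and a MEM_REF/TARGET_MEM_REF whose address operand is an ADDR_EXPR is
   looked through to the underlying declaration.  */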
12779 /* Return a tree of sizetype representing the size, in bytes, of the element
12780 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12782 tree
12783 array_ref_element_size (tree exp)
12785 tree aligned_size = TREE_OPERAND (exp, 3);
12786 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12787 location_t loc = EXPR_LOCATION (exp);
12789 /* If a size was specified in the ARRAY_REF, it's the size measured
12790 in alignment units of the element type. So multiply by that value. */
12791 if (aligned_size)
12793 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12794 sizetype from another type of the same width and signedness. */
12795 if (TREE_TYPE (aligned_size) != sizetype)
12796 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12797 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12798 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12801 /* Otherwise, take the size from that of the element type. Substitute
12802 any PLACEHOLDER_EXPR that we have. */
12803 else
12804 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12807 /* Return a tree representing the lower bound of the array mentioned in
12808 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12810 tree
12811 array_ref_low_bound (tree exp)
12813 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12815 /* If a lower bound is specified in EXP, use it. */
12816 if (TREE_OPERAND (exp, 2))
12817 return TREE_OPERAND (exp, 2);
12819 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12820 substituting for a PLACEHOLDER_EXPR as needed. */
12821 if (domain_type && TYPE_MIN_VALUE (domain_type))
12822 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12824 /* Otherwise, return a zero of the appropriate type. */
12825 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12826 return (idxtype == error_mark_node
12827 ? integer_zero_node : build_int_cst (idxtype, 0));
12830 /* Return a tree representing the upper bound of the array mentioned in
12831 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12833 tree
12834 array_ref_up_bound (tree exp)
12836 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12838 /* If there is a domain type and it has an upper bound, use it, substituting
12839 for a PLACEHOLDER_EXPR as needed. */
12840 if (domain_type && TYPE_MAX_VALUE (domain_type))
12841 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12843 /* Otherwise fail. */
12844 return NULL_TREE;
12847 /* Returns true if REF is an array reference, a component reference,
12848 or a memory reference to an array whose actual size might be larger
12849 than its upper bound implies; there are multiple cases:
12850 A. a ref to a flexible array member at the end of a structure;
12851 B. a ref to an array with a different type against the original decl;
12852 for example:
12854 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12855 (*((char(*)[16])&a[0]))[i+8]
12857 C. a ref to an array that was passed as a parameter;
12858 for example:
12860 int test (uint8_t *p, uint32_t t[1][1], int n) {
12861 for (int i = 0; i < 4; i++, p++)
12862 t[i][0] = ...;
12864 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12867 bool
12868 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12870 /* The TYPE for this array reference. */
12871 tree atype = NULL_TREE;
12872 /* The FIELD_DECL for the array field in the containing structure. */
12873 tree afield_decl = NULL_TREE;
12874 /* Whether this array is the trailing array of a structure. */
12875 bool is_trailing_array_tmp = false;
12876 if (!is_trailing_array)
12877 is_trailing_array = &is_trailing_array_tmp;
12879 if (TREE_CODE (ref) == ARRAY_REF
12880 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12882 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12883 ref = TREE_OPERAND (ref, 0);
12885 else if (TREE_CODE (ref) == COMPONENT_REF
12886 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12888 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12889 afield_decl = TREE_OPERAND (ref, 1);
12891 else if (TREE_CODE (ref) == MEM_REF)
12893 tree arg = TREE_OPERAND (ref, 0);
12894 if (TREE_CODE (arg) == ADDR_EXPR)
12895 arg = TREE_OPERAND (arg, 0);
12896 tree argtype = TREE_TYPE (arg);
12897 if (TREE_CODE (argtype) == RECORD_TYPE)
12899 if (tree fld = last_field (argtype))
12901 atype = TREE_TYPE (fld);
12902 afield_decl = fld;
12903 if (TREE_CODE (atype) != ARRAY_TYPE)
12904 return false;
12905 if (VAR_P (arg) && DECL_SIZE (fld))
12906 return false;
12908 else
12909 return false;
12911 else
12912 return false;
12914 else
12915 return false;
12917 if (TREE_CODE (ref) == STRING_CST)
12918 return false;
12920 tree ref_to_array = ref;
12921 while (handled_component_p (ref))
12923 /* If the reference chain contains a component reference to a
12924 non-union type and another field follows, the reference
12925 is not at the end of a structure. */
12926 if (TREE_CODE (ref) == COMPONENT_REF)
12928 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12930 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12931 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12932 nextf = DECL_CHAIN (nextf);
12933 if (nextf)
12934 return false;
12937 /* If we have a multi-dimensional array we do not consider
12938 a non-innermost dimension as a flexible array if the whole
12939 multi-dimensional array is at struct end.
12940 Same for an array of aggregates with a trailing array
12941 member. */
12942 else if (TREE_CODE (ref) == ARRAY_REF)
12943 return false;
12944 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12946 /* If we view an underlying object as something else, then what we
12947 gathered up to now is what we have to rely on. */
12948 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12949 break;
12950 else
12951 gcc_unreachable ();
12953 ref = TREE_OPERAND (ref, 0);
12956 gcc_assert (!afield_decl
12957 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12959 /* The array is now at the end of the struct. Treat a flexible array
12960 member as always subject to extension, even into just padding constrained by
12961 an underlying decl. */
12962 if (! TYPE_SIZE (atype)
12963 || ! TYPE_DOMAIN (atype)
12964 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12966 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12967 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12970 /* If the reference is based on a declared entity, the size of the array
12971 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12972 ref = get_base_address (ref);
12973 if (ref
12974 && DECL_P (ref)
12975 && !(flag_unconstrained_commons
12976 && VAR_P (ref) && DECL_COMMON (ref))
12977 && DECL_SIZE_UNIT (ref)
12978 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12980 /* If the object itself is the array it is not at struct end. */
12981 if (DECL_P (ref_to_array))
12982 return false;
12984 /* Check whether the array domain covers all of the available
12985 padding. */
12986 poly_int64 offset;
12987 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12988 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12989 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12991 *is_trailing_array
12992 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12993 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12995 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12997 *is_trailing_array
12998 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12999 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13002 /* If at least one extra element fits it is a flexarray. */
13003 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13004 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13005 + 2)
13006 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13007 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13009 *is_trailing_array
13010 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13011 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
13014 return false;
13017 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
13018 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
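/* Editor's illustration of case A above (hypothetical types): for

     struct S { int n; char data[]; };
     struct S *p;
     ... p->data[i] ...

   the trailing  data  member has no TYPE_SIZE / TYPE_MAX_VALUE, so the
   reference is reported as having flexible size and *IS_TRAILING_ARRAY
   is set; with  char data[4]  inside a declared object the domain check
   against DECL_SIZE_UNIT applies instead.  */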
13022 /* Return a tree representing the offset, in bytes, of the field referenced
13023 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13025 tree
13026 component_ref_field_offset (tree exp)
13028 tree aligned_offset = TREE_OPERAND (exp, 2);
13029 tree field = TREE_OPERAND (exp, 1);
13030 location_t loc = EXPR_LOCATION (exp);
13032 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13033 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13034 value. */
13035 if (aligned_offset)
13037 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13038 sizetype from another type of the same width and signedness. */
13039 if (TREE_TYPE (aligned_offset) != sizetype)
13040 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13041 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13042 size_int (DECL_OFFSET_ALIGN (field)
13043 / BITS_PER_UNIT));
13046 /* Otherwise, take the offset from that of the field. Substitute
13047 any PLACEHOLDER_EXPR that we have. */
13048 else
13049 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13052 /* Given the initializer INIT, return the initializer for the field
13053 DECL if it exists, otherwise null. Used to obtain the initializer
13054 for a flexible array member and determine its size. */
13056 static tree
13057 get_initializer_for (tree init, tree decl)
13059 STRIP_NOPS (init);
13061 tree fld, fld_init;
13062 unsigned HOST_WIDE_INT i;
13063 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13065 if (decl == fld)
13066 return fld_init;
13068 if (TREE_CODE (fld) == CONSTRUCTOR)
13070 fld_init = get_initializer_for (fld_init, decl);
13071 if (fld_init)
13072 return fld_init;
13076 return NULL_TREE;
13079 /* Determines the special array member type for the array reference REF. */
13080 special_array_member
13081 component_ref_sam_type (tree ref)
13083 special_array_member sam_type = special_array_member::none;
13085 tree member = TREE_OPERAND (ref, 1);
13086 tree memsize = DECL_SIZE_UNIT (member);
13087 if (memsize)
13089 tree memtype = TREE_TYPE (member);
13090 if (TREE_CODE (memtype) != ARRAY_TYPE)
13091 return sam_type;
13093 bool trailing = false;
13094 (void) array_ref_flexible_size_p (ref, &trailing);
13095 bool zero_elts = integer_zerop (memsize);
13096 if (zero_elts && integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype))))
13098 /* If the array element has zero size, verify whether it is a flexible
13099 array member or a zero-length array. Clear zero_elts if
13100 it has one or more members or is a VLA member. */
13101 if (tree dom = TYPE_DOMAIN (memtype))
13102 if (tree min = TYPE_MIN_VALUE (dom))
13103 if (tree max = TYPE_MAX_VALUE (dom))
13104 if (TREE_CODE (min) != INTEGER_CST
13105 || TREE_CODE (max) != INTEGER_CST
13106 || !((integer_zerop (min) && integer_all_onesp (max))
13107 || tree_int_cst_lt (max, min)))
13108 zero_elts = false;
13110 if (!trailing && !zero_elts)
13111 /* MEMBER is an interior array with more than one element. */
13112 return special_array_member::int_n;
13114 if (zero_elts)
13116 if (trailing)
13117 return special_array_member::trail_0;
13118 else
13119 return special_array_member::int_0;
13122 if (!zero_elts)
13123 if (tree dom = TYPE_DOMAIN (memtype))
13124 if (tree min = TYPE_MIN_VALUE (dom))
13125 if (tree max = TYPE_MAX_VALUE (dom))
13126 if (TREE_CODE (min) == INTEGER_CST
13127 && TREE_CODE (max) == INTEGER_CST)
13129 offset_int minidx = wi::to_offset (min);
13130 offset_int maxidx = wi::to_offset (max);
13131 offset_int neltsm1 = maxidx - minidx;
13132 if (neltsm1 > 0)
13133 /* MEMBER is a trailing array with more than
13134 one element. */
13135 return special_array_member::trail_n;
13137 if (neltsm1 == 0)
13138 return special_array_member::trail_1;
13142 return sam_type;
13145 /* Determines the size of the member referenced by the COMPONENT_REF
13146 REF, using its initializer expression if necessary in order to
13147 determine the size of an initialized flexible array member.
13148 If non-null, set *SAM to the type of special array member.
13149 Returns the size as sizetype (which might be zero for an object
13150 with an uninitialized flexible array member) or null if the size
13151 cannot be determined. */
13153 tree
13154 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13156 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13158 special_array_member sambuf;
13159 if (!sam)
13160 sam = &sambuf;
13161 *sam = component_ref_sam_type (ref);
13163 /* The object/argument referenced by the COMPONENT_REF and its type. */
13164 tree arg = TREE_OPERAND (ref, 0);
13165 tree argtype = TREE_TYPE (arg);
13166 /* The referenced member. */
13167 tree member = TREE_OPERAND (ref, 1);
13169 tree memsize = DECL_SIZE_UNIT (member);
13170 if (memsize)
13172 tree memtype = TREE_TYPE (member);
13173 if (TREE_CODE (memtype) != ARRAY_TYPE)
13174 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13175 to the type of a class with a virtual base which doesn't
13176 reflect the size of the virtual's members (see pr97595).
13177 If that's the case fail for now and implement something
13178 more robust in the future. */
13179 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13180 ? memsize : NULL_TREE);
13182 /* Arrays of two or more elements are treated as normal arrays by default. */
13183 if (*sam == special_array_member::int_n
13184 || *sam == special_array_member::trail_n)
13185 return memsize;
13187 tree afield_decl = TREE_OPERAND (ref, 1);
13188 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13189 /* If the trailing array is not a flexible array member, treat it as
13190 a normal array. */
13191 if (DECL_NOT_FLEXARRAY (afield_decl)
13192 && *sam != special_array_member::int_0)
13193 return memsize;
13195 if (*sam == special_array_member::int_0)
13196 memsize = NULL_TREE;
13198 /* For a reference to a flexible array member of a union
13199 use the size of the union instead of the size of the member. */
13200 if (TREE_CODE (argtype) == UNION_TYPE)
13201 memsize = TYPE_SIZE_UNIT (argtype);
13204 /* MEMBER is either a bona fide flexible array member, or a zero-element
13205 array member, or an array of length one treated as such. */
13207 /* If the reference is to a declared object and the member a true
13208 flexible array, try to determine its size from its initializer. */
13209 poly_int64 baseoff = 0;
13210 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13211 if (!base || !VAR_P (base))
13213 if (*sam != special_array_member::int_0)
13214 return NULL_TREE;
13216 if (TREE_CODE (arg) != COMPONENT_REF)
13217 return NULL_TREE;
13219 base = arg;
13220 while (TREE_CODE (base) == COMPONENT_REF)
13221 base = TREE_OPERAND (base, 0);
13222 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13225 /* BASE is the declared object of which MEMBER is either a member
13226 or that is cast to ARGTYPE (e.g., a char buffer used to store
13227 an ARGTYPE object). */
13228 tree basetype = TREE_TYPE (base);
13230 /* Determine the base type of the referenced object. If it's
13231 the same as ARGTYPE and MEMBER has a known size, return it. */
13232 tree bt = basetype;
13233 if (*sam != special_array_member::int_0)
13234 while (TREE_CODE (bt) == ARRAY_TYPE)
13235 bt = TREE_TYPE (bt);
13236 bool typematch = useless_type_conversion_p (argtype, bt);
13237 if (memsize && typematch)
13238 return memsize;
13240 memsize = NULL_TREE;
13242 if (typematch)
13243 /* MEMBER is a true flexible array member. Compute its size from
13244 the initializer of the BASE object if it has one. */
13245 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13246 if (init != error_mark_node)
13248 init = get_initializer_for (init, member);
13249 if (init)
13251 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13252 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13254 /* Use the larger of the initializer size and the tail
13255 padding in the enclosing struct. */
13256 poly_int64 rsz = tree_to_poly_int64 (refsize);
13257 rsz -= baseoff;
13258 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13259 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13262 baseoff = 0;
13266 if (!memsize)
13268 if (typematch)
13270 if (DECL_P (base)
13271 && DECL_EXTERNAL (base)
13272 && bt == basetype
13273 && *sam != special_array_member::int_0)
13274 /* The size of a flexible array member of an extern struct
13275 with no initializer cannot be determined (it's defined
13276 in another translation unit and can have an initializer
13277 with an arbitrary number of elements). */
13278 return NULL_TREE;
13280 /* Use the size of the base struct or, for interior zero-length
13281 arrays, the size of the enclosing type. */
13282 memsize = TYPE_SIZE_UNIT (bt);
13284 else if (DECL_P (base))
13285 /* Use the size of the BASE object (possibly an array of some
13286 other type such as char used to store the struct). */
13287 memsize = DECL_SIZE_UNIT (base);
13288 else
13289 return NULL_TREE;
13292 /* If the flexible array member has a known size use the greater
13293 of it and the tail padding in the enclosing struct.
13294 Otherwise, when the size of the flexible array member is unknown
13295 and the referenced object is not a struct, use the size of its
13296 type when known. This detects sizes of array buffers when cast
13297 to struct types with flexible array members. */
13298 if (memsize)
13300 if (!tree_fits_poly_int64_p (memsize))
13301 return NULL_TREE;
13302 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13303 if (known_lt (baseoff, memsz64))
13305 memsz64 -= baseoff;
13306 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13308 return size_zero_node;
13311 /* Return "don't know" for an external non-array object since its
13312 flexible array member can be initialized to have any number of
13313 elements. Otherwise, return zero because the flexible array
13314 member has no elements. */
13315 return (DECL_P (base)
13316 && DECL_EXTERNAL (base)
13317 && (!typematch
13318 || TREE_CODE (basetype) != ARRAY_TYPE)
13319 ? NULL_TREE : size_zero_node);
13322 /* Return the machine mode of T. For vectors, returns the mode of the
13323 inner type. The main use case is to feed the result to HONOR_NANS,
13324 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13326 machine_mode
13327 element_mode (const_tree t)
13329 if (!TYPE_P (t))
13330 t = TREE_TYPE (t);
13331 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13332 t = TREE_TYPE (t);
13333 return TYPE_MODE (t);
13336 /* Vector types need to re-check the target flags each time we report
13337 the machine mode. We need to do this because attribute target can
13338 change the result of vector_mode_supported_p and have_regs_of_mode
13339 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13340 change on a per-function basis. */
13341 /* ??? Possibly a better solution is to run through all the types
13342 referenced by a function and re-compute the TYPE_MODE once, rather
13343 than make the TYPE_MODE macro call a function. */
13345 machine_mode
13346 vector_type_mode (const_tree t)
13348 machine_mode mode;
13350 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13352 mode = t->type_common.mode;
13353 if (VECTOR_MODE_P (mode)
13354 && (!targetm.vector_mode_supported_p (mode)
13355 || !have_regs_of_mode[mode]))
13357 scalar_int_mode innermode;
13359 /* For integers, try mapping it to a same-sized scalar mode. */
13360 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13362 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13363 * GET_MODE_BITSIZE (innermode));
13364 scalar_int_mode mode;
13365 if (int_mode_for_size (size, 0).exists (&mode)
13366 && have_regs_of_mode[mode])
13367 return mode;
13370 return BLKmode;
13373 return mode;
13376 /* Return the size in bits of each element of vector type TYPE. */
13378 unsigned int
13379 vector_element_bits (const_tree type)
13381 gcc_checking_assert (VECTOR_TYPE_P (type));
13382 if (VECTOR_BOOLEAN_TYPE_P (type))
13383 return TYPE_PRECISION (TREE_TYPE (type));
13384 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13387 /* Calculate the size in bits of each element of vector type TYPE
13388 and return the result as a tree of type bitsizetype. */
13390 tree
13391 vector_element_bits_tree (const_tree type)
13393 gcc_checking_assert (VECTOR_TYPE_P (type));
13394 if (VECTOR_BOOLEAN_TYPE_P (type))
13395 return bitsize_int (vector_element_bits (type));
13396 return TYPE_SIZE (TREE_TYPE (type));
13399 /* Verify that basic properties of T match TV and thus T can be a variant of
13400 TV. TV should be the more specified variant (i.e. the main variant). */
13402 static bool
13403 verify_type_variant (const_tree t, tree tv)
13405 /* Type variant can differ by:
13407 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13408 ENCODE_QUAL_ADDR_SPACE.
13409 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13410 in this case some values may not be set in the variant types
13411 (see TYPE_COMPLETE_P checks).
13412 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13413 - by TYPE_NAME and attributes (i.e. when a variant originates from a typedef)
13414 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13415 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13416 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13417 this is necessary to make it possible to merge types from different TUs
13418 - arrays, pointers and references may have TREE_TYPE that is a variant
13419 of TREE_TYPE of their main variants.
13420 - aggregates may have new TYPE_FIELDS list that list variants of
13421 the main variant TYPE_FIELDS.
13422 - vector types may differ by TYPE_VECTOR_OPAQUE
13425 /* Convenience macro for matching individual fields. */
13426 #define verify_variant_match(flag) \
13427 do { \
13428 if (flag (tv) != flag (t)) \
13430 error ("type variant differs by %s", #flag); \
13431 debug_tree (tv); \
13432 return false; \
13434 } while (false)
13436 /* tree_base checks. */
13438 verify_variant_match (TREE_CODE);
13439 /* FIXME: Ada builds non-artificial variants of artificial types. */
13440 #if 0
13441 if (TYPE_ARTIFICIAL (tv))
13442 verify_variant_match (TYPE_ARTIFICIAL);
13443 #endif
13444 if (POINTER_TYPE_P (tv))
13445 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13446 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for the Ada build. */
13447 verify_variant_match (TYPE_UNSIGNED);
13448 verify_variant_match (TYPE_PACKED);
13449 if (TREE_CODE (t) == REFERENCE_TYPE)
13450 verify_variant_match (TYPE_REF_IS_RVALUE);
13451 if (AGGREGATE_TYPE_P (t))
13452 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13453 else
13454 verify_variant_match (TYPE_SATURATING);
13455 /* FIXME: This check triggers during the libstdc++ build. */
13456 #if 0
13457 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13458 verify_variant_match (TYPE_FINAL_P);
13459 #endif
13461 /* tree_type_common checks. */
13463 if (COMPLETE_TYPE_P (t))
13465 verify_variant_match (TYPE_MODE);
13466 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13467 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13468 verify_variant_match (TYPE_SIZE);
13469 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13470 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13471 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13473 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13474 TYPE_SIZE_UNIT (tv), 0));
13475 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13476 debug_tree (tv);
13477 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13478 debug_tree (TYPE_SIZE_UNIT (tv));
13479 error ("type%'s %<TYPE_SIZE_UNIT%>");
13480 debug_tree (TYPE_SIZE_UNIT (t));
13481 return false;
13483 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13485 verify_variant_match (TYPE_PRECISION_RAW);
13486 if (RECORD_OR_UNION_TYPE_P (t))
13487 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13488 else if (TREE_CODE (t) == ARRAY_TYPE)
13489 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13490 /* During LTO we merge variant lists from different translation units
13491 that may differ by TYPE_CONTEXT, which in turn may point
13492 to TRANSLATION_UNIT_DECL.
13493 Ada also builds variants of types with different TYPE_CONTEXT. */
13494 #if 0
13495 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13496 verify_variant_match (TYPE_CONTEXT);
13497 #endif
13498 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13499 verify_variant_match (TYPE_STRING_FLAG);
13500 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13501 verify_variant_match (TYPE_CXX_ODR_P);
13502 if (TYPE_ALIAS_SET_KNOWN_P (t))
13504 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13505 debug_tree (tv);
13506 return false;
13509 /* tree_type_non_common checks. */
13511 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13512 and dangles the pointer from time to time. */
13513 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13514 && (in_lto_p || !TYPE_VFIELD (tv)
13515 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13517 error ("type variant has different %<TYPE_VFIELD%>");
13518 debug_tree (tv);
13519 return false;
13521 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13522 || TREE_CODE (t) == INTEGER_TYPE
13523 || TREE_CODE (t) == BOOLEAN_TYPE
13524 || TREE_CODE (t) == BITINT_TYPE
13525 || SCALAR_FLOAT_TYPE_P (t)
13526 || FIXED_POINT_TYPE_P (t))
13528 verify_variant_match (TYPE_MAX_VALUE);
13529 verify_variant_match (TYPE_MIN_VALUE);
13531 if (TREE_CODE (t) == METHOD_TYPE)
13532 verify_variant_match (TYPE_METHOD_BASETYPE);
13533 if (TREE_CODE (t) == OFFSET_TYPE)
13534 verify_variant_match (TYPE_OFFSET_BASETYPE);
13535 if (TREE_CODE (t) == ARRAY_TYPE)
13536 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13537 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13538 or even in the type's main variant. This is needed to make bootstrap pass
13539 and the bug seems new in GCC 5.
13540 The C++ FE should be updated to make this consistent and we should check
13541 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13542 is a match with the main variant.
13544 Also disable the check for Java for now because of a parser hack that builds
13545 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
13546 of the copies. */
13547 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13548 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13549 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13550 Since there is no cheap way to tell a C++ type from a Java one w/o LTO,
13551 do the checking at LTO time only. */
13552 && (in_lto_p && odr_type_p (t)))
13554 error ("type variant has different %<TYPE_BINFO%>");
13555 debug_tree (tv);
13556 error ("type variant%'s %<TYPE_BINFO%>");
13557 debug_tree (TYPE_BINFO (tv));
13558 error ("type%'s %<TYPE_BINFO%>");
13559 debug_tree (TYPE_BINFO (t));
13560 return false;
13563 /* Check various uses of TYPE_VALUES_RAW. */
13564 if (TREE_CODE (t) == ENUMERAL_TYPE
13565 && TYPE_VALUES (t))
13566 verify_variant_match (TYPE_VALUES);
13567 else if (TREE_CODE (t) == ARRAY_TYPE)
13568 verify_variant_match (TYPE_DOMAIN);
13569 /* Permit incomplete variants of complete type. While FEs may complete
13570 all variants, this does not happen for C++ templates in all cases. */
13571 else if (RECORD_OR_UNION_TYPE_P (t)
13572 && COMPLETE_TYPE_P (t)
13573 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13575 tree f1, f2;
13577 /* Fortran builds qualified variants as new records with items of
13578 qualified type. Verify that they look the same. */
13579 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13580 f1 && f2;
13581 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13582 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13583 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13584 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13585 /* FIXME: gfc_nonrestricted_type builds all types as variants
13586 with the exception of pointer types. It deeply copies the type,
13587 which means that we may end up with a variant type
13588 referring to a non-variant pointer. We may change it to
13589 produce types as variants, too, like
13590 objc_get_protocol_qualified_type does. */
13591 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13592 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13593 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13594 break;
13595 if (f1 || f2)
13597 error ("type variant has different %<TYPE_FIELDS%>");
13598 debug_tree (tv);
13599 error ("first mismatch is field");
13600 debug_tree (f1);
13601 error ("and field");
13602 debug_tree (f2);
13603 return false;
13606 else if (FUNC_OR_METHOD_TYPE_P (t))
13607 verify_variant_match (TYPE_ARG_TYPES);
13608 /* For C++ the qualified variant of an array type is really an array type
13609 of the qualified TREE_TYPE.
13610 objc builds variants of pointer types where the pointed-to type is a
13611 variant, too, in objc_get_protocol_qualified_type. */
13612 if (TREE_TYPE (t) != TREE_TYPE (tv)
13613 && ((TREE_CODE (t) != ARRAY_TYPE
13614 && !POINTER_TYPE_P (t))
13615 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13616 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13618 error ("type variant has different %<TREE_TYPE%>");
13619 debug_tree (tv);
13620 error ("type variant%'s %<TREE_TYPE%>");
13621 debug_tree (TREE_TYPE (tv));
13622 error ("type%'s %<TREE_TYPE%>");
13623 debug_tree (TREE_TYPE (t));
13624 return false;
13626 if (type_with_alias_set_p (t)
13627 && !gimple_canonical_types_compatible_p (t, tv, false))
13629 error ("type is not compatible with its variant");
13630 debug_tree (tv);
13631 error ("type variant%'s %<TREE_TYPE%>");
13632 debug_tree (TREE_TYPE (tv));
13633 error ("type%'s %<TREE_TYPE%>");
13634 debug_tree (TREE_TYPE (t));
13635 return false;
13637 return true;
13638 #undef verify_variant_match
13642 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13643 the middle-end types_compatible_p function. It needs to avoid
13644 claiming types are different for types that should be treated
13645 the same with respect to TBAA. Canonical types are also used
13646 for IL consistency checks via the useless_type_conversion_p
13647 predicate which does not handle all type kinds itself but falls
13648 back to pointer-comparison of TYPE_CANONICAL for aggregates
13649 for example. */
13651 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13652 type calculation because we need to allow inter-operability between signed
13653 and unsigned variants. */
13655 bool
13656 type_with_interoperable_signedness (const_tree type)
13658 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13659 signed char and unsigned char. Similarly the Fortran FE builds
13660 C_SIZE_T as a signed type, while C defines it as unsigned. */
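/* For instance, on a typical LP64 target this returns true for the 8-bit
   character types and for 64-bit integer types whose precision matches
   size_t, but false for a plain 32-bit int, whose signedness therefore
   still matters for canonical type merging.  (The exact set depends on
   the target's precisions; this is only an illustration.)  */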
13662 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13663 == INTEGER_TYPE
13664 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13665 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13668 /* Return true iff T1 and T2 are structurally identical as far as
13669 TBAA is concerned.
13670 This function is used both by lto.cc canonical type merging and by the
13671 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13672 types that have TYPE_CANONICAL defined and assume them equivalent. This is
13673 useful only for LTO because only in these cases TYPE_CANONICAL equivalence
13674 corresponds to the one defined by gimple_canonical_types_compatible_p. */
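/* As a rough sketch of the checks that follow: under this predicate
   "int *" and "float *" are considered compatible (all pointers in the
   same address space are globbed together below), while for example
   "struct { int i; }" and "struct { short s; }" are not, since they
   already differ in TYPE_MODE.  */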
13676 bool
13677 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13678 bool trust_type_canonical)
13680 /* Type variants should be the same as the main variant. When not doing sanity
13681 checking to verify this fact, go to main variants and save some work. */
13682 if (trust_type_canonical)
13684 t1 = TYPE_MAIN_VARIANT (t1);
13685 t2 = TYPE_MAIN_VARIANT (t2);
13688 /* Check first for the obvious case of pointer identity. */
13689 if (t1 == t2)
13690 return true;
13692 /* Check that we have two types to compare. */
13693 if (t1 == NULL_TREE || t2 == NULL_TREE)
13694 return false;
13696 /* We consider complete types always compatible with incomplete types.
13697 This does not make sense for canonical type calculation and thus we
13698 need to ensure that we are never called on them.
13700 FIXME: For more correctness the function probably should have three modes:
13701 1) mode assuming that types are complete, matching their structure
13702 2) mode allowing incomplete types but producing equivalence classes
13703 and thus ignoring all info from complete types
13704 3) mode allowing incomplete types to match complete but checking
13705 compatibility between complete types.
13707 1 and 2 can be used for canonical type calculation. 3 is the real
13708 definition of type compatibility that can be used e.g. for warnings during
13709 declaration merging. */
13711 gcc_assert (!trust_type_canonical
13712 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13714 /* If the types have been previously registered and found equal
13715 they still are. */
13717 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13718 && trust_type_canonical)
13720 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13721 they are always NULL, but they are set to non-NULL for types
13722 constructed by build_pointer_type and variants. In this case the
13723 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13724 all pointers are considered equal). Be sure to not return false
13725 negatives. */
13726 gcc_checking_assert (canonical_type_used_p (t1)
13727 && canonical_type_used_p (t2));
13728 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13731 /* For types where we do ODR based TBAA the canonical type is always
13732 set correctly, so we know that types are different if their
13733 canonical types do not match. */
13734 if (trust_type_canonical
13735 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13736 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13737 return false;
13739 /* Can't be the same type if the types don't have the same code. */
13740 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13741 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13742 return false;
13744 /* Qualifiers do not matter for canonical type comparison purposes. */
13746 /* Void types and nullptr types are always the same. */
13747 if (VOID_TYPE_P (t1)
13748 || TREE_CODE (t1) == NULLPTR_TYPE)
13749 return true;
13751 /* Can't be the same type if they have different mode. */
13752 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13753 return false;
13755 /* Non-aggregate types can be handled cheaply. */
13756 if (INTEGRAL_TYPE_P (t1)
13757 || SCALAR_FLOAT_TYPE_P (t1)
13758 || FIXED_POINT_TYPE_P (t1)
13759 || VECTOR_TYPE_P (t1)
13760 || TREE_CODE (t1) == COMPLEX_TYPE
13761 || TREE_CODE (t1) == OFFSET_TYPE
13762 || POINTER_TYPE_P (t1))
13764 /* Can't be the same type if they have different precision. */
13765 if (TYPE_PRECISION_RAW (t1) != TYPE_PRECISION_RAW (t2))
13766 return false;
13768 /* In some cases the signed and unsigned types are required to be
13769 inter-operable. */
13770 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13771 && !type_with_interoperable_signedness (t1))
13772 return false;
13774 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13775 interoperable with "signed char". Unless all frontends are revisited
13776 to agree on these types, we must ignore the flag completely. */
13778 /* The Fortran standard defines the C_PTR type to be compatible with every
13779 C pointer. For this reason we need to glob all pointers into one.
13780 Still, pointers in different address spaces are not compatible. */
13781 if (POINTER_TYPE_P (t1))
13783 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13784 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13785 return false;
13788 /* Tail-recurse to components. */
13789 if (VECTOR_TYPE_P (t1)
13790 || TREE_CODE (t1) == COMPLEX_TYPE)
13791 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13792 TREE_TYPE (t2),
13793 trust_type_canonical);
13795 return true;
13798 /* Do type-specific comparisons. */
13799 switch (TREE_CODE (t1))
13801 case ARRAY_TYPE:
13802 /* Array types are the same if the element types are the same and
13803 the number of elements is the same. */
13804 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13805 trust_type_canonical)
13806 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13807 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13808 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13809 return false;
13810 else
13812 tree i1 = TYPE_DOMAIN (t1);
13813 tree i2 = TYPE_DOMAIN (t2);
13815 /* For an incomplete external array, the type domain can be
13816 NULL_TREE. Check this condition also. */
13817 if (i1 == NULL_TREE && i2 == NULL_TREE)
13818 return true;
13819 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13820 return false;
13821 else
13823 tree min1 = TYPE_MIN_VALUE (i1);
13824 tree min2 = TYPE_MIN_VALUE (i2);
13825 tree max1 = TYPE_MAX_VALUE (i1);
13826 tree max2 = TYPE_MAX_VALUE (i2);
13828 /* The minimum/maximum values have to be the same. */
13829 if ((min1 == min2
13830 || (min1 && min2
13831 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13832 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13833 || operand_equal_p (min1, min2, 0))))
13834 && (max1 == max2
13835 || (max1 && max2
13836 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13837 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13838 || operand_equal_p (max1, max2, 0)))))
13839 return true;
13840 else
13841 return false;
13845 case METHOD_TYPE:
13846 case FUNCTION_TYPE:
13847 /* Function types are the same if the return type and argument types
13848 are the same. */
13849 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13850 trust_type_canonical))
13851 return false;
13853 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13854 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13855 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13856 return true;
13857 else
13859 tree parms1, parms2;
13861 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13862 parms1 && parms2;
13863 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13865 if (!gimple_canonical_types_compatible_p
13866 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13867 trust_type_canonical))
13868 return false;
13871 if (parms1 || parms2)
13872 return false;
13874 return true;
13877 case RECORD_TYPE:
13878 case UNION_TYPE:
13879 case QUAL_UNION_TYPE:
13881 tree f1, f2;
13883 /* Don't try to compare variants of an incomplete type, before
13884 TYPE_FIELDS has been copied around. */
13885 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13886 return true;
13889 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13890 return false;
13892 /* For aggregate types, all the fields must be the same. */
13893 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13894 f1 || f2;
13895 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13897 /* Skip non-fields and zero-sized fields. */
13898 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13899 || (DECL_SIZE (f1)
13900 && integer_zerop (DECL_SIZE (f1)))))
13901 f1 = TREE_CHAIN (f1);
13902 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13903 || (DECL_SIZE (f2)
13904 && integer_zerop (DECL_SIZE (f2)))))
13905 f2 = TREE_CHAIN (f2);
13906 if (!f1 || !f2)
13907 break;
13908 /* The fields must have the same name, offset and type. */
13909 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13910 || !gimple_compare_field_offset (f1, f2)
13911 || !gimple_canonical_types_compatible_p
13912 (TREE_TYPE (f1), TREE_TYPE (f2),
13913 trust_type_canonical))
13914 return false;
13917 /* If one aggregate has more fields than the other, they
13918 are not the same. */
13919 if (f1 || f2)
13920 return false;
13922 return true;
13925 default:
13926 /* Consider all types with language specific trees in them mutually
13927 compatible. This is executed only from verify_type and false
13928 positives can be tolerated. */
13929 gcc_assert (!in_lto_p);
13930 return true;
13934 /* An OPAQUE_TYPE T should have only size and alignment information,
13935 and its mode should be of class MODE_OPAQUE. This function verifies
13936 that these properties of T match those of TV, which is the main variant
13937 of T, and TC, which is the canonical type of T. */
13939 static void
13940 verify_opaque_type (const_tree t, tree tv, tree tc)
13942 gcc_assert (OPAQUE_TYPE_P (t));
13943 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13944 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13946 /* For an opaque type T1, check whether its properties match
13947 the corresponding ones of the other opaque type T2, and emit
13948 error messages for any that are inconsistent. */
13949 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13950 const char *kind_msg)
13952 if (!OPAQUE_TYPE_P (t2))
13954 error ("type %s is not an opaque type", kind_msg);
13955 debug_tree (t2);
13956 return;
13958 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13960 error ("type %s is not with opaque mode", kind_msg);
13961 debug_tree (t2);
13962 return;
13964 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13966 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13967 debug_tree (t2);
13968 return;
13970 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13971 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13972 if (maybe_ne (t1_size, t2_size))
13974 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13975 debug_tree (t2);
13976 return;
13978 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13980 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13981 debug_tree (t2);
13982 return;
13984 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13986 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13987 debug_tree (t2);
13988 return;
13992 if (t != tv)
13993 check_properties_for_opaque_type (t, tv, "variant");
13995 if (t != tc)
13996 check_properties_for_opaque_type (t, tc, "canonical");
13999 /* Verify type T. */
14001 void
14002 verify_type (const_tree t)
14004 bool error_found = false;
14005 tree mv = TYPE_MAIN_VARIANT (t);
14006 tree ct = TYPE_CANONICAL (t);
14008 if (OPAQUE_TYPE_P (t))
14010 verify_opaque_type (t, mv, ct);
14011 return;
14014 if (!mv)
14016 error ("main variant is not defined");
14017 error_found = true;
14019 else if (mv != TYPE_MAIN_VARIANT (mv))
14021 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14022 debug_tree (mv);
14023 error_found = true;
14025 else if (t != mv && !verify_type_variant (t, mv))
14026 error_found = true;
14028 if (!ct)
14030 else if (TYPE_CANONICAL (ct) != ct)
14032 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14033 debug_tree (ct);
14034 error_found = true;
14036 /* Method and function types cannot be used to address memory and thus
14037 TYPE_CANONICAL really matters only for determining useless conversions.
14039 FIXME: C++ FE produces declarations of builtin functions that are not
14040 compatible with main variants. */
14041 else if (TREE_CODE (t) == FUNCTION_TYPE)
14043 else if (t != ct
14044 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14045 with variably sized arrays because their sizes are possibly
14046 gimplified to different variables. */
14047 && !variably_modified_type_p (ct, NULL)
14048 && !gimple_canonical_types_compatible_p (t, ct, false)
14049 && COMPLETE_TYPE_P (t))
14051 error ("%<TYPE_CANONICAL%> is not compatible");
14052 debug_tree (ct);
14053 error_found = true;
14056 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14057 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14059 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14060 debug_tree (ct);
14061 error_found = true;
14063 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14065 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14066 debug_tree (ct);
14067 debug_tree (TYPE_MAIN_VARIANT (ct));
14068 error_found = true;
14072 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14073 if (RECORD_OR_UNION_TYPE_P (t))
14075 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14076 and dangles the pointer from time to time. */
14077 if (TYPE_VFIELD (t)
14078 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14079 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14081 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14082 debug_tree (TYPE_VFIELD (t));
14083 error_found = true;
14086 else if (TREE_CODE (t) == POINTER_TYPE)
14088 if (TYPE_NEXT_PTR_TO (t)
14089 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14091 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14092 debug_tree (TYPE_NEXT_PTR_TO (t));
14093 error_found = true;
14096 else if (TREE_CODE (t) == REFERENCE_TYPE)
14098 if (TYPE_NEXT_REF_TO (t)
14099 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14101 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14102 debug_tree (TYPE_NEXT_REF_TO (t));
14103 error_found = true;
14106 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14107 || FIXED_POINT_TYPE_P (t))
14109 /* FIXME: The following check should pass:
14110 useless_type_conversion_p (const_cast <tree> (t),
14111 TREE_TYPE (TYPE_MIN_VALUE (t)))
14112 but does not for C sizetypes in LTO. */
14115 /* Check various uses of TYPE_MAXVAL_RAW. */
14116 if (RECORD_OR_UNION_TYPE_P (t))
14118 if (!TYPE_BINFO (t))
14120 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14122 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14123 debug_tree (TYPE_BINFO (t));
14124 error_found = true;
14126 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14128 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14129 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14130 error_found = true;
14133 else if (FUNC_OR_METHOD_TYPE_P (t))
14135 if (TYPE_METHOD_BASETYPE (t)
14136 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14137 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14139 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14140 debug_tree (TYPE_METHOD_BASETYPE (t));
14141 error_found = true;
14144 else if (TREE_CODE (t) == OFFSET_TYPE)
14146 if (TYPE_OFFSET_BASETYPE (t)
14147 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14148 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14150 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14151 debug_tree (TYPE_OFFSET_BASETYPE (t));
14152 error_found = true;
14155 else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
14156 || FIXED_POINT_TYPE_P (t))
14158 /* FIXME: The following check should pass:
14159 useless_type_conversion_p (const_cast <tree> (t),
14160 TREE_TYPE (TYPE_MAX_VALUE (t)))
14161 but does not for C sizetypes in LTO. */
14163 else if (TREE_CODE (t) == ARRAY_TYPE)
14165 if (TYPE_ARRAY_MAX_SIZE (t)
14166 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14168 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14169 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14170 error_found = true;
14173 else if (TYPE_MAX_VALUE_RAW (t))
14175 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14176 debug_tree (TYPE_MAX_VALUE_RAW (t));
14177 error_found = true;
14180 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14182 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14183 debug_tree (TYPE_LANG_SLOT_1 (t));
14184 error_found = true;
14187 /* Check various uses of TYPE_VALUES_RAW. */
14188 if (TREE_CODE (t) == ENUMERAL_TYPE)
14189 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14191 tree value = TREE_VALUE (l);
14192 tree name = TREE_PURPOSE (l);
14194 /* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14195 CONST_DECL of ENUMERAL_TYPE. */
14196 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14198 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14199 debug_tree (value);
14200 debug_tree (name);
14201 error_found = true;
14203 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14204 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14205 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14207 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14208 "to the enum");
14209 debug_tree (value);
14210 debug_tree (name);
14211 error_found = true;
14213 if (TREE_CODE (name) != IDENTIFIER_NODE)
14215 error ("enum value name is not %<IDENTIFIER_NODE%>");
14216 debug_tree (value);
14217 debug_tree (name);
14218 error_found = true;
14221 else if (TREE_CODE (t) == ARRAY_TYPE)
14223 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14225 error ("array %<TYPE_DOMAIN%> is not integer type");
14226 debug_tree (TYPE_DOMAIN (t));
14227 error_found = true;
14230 else if (RECORD_OR_UNION_TYPE_P (t))
14232 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14234 error ("%<TYPE_FIELDS%> defined in incomplete type");
14235 error_found = true;
14237 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14239 /* TODO: verify properties of decls. */
14240 if (TREE_CODE (fld) == FIELD_DECL)
14242 else if (TREE_CODE (fld) == TYPE_DECL)
14244 else if (TREE_CODE (fld) == CONST_DECL)
14246 else if (VAR_P (fld))
14248 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14250 else if (TREE_CODE (fld) == USING_DECL)
14252 else if (TREE_CODE (fld) == FUNCTION_DECL)
14254 else
14256 error ("wrong tree in %<TYPE_FIELDS%> list");
14257 debug_tree (fld);
14258 error_found = true;
14262 else if (TREE_CODE (t) == INTEGER_TYPE
14263 || TREE_CODE (t) == BOOLEAN_TYPE
14264 || TREE_CODE (t) == BITINT_TYPE
14265 || TREE_CODE (t) == OFFSET_TYPE
14266 || TREE_CODE (t) == REFERENCE_TYPE
14267 || TREE_CODE (t) == NULLPTR_TYPE
14268 || TREE_CODE (t) == POINTER_TYPE)
14270 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14272 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14273 "is %p",
14274 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14275 error_found = true;
14277 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14279 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14280 debug_tree (TYPE_CACHED_VALUES (t));
14281 error_found = true;
14283 /* Verify just enough of the cache to ensure that no one copied it to a new
14284 type. All copying should go through copy_node, which should clear it. */
14285 else if (TYPE_CACHED_VALUES_P (t))
14287 int i;
14288 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14289 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14290 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14292 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14293 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14294 error_found = true;
14295 break;
14299 else if (FUNC_OR_METHOD_TYPE_P (t))
14300 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14302 /* C++ FE uses TREE_PURPOSE to store initial values. */
14303 if (TREE_PURPOSE (l) && in_lto_p)
14305 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14306 debug_tree (l);
14307 error_found = true;
14309 if (!TYPE_P (TREE_VALUE (l)))
14311 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14312 debug_tree (l);
14313 error_found = true;
14316 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14318 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14319 debug_tree (TYPE_VALUES_RAW (t));
14320 error_found = true;
14322 if (TREE_CODE (t) != INTEGER_TYPE
14323 && TREE_CODE (t) != BOOLEAN_TYPE
14324 && TREE_CODE (t) != BITINT_TYPE
14325 && TREE_CODE (t) != OFFSET_TYPE
14326 && TREE_CODE (t) != REFERENCE_TYPE
14327 && TREE_CODE (t) != NULLPTR_TYPE
14328 && TREE_CODE (t) != POINTER_TYPE
14329 && TYPE_CACHED_VALUES_P (t))
14331 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14332 error_found = true;
14335 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14336 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14337 of a type. */
14338 if (TREE_CODE (t) == METHOD_TYPE
14339 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14341 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14342 error_found = true;
14345 if (error_found)
14347 debug_tree (const_cast <tree> (t));
14348 internal_error ("%qs failed", __func__);
14353 /* Return 1 if ARG interpreted as signed in its precision is known to be
14354 always non-negative, 2 if ARG is known to be always negative, or 3 if
14355 ARG may be either. */
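/* A rough illustration: for an INTEGER_CST such as -7 the result is 2,
   for an SSA name whose recorded global range is [0, 100] it is 1, and
   for a value with no useful range information it is 3.  */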
14357 int
14358 get_range_pos_neg (tree arg)
14360 if (arg == error_mark_node)
14361 return 3;
14363 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14364 int cnt = 0;
14365 if (TREE_CODE (arg) == INTEGER_CST)
14367 wide_int w = wi::sext (wi::to_wide (arg), prec);
14368 if (wi::neg_p (w))
14369 return 2;
14370 else
14371 return 1;
14373 while (CONVERT_EXPR_P (arg)
14374 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14375 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14377 arg = TREE_OPERAND (arg, 0);
14378 /* Narrower value zero extended into wider type
14379 will always result in positive values. */
14380 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14381 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14382 return 1;
14383 prec = TYPE_PRECISION (TREE_TYPE (arg));
14384 if (++cnt > 30)
14385 return 3;
14388 if (TREE_CODE (arg) != SSA_NAME)
14389 return 3;
14390 value_range r;
14391 while (!get_global_range_query ()->range_of_expr (r, arg)
14392 || r.undefined_p () || r.varying_p ())
14394 gimple *g = SSA_NAME_DEF_STMT (arg);
14395 if (is_gimple_assign (g)
14396 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14398 tree t = gimple_assign_rhs1 (g);
14399 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14400 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14402 if (TYPE_UNSIGNED (TREE_TYPE (t))
14403 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14404 return 1;
14405 prec = TYPE_PRECISION (TREE_TYPE (t));
14406 arg = t;
14407 if (++cnt > 30)
14408 return 3;
14409 continue;
14412 return 3;
14414 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14416 /* For unsigned values, the "positive" range comes
14417 below the "negative" range. */
14418 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14419 return 1;
14420 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14421 return 2;
14423 else
14425 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14426 return 1;
14427 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14428 return 2;
14430 return 3;
14436 /* Return true if ARG is marked with the nonnull attribute in the
14437 current function signature. */
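/* For example, given a hypothetical current function declared as

     __attribute__ ((nonnull (2))) void f (void *p, void *q);

   this returns false for the PARM_DECL of P and true for the PARM_DECL
   of Q.  The static chain, the THIS pointer of a method and reference
   parameters (the latter two only with -fdelete-null-pointer-checks)
   are handled specially below and always count as non-null.  */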
14439 bool
14440 nonnull_arg_p (const_tree arg)
14442 tree t, attrs, fntype;
14443 unsigned HOST_WIDE_INT arg_num;
14445 gcc_assert (TREE_CODE (arg) == PARM_DECL
14446 && (POINTER_TYPE_P (TREE_TYPE (arg))
14447 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14449 /* The static chain decl is always non null. */
14450 if (arg == cfun->static_chain_decl)
14451 return true;
14453 /* THIS argument of method is always non-NULL. */
14454 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14455 && arg == DECL_ARGUMENTS (cfun->decl)
14456 && flag_delete_null_pointer_checks)
14457 return true;
14459 /* Values passed by reference are always non-NULL. */
14460 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14461 && flag_delete_null_pointer_checks)
14462 return true;
14464 fntype = TREE_TYPE (cfun->decl);
14465 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14467 attrs = lookup_attribute ("nonnull", attrs);
14469 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14470 if (attrs == NULL_TREE)
14471 return false;
14473 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14474 if (TREE_VALUE (attrs) == NULL_TREE)
14475 return true;
14477 /* Get the position number for ARG in the function signature. */
14478 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14480 t = DECL_CHAIN (t), arg_num++)
14482 if (t == arg)
14483 break;
14486 gcc_assert (t == arg);
14488 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14489 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14491 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14492 return true;
14496 return false;
14499 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14500 information. */
14502 location_t
14503 set_block (location_t loc, tree block)
14505 location_t pure_loc = get_pure_location (loc);
14506 source_range src_range = get_range_from_loc (line_table, loc);
14507 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14508 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block, discriminator);
14511 location_t
14512 set_source_range (tree expr, location_t start, location_t finish)
14514 source_range src_range;
14515 src_range.m_start = start;
14516 src_range.m_finish = finish;
14517 return set_source_range (expr, src_range);
14520 location_t
14521 set_source_range (tree expr, source_range src_range)
14523 if (!EXPR_P (expr))
14524 return UNKNOWN_LOCATION;
14526 location_t expr_location = EXPR_LOCATION (expr);
14527 location_t pure_loc = get_pure_location (expr_location);
14528 unsigned discriminator = get_discriminator_from_loc (expr_location);
14529 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14530 pure_loc,
14531 src_range,
14532 NULL,
14533 discriminator);
14534 SET_EXPR_LOCATION (expr, adhoc);
14535 return adhoc;
14538 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
14539 if !CAN_HAVE_LOCATION_P (expr).
14541 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14542 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14544 Wrapper nodes can be identified using location_wrapper_p. */
14546 tree
14547 maybe_wrap_with_location (tree expr, location_t loc)
14549 if (expr == NULL)
14550 return NULL;
14551 if (loc == UNKNOWN_LOCATION)
14552 return expr;
14553 if (CAN_HAVE_LOCATION_P (expr))
14554 return expr;
14555 /* We should only be adding wrappers for constants and for decls,
14556 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14557 gcc_assert (CONSTANT_CLASS_P (expr)
14558 || DECL_P (expr)
14559 || EXCEPTIONAL_CLASS_P (expr));
14561 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14562 any impact of the wrapper nodes. */
14563 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14564 return expr;
14566 /* Compiler-generated temporary variables don't need a wrapper. */
14567 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14568 return expr;
14570 /* If any auto_suppress_location_wrappers are active, don't create
14571 wrappers. */
14572 if (suppress_location_wrappers > 0)
14573 return expr;
14575 tree_code code
14576 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14577 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14578 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14579 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14580 /* Mark this node as being a wrapper. */
14581 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14582 return wrapper;
14585 int suppress_location_wrappers;
14587 /* Return the name of combined function FN, for debugging purposes. */
14589 const char *
14590 combined_fn_name (combined_fn fn)
14592 if (builtin_fn_p (fn))
14594 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14595 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14597 else
14598 return internal_fn_name (as_internal_fn (fn));
14601 /* Return a bitmap with a bit set corresponding to each argument in
14602 a function call type FNTYPE declared with attribute nonnull,
14603 or null if none of the function's arguments are nonnull. The caller
14604 must free the bitmap. */
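/* For example, for a hypothetical declaration

     void f (void *, int, void *) __attribute__ ((nonnull (1, 3)));

   the returned bitmap has bits 0 and 2 set (attribute positions are
   one-based, the bitmap is zero-based), whereas a bare
   __attribute__ ((nonnull)) yields an empty but non-null bitmap,
   meaning every pointer argument is non-null.  */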
14606 bitmap
14607 get_nonnull_args (const_tree fntype)
14609 if (fntype == NULL_TREE)
14610 return NULL;
14612 bitmap argmap = NULL;
14613 if (TREE_CODE (fntype) == METHOD_TYPE)
14615 /* The this pointer in C++ non-static member functions is
14616 implicitly nonnull whether or not it's declared as such. */
14617 argmap = BITMAP_ALLOC (NULL);
14618 bitmap_set_bit (argmap, 0);
14621 tree attrs = TYPE_ATTRIBUTES (fntype);
14622 if (!attrs)
14623 return argmap;
14625 /* A function declaration can specify multiple attribute nonnull,
14626 each with zero or more arguments. The loop below creates a bitmap
14627 representing a union of all the arguments. An empty (but non-null)
14628 bitmap means that all arguments have been declared nonnull. */
14629 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14631 attrs = lookup_attribute ("nonnull", attrs);
14632 if (!attrs)
14633 break;
14635 if (!argmap)
14636 argmap = BITMAP_ALLOC (NULL);
14638 if (!TREE_VALUE (attrs))
14640 /* Clear the bitmap in case a previous attribute nonnull
14641 set it and this one overrides it for all arguments. */
14642 bitmap_clear (argmap);
14643 return argmap;
14646 /* Iterate over the indices of the arguments declared nonnull
14647 and set a bit for each. */
14648 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14650 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14651 bitmap_set_bit (argmap, val);
14655 return argmap;
14658 /* Returns true if TYPE is empty: a structure or union whose non-padding
14659 fields (recursively) all have empty type, or an array of zero length or of such an empty element type. */
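/* For example, in C++ both of the hypothetical types

     struct empty {};
     struct also_empty { struct empty e; struct empty a[2]; };

   are empty in this sense, while any type with a non-padding scalar
   field is not.  */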
14661 bool
14662 is_empty_type (const_tree type)
14664 if (RECORD_OR_UNION_TYPE_P (type))
14666 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14667 if (TREE_CODE (field) == FIELD_DECL
14668 && !DECL_PADDING_P (field)
14669 && !is_empty_type (TREE_TYPE (field)))
14670 return false;
14671 return true;
14673 else if (TREE_CODE (type) == ARRAY_TYPE)
14674 return (integer_minus_onep (array_type_nelts (type))
14675 || TYPE_DOMAIN (type) == NULL_TREE
14676 || is_empty_type (TREE_TYPE (type)));
14677 return false;
14680 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14681 that shouldn't be passed via stack. */
14683 bool
14684 default_is_empty_record (const_tree type)
14686 if (!abi_version_at_least (12))
14687 return false;
14689 if (type == error_mark_node)
14690 return false;
14692 if (TREE_ADDRESSABLE (type))
14693 return false;
14695 return is_empty_type (TYPE_MAIN_VARIANT (type));
14698 /* Determine whether TYPE is a structure with a flexible array member,
14699 or a union containing such a structure (possibly recursively). */
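/* For example, for the hypothetical C types

     struct s { int n; char data[]; };
     union u { struct s s; int i; };
     struct t { int n; char data[4]; };

   the predicate is true for struct s (flexible array member) and for
   union u (it contains such a struct), but false for struct t.  */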
14701 bool
14702 flexible_array_type_p (const_tree type)
14704 tree x, last;
14705 switch (TREE_CODE (type))
14707 case RECORD_TYPE:
14708 last = NULL_TREE;
14709 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14710 if (TREE_CODE (x) == FIELD_DECL)
14711 last = x;
14712 if (last == NULL_TREE)
14713 return false;
14714 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14715 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14716 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14717 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14718 return true;
14719 return false;
14720 case UNION_TYPE:
14721 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14723 if (TREE_CODE (x) == FIELD_DECL
14724 && flexible_array_type_p (TREE_TYPE (x)))
14725 return true;
14727 return false;
14728 default:
14729 return false;
14733 /* Like int_size_in_bytes, but handle empty records specially. */
14735 HOST_WIDE_INT
14736 arg_int_size_in_bytes (const_tree type)
14738 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14741 /* Like size_in_bytes, but handle empty records specially. */
14743 tree
14744 arg_size_in_bytes (const_tree type)
14746 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14749 /* Return true if an expression with CODE has to have the same result type as
14750 its first operand. */
14752 bool
14753 expr_type_first_operand_type_p (tree_code code)
14755 switch (code)
14757 case NEGATE_EXPR:
14758 case ABS_EXPR:
14759 case BIT_NOT_EXPR:
14760 case PAREN_EXPR:
14761 case CONJ_EXPR:
14763 case PLUS_EXPR:
14764 case MINUS_EXPR:
14765 case MULT_EXPR:
14766 case TRUNC_DIV_EXPR:
14767 case CEIL_DIV_EXPR:
14768 case FLOOR_DIV_EXPR:
14769 case ROUND_DIV_EXPR:
14770 case TRUNC_MOD_EXPR:
14771 case CEIL_MOD_EXPR:
14772 case FLOOR_MOD_EXPR:
14773 case ROUND_MOD_EXPR:
14774 case RDIV_EXPR:
14775 case EXACT_DIV_EXPR:
14776 case MIN_EXPR:
14777 case MAX_EXPR:
14778 case BIT_IOR_EXPR:
14779 case BIT_XOR_EXPR:
14780 case BIT_AND_EXPR:
14782 case LSHIFT_EXPR:
14783 case RSHIFT_EXPR:
14784 case LROTATE_EXPR:
14785 case RROTATE_EXPR:
14786 return true;
14788 default:
14789 return false;
14793 /* Return a typenode for the "standard" C type with a given name. */
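/* For instance, get_typenode_from_name ("unsigned int") returns
   unsigned_type_node, and a NULL or empty NAME returns NULL_TREE; any
   other unrecognized name is rejected via gcc_unreachable.  */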
14794 tree
14795 get_typenode_from_name (const char *name)
14797 if (name == NULL || *name == '\0')
14798 return NULL_TREE;
14800 if (strcmp (name, "char") == 0)
14801 return char_type_node;
14802 if (strcmp (name, "unsigned char") == 0)
14803 return unsigned_char_type_node;
14804 if (strcmp (name, "signed char") == 0)
14805 return signed_char_type_node;
14807 if (strcmp (name, "short int") == 0)
14808 return short_integer_type_node;
14809 if (strcmp (name, "short unsigned int") == 0)
14810 return short_unsigned_type_node;
14812 if (strcmp (name, "int") == 0)
14813 return integer_type_node;
14814 if (strcmp (name, "unsigned int") == 0)
14815 return unsigned_type_node;
14817 if (strcmp (name, "long int") == 0)
14818 return long_integer_type_node;
14819 if (strcmp (name, "long unsigned int") == 0)
14820 return long_unsigned_type_node;
14822 if (strcmp (name, "long long int") == 0)
14823 return long_long_integer_type_node;
14824 if (strcmp (name, "long long unsigned int") == 0)
14825 return long_long_unsigned_type_node;
14827 gcc_unreachable ();
14830 /* List of pointer types used to declare builtins before we have seen their
14831 real declaration.
14833 Keep the size up to date in tree.h ! */
14834 const builtin_structptr_type builtin_structptr_types[6] =
14836 { fileptr_type_node, ptr_type_node, "FILE" },
14837 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14838 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14839 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14840 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14841 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14844 /* Return the maximum object size. */
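/* In other words, this is TYPE_MAX_VALUE (ptrdiff_type_node), i.e.
   PTRDIFF_MAX for the target (for instance 2^63 - 1 on a typical
   64-bit target).  */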
14846 tree
14847 max_object_size (void)
14849 /* To do: Make this a configurable parameter. */
14850 return TYPE_MAX_VALUE (ptrdiff_type_node);
14853 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14854 parameter default to false and that weeds out error_mark_node. */
14856 bool
14857 verify_type_context (location_t loc, type_context_kind context,
14858 const_tree type, bool silent_p)
14860 if (type == error_mark_node)
14861 return true;
14863 gcc_assert (TYPE_P (type));
14864 return (!targetm.verify_type_context
14865 || targetm.verify_type_context (loc, context, type, silent_p));
14868 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14869 delete operators. Return false if they may or may not name such
14870 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14871 do not. */
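/* For example, on a typical LP64 target the pair _Znwm (operator new
   taking size_t) and _ZdlPv (operator delete taking void *) is
   accepted, while mixing the array form _Znam with the scalar _ZdlPv
   is rejected as a certain mismatch (*PCERTAIN set to true).  */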
14873 bool
14874 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14875 bool *pcertain /* = NULL */)
14877 bool certain;
14878 if (!pcertain)
14879 pcertain = &certain;
14881 const char *new_name = IDENTIFIER_POINTER (new_asm);
14882 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14883 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14884 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14886 /* The following failures are due to invalid names so they're not
14887 considered certain mismatches. */
14888 *pcertain = false;
14890 if (new_len < 5 || delete_len < 6)
14891 return false;
14892 if (new_name[0] == '_')
14893 ++new_name, --new_len;
14894 if (new_name[0] == '_')
14895 ++new_name, --new_len;
14896 if (delete_name[0] == '_')
14897 ++delete_name, --delete_len;
14898 if (delete_name[0] == '_')
14899 ++delete_name, --delete_len;
14900 if (new_len < 4 || delete_len < 5)
14901 return false;
14903 /* The following failures are due to names of user-defined operators
14904 so they're also not considered certain mismatches. */
14906 /* *_len is now just the length after initial underscores. */
14907 if (new_name[0] != 'Z' || new_name[1] != 'n')
14908 return false;
14909 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14910 return false;
14912 /* The following failures are certain mismatches. */
14913 *pcertain = true;
14915 /* _Znw must match _Zdl, _Zna must match _Zda. */
14916 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14917 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14918 return false;
14919 /* 'j', 'm' and 'y' correspond to size_t. */
14920 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14921 return false;
14922 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14923 return false;
14924 if (new_len == 4
14925 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14927 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14928 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14929 if (delete_len == 5)
14930 return true;
14931 if (delete_len == 6 && delete_name[5] == new_name[3])
14932 return true;
14933 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14934 return true;
14936 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14937 || (new_len == 33
14938 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14940 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14941 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14942 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14943 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14944 return true;
14945 if (delete_len == 21
14946 && delete_name[5] == new_name[3]
14947 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14948 return true;
14949 if (delete_len == 34
14950 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14951 return true;
14954 /* The negative result is conservative. */
14955 *pcertain = false;
14956 return false;
14959 /* Return the zero-based number corresponding to the argument being
14960 deallocated if FNDECL is a deallocation function or an out-of-bounds
14961 value if it isn't. */
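/* For example, the result is 0 for BUILT_IN_FREE, BUILT_IN_REALLOC and
   replaceable forms of operator delete (the pointer operand), and
   UINT_MAX for unrelated built-ins or for declarations without an
   associated deallocation attribute.  */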
14963 unsigned
14964 fndecl_dealloc_argno (tree fndecl)
14966 /* A call to operator delete isn't recognized as one to a built-in. */
14967 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14969 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14970 return 0;
14972 /* Avoid placement delete that's not been inlined. */
14973 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14974 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14975 || id_equal (fname, "_ZdaPvS_")) // array form
14976 return UINT_MAX;
14977 return 0;
14980 /* TODO: Handle user-defined functions with attribute malloc? Handle
14981 known non-built-ins like fopen? */
14982 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14984 switch (DECL_FUNCTION_CODE (fndecl))
14986 case BUILT_IN_FREE:
14987 case BUILT_IN_REALLOC:
14988 return 0;
14989 default:
14990 break;
14992 return UINT_MAX;
14995 tree attrs = DECL_ATTRIBUTES (fndecl);
14996 if (!attrs)
14997 return UINT_MAX;
14999 for (tree atfree = attrs;
15000 (atfree = lookup_attribute ("*dealloc", atfree));
15001 atfree = TREE_CHAIN (atfree))
15003 tree alloc = TREE_VALUE (atfree);
15004 if (!alloc)
15005 continue;
15007 tree pos = TREE_CHAIN (alloc);
15008 if (!pos)
15009 return 0;
15011 pos = TREE_VALUE (pos);
15012 return TREE_INT_CST_LOW (pos) - 1;
15015 return UINT_MAX;
15018 /* If EXPR refers to a character array or pointer declared attribute
15019 nonstring, return a decl for that array or pointer and set *REF
15020 to the referenced enclosing object or pointer. Otherwise return
15021 null. */
15023 tree
15024 get_attr_nonstring_decl (tree expr, tree *ref)
15026 tree decl = expr;
15027 tree var = NULL_TREE;
15028 if (TREE_CODE (decl) == SSA_NAME)
15030 gimple *def = SSA_NAME_DEF_STMT (decl);
15032 if (is_gimple_assign (def))
15034 tree_code code = gimple_assign_rhs_code (def);
15035 if (code == ADDR_EXPR
15036 || code == COMPONENT_REF
15037 || code == VAR_DECL)
15038 decl = gimple_assign_rhs1 (def);
15040 else
15041 var = SSA_NAME_VAR (decl);
15044 if (TREE_CODE (decl) == ADDR_EXPR)
15045 decl = TREE_OPERAND (decl, 0);
15047 /* To simplify calling code, store the referenced DECL regardless of
15048 the attribute determined below, but avoid storing the SSA_NAME_VAR
15049 obtained above (it's not useful for dataflow purposes). */
15050 if (ref)
15051 *ref = decl;
15053 /* Use the SSA_NAME_VAR that was determined above to see if it's
15054 declared nonstring. Otherwise drill down into the referenced
15055 DECL. */
15056 if (var)
15057 decl = var;
15058 else if (TREE_CODE (decl) == ARRAY_REF)
15059 decl = TREE_OPERAND (decl, 0);
15060 else if (TREE_CODE (decl) == COMPONENT_REF)
15061 decl = TREE_OPERAND (decl, 1);
15062 else if (TREE_CODE (decl) == MEM_REF)
15063 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15065 if (DECL_P (decl)
15066 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15067 return decl;
15069 return NULL_TREE;
15072 /* Return the length of the attribute names string
15073 if the arglist chain has more than one entry, -1 otherwise. */
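/* For example, for a hypothetical
     __attribute__ ((target_clones ("avx2", "arch=atom", "default")))
   ARGLIST holds three strings, so more than one version is requested,
   and the result is the sum of the string lengths plus one byte per
   string (here 5 + 10 + 8 = 23); a single-version list yields -1.  */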
15075 int
15076 get_target_clone_attr_len (tree arglist)
15078 tree arg;
15079 int str_len_sum = 0;
15080 int argnum = 0;
15082 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15084 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15085 size_t len = strlen (str);
15086 str_len_sum += len + 1;
15087 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15088 argnum++;
15089 argnum++;
15091 if (argnum <= 1)
15092 return -1;
15093 return str_len_sum;
15096 void
15097 tree_cc_finalize (void)
15099 clear_nonstandard_integer_type_cache ();
15100 vec_free (bitint_type_cache);
15103 #if CHECKING_P
15105 namespace selftest {
15107 /* Selftests for tree. */
15109 /* Verify that integer constants are sane. */
15111 static void
15112 test_integer_constants ()
15114 ASSERT_TRUE (integer_type_node != NULL);
15115 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15117 tree type = integer_type_node;
15119 tree zero = build_zero_cst (type);
15120 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15121 ASSERT_EQ (type, TREE_TYPE (zero));
15123 tree one = build_int_cst (type, 1);
15124 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15125 ASSERT_EQ (type, TREE_TYPE (one));
15128 /* Verify identifiers. */
15130 static void
15131 test_identifiers ()
15133 tree identifier = get_identifier ("foo");
15134 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15135 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15138 /* Verify LABEL_DECL. */
15140 static void
15141 test_labels ()
15143 tree identifier = get_identifier ("err");
15144 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15145 identifier, void_type_node);
15146 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15147 ASSERT_FALSE (FORCED_LABEL (label_decl));
15150 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15151 are given by VALS. */
15153 static tree
15154 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15156 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15157 tree_vector_builder builder (type, vals.length (), 1);
15158 builder.splice (vals);
15159 return builder.build ();
15162 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15164 static void
15165 check_vector_cst (const vec<tree> &expected, tree actual)
15167 ASSERT_KNOWN_EQ (expected.length (),
15168 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15169 for (unsigned int i = 0; i < expected.length (); ++i)
15170 ASSERT_EQ (wi::to_wide (expected[i]),
15171 wi::to_wide (vector_cst_elt (actual, i)));
15174 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15175 and that its elements match EXPECTED. */
15177 static void
15178 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15179 unsigned int npatterns)
15181 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15182 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15183 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15184 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15185 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15186 check_vector_cst (expected, actual);
15189 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15190 and NPATTERNS background elements, and that its elements match
15191 EXPECTED. */
15193 static void
15194 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15195 unsigned int npatterns)
15197 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15198 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15199 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15200 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15201 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15202 check_vector_cst (expected, actual);
15205 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15206 and that its elements match EXPECTED. */
15208 static void
15209 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15210 unsigned int npatterns)
15212 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15213 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15214 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15215 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15216 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15217 check_vector_cst (expected, actual);
15220 /* Test the creation of VECTOR_CSTs. */
15222 static void
15223 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15225 auto_vec<tree, 8> elements (8);
15226 elements.quick_grow (8);
15227 tree element_type = build_nonstandard_integer_type (16, true);
15228 tree vector_type = build_vector_type (element_type, 8);
15230 /* Test a simple linear series with a base of 0 and a step of 1:
15231 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15232 for (unsigned int i = 0; i < 8; ++i)
15233 elements[i] = build_int_cst (element_type, i);
15234 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15235 check_vector_cst_stepped (elements, vector, 1);
15237 /* Try the same with the first element replaced by 100:
15238 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15239 elements[0] = build_int_cst (element_type, 100);
15240 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15241 check_vector_cst_stepped (elements, vector, 1);
15243 /* Try a series that wraps around.
15244 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15245 for (unsigned int i = 1; i < 8; ++i)
15246 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15247 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15248 check_vector_cst_stepped (elements, vector, 1);
15250 /* Try a downward series:
15251 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15252 for (unsigned int i = 1; i < 8; ++i)
15253 elements[i] = build_int_cst (element_type, 80 - i);
15254 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15255 check_vector_cst_stepped (elements, vector, 1);
15257 /* Try two interleaved series with different bases and steps:
15258 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15259 elements[1] = build_int_cst (element_type, 53);
15260 for (unsigned int i = 2; i < 8; i += 2)
15262 elements[i] = build_int_cst (element_type, 70 - i * 2);
15263 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15265 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15266 check_vector_cst_stepped (elements, vector, 2);
15268 /* Try a duplicated value:
15269 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15270 for (unsigned int i = 1; i < 8; ++i)
15271 elements[i] = elements[0];
15272 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15273 check_vector_cst_duplicate (elements, vector, 1);
15275 /* Try an interleaved duplicated value:
15276 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15277 elements[1] = build_int_cst (element_type, 55);
15278 for (unsigned int i = 2; i < 8; ++i)
15279 elements[i] = elements[i - 2];
15280 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15281 check_vector_cst_duplicate (elements, vector, 2);
15283 /* Try a duplicated value with 2 exceptions
15284 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15285 elements[0] = build_int_cst (element_type, 41);
15286 elements[1] = build_int_cst (element_type, 97);
15287 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15288 check_vector_cst_fill (elements, vector, 2);
15290 /* Try with and without a step
15291 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15292 for (unsigned int i = 3; i < 8; i += 2)
15293 elements[i] = build_int_cst (element_type, i * 7);
15294 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15295 check_vector_cst_stepped (elements, vector, 2);
15297 /* Try a fully-general constant:
15298 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15299 elements[5] = build_int_cst (element_type, 9990);
15300 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15301 check_vector_cst_fill (elements, vector, 4);
15304 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15305 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15306 modifying its argument in-place. */
15308 static void
15309 check_strip_nops (tree node, tree expected)
15311 STRIP_NOPS (node);
15312 ASSERT_EQ (expected, node);
15315 /* Verify location wrappers. */
15317 static void
15318 test_location_wrappers ()
15320 location_t loc = BUILTINS_LOCATION;
15322 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15324 /* Wrapping a constant. */
15325 tree int_cst = build_int_cst (integer_type_node, 42);
15326 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15327 ASSERT_FALSE (location_wrapper_p (int_cst));
15329 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15330 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15331 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15332 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15334 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15335 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15337 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15338 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15339 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15340 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15342 /* Wrapping a STRING_CST. */
15343 tree string_cst = build_string (4, "foo");
15344 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15345 ASSERT_FALSE (location_wrapper_p (string_cst));
15347 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15348 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15349 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15350 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15351 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15354 /* Wrapping a variable. */
15355 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15356 get_identifier ("some_int_var"),
15357 integer_type_node);
15358 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15359 ASSERT_FALSE (location_wrapper_p (int_var));
15361 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15362 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15363 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15364 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15366 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15367 wrapper. */
15368 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15369 ASSERT_FALSE (location_wrapper_p (r_cast));
15370 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15372 /* Verify that STRIP_NOPS removes wrappers. */
15373 check_strip_nops (wrapped_int_cst, int_cst);
15374 check_strip_nops (wrapped_string_cst, string_cst);
15375 check_strip_nops (wrapped_int_var, int_var);
15376 }
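/* Location wrappers let constants and decls, which cannot themselves
   carry a location, be reported at a specific source position.  A
   minimal sketch of the round trip verified above, assuming SOME_LOC
   is a real location_t supplied by a front end:

     tree arg = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (arg, some_loc);
     location_t loc = EXPR_LOCATION (wrapped);
     tree inner = tree_strip_any_location_wrapper (wrapped);

   The wrapper is a NON_LVALUE_EXPR or VIEW_CONVERT_EXPR with the same
   type as its operand, so STRIP_NOPS and the strip helpers remove it
   without changing the value.  */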
15378 /* Test various tree predicates. Verify that location wrappers don't
15379 affect the results. */
15381 static void
15382 test_predicates ()
15383 {
15384 /* Build various constants and wrappers around them. */
15386 location_t loc = BUILTINS_LOCATION;
15388 tree i_0 = build_int_cst (integer_type_node, 0);
15389 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15391 tree i_1 = build_int_cst (integer_type_node, 1);
15392 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15394 tree i_m1 = build_int_cst (integer_type_node, -1);
15395 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15397 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15398 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15399 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15400 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15401 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15402 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15404 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15405 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15406 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15408 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15409 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15410 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15412 /* TODO: vector constants. */
15414 /* Test integer_onep. */
15415 ASSERT_FALSE (integer_onep (i_0));
15416 ASSERT_FALSE (integer_onep (wr_i_0));
15417 ASSERT_TRUE (integer_onep (i_1));
15418 ASSERT_TRUE (integer_onep (wr_i_1));
15419 ASSERT_FALSE (integer_onep (i_m1));
15420 ASSERT_FALSE (integer_onep (wr_i_m1));
15421 ASSERT_FALSE (integer_onep (f_0));
15422 ASSERT_FALSE (integer_onep (wr_f_0));
15423 ASSERT_FALSE (integer_onep (f_1));
15424 ASSERT_FALSE (integer_onep (wr_f_1));
15425 ASSERT_FALSE (integer_onep (f_m1));
15426 ASSERT_FALSE (integer_onep (wr_f_m1));
15427 ASSERT_FALSE (integer_onep (c_i_0));
15428 ASSERT_TRUE (integer_onep (c_i_1));
15429 ASSERT_FALSE (integer_onep (c_i_m1));
15430 ASSERT_FALSE (integer_onep (c_f_0));
15431 ASSERT_FALSE (integer_onep (c_f_1));
15432 ASSERT_FALSE (integer_onep (c_f_m1));
15434 /* Test integer_zerop. */
15435 ASSERT_TRUE (integer_zerop (i_0));
15436 ASSERT_TRUE (integer_zerop (wr_i_0));
15437 ASSERT_FALSE (integer_zerop (i_1));
15438 ASSERT_FALSE (integer_zerop (wr_i_1));
15439 ASSERT_FALSE (integer_zerop (i_m1));
15440 ASSERT_FALSE (integer_zerop (wr_i_m1));
15441 ASSERT_FALSE (integer_zerop (f_0));
15442 ASSERT_FALSE (integer_zerop (wr_f_0));
15443 ASSERT_FALSE (integer_zerop (f_1));
15444 ASSERT_FALSE (integer_zerop (wr_f_1));
15445 ASSERT_FALSE (integer_zerop (f_m1));
15446 ASSERT_FALSE (integer_zerop (wr_f_m1));
15447 ASSERT_TRUE (integer_zerop (c_i_0));
15448 ASSERT_FALSE (integer_zerop (c_i_1));
15449 ASSERT_FALSE (integer_zerop (c_i_m1));
15450 ASSERT_FALSE (integer_zerop (c_f_0));
15451 ASSERT_FALSE (integer_zerop (c_f_1));
15452 ASSERT_FALSE (integer_zerop (c_f_m1));
15454 /* Test integer_all_onesp. */
15455 ASSERT_FALSE (integer_all_onesp (i_0));
15456 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15457 ASSERT_FALSE (integer_all_onesp (i_1));
15458 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15459 ASSERT_TRUE (integer_all_onesp (i_m1));
15460 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15461 ASSERT_FALSE (integer_all_onesp (f_0));
15462 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15463 ASSERT_FALSE (integer_all_onesp (f_1));
15464 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15465 ASSERT_FALSE (integer_all_onesp (f_m1));
15466 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15467 ASSERT_FALSE (integer_all_onesp (c_i_0));
15468 ASSERT_FALSE (integer_all_onesp (c_i_1));
15469 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15470 ASSERT_FALSE (integer_all_onesp (c_f_0));
15471 ASSERT_FALSE (integer_all_onesp (c_f_1));
15472 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15474 /* Test integer_minus_onep. */
15475 ASSERT_FALSE (integer_minus_onep (i_0));
15476 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15477 ASSERT_FALSE (integer_minus_onep (i_1));
15478 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15479 ASSERT_TRUE (integer_minus_onep (i_m1));
15480 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15481 ASSERT_FALSE (integer_minus_onep (f_0));
15482 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15483 ASSERT_FALSE (integer_minus_onep (f_1));
15484 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15485 ASSERT_FALSE (integer_minus_onep (f_m1));
15486 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15487 ASSERT_FALSE (integer_minus_onep (c_i_0));
15488 ASSERT_FALSE (integer_minus_onep (c_i_1));
15489 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15490 ASSERT_FALSE (integer_minus_onep (c_f_0));
15491 ASSERT_FALSE (integer_minus_onep (c_f_1));
15492 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15494 /* Test integer_each_onep. */
15495 ASSERT_FALSE (integer_each_onep (i_0));
15496 ASSERT_FALSE (integer_each_onep (wr_i_0));
15497 ASSERT_TRUE (integer_each_onep (i_1));
15498 ASSERT_TRUE (integer_each_onep (wr_i_1));
15499 ASSERT_FALSE (integer_each_onep (i_m1));
15500 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15501 ASSERT_FALSE (integer_each_onep (f_0));
15502 ASSERT_FALSE (integer_each_onep (wr_f_0));
15503 ASSERT_FALSE (integer_each_onep (f_1));
15504 ASSERT_FALSE (integer_each_onep (wr_f_1));
15505 ASSERT_FALSE (integer_each_onep (f_m1));
15506 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15507 ASSERT_FALSE (integer_each_onep (c_i_0));
15508 ASSERT_FALSE (integer_each_onep (c_i_1));
15509 ASSERT_FALSE (integer_each_onep (c_i_m1));
15510 ASSERT_FALSE (integer_each_onep (c_f_0));
15511 ASSERT_FALSE (integer_each_onep (c_f_1));
15512 ASSERT_FALSE (integer_each_onep (c_f_m1));
15514 /* Test integer_truep. */
15515 ASSERT_FALSE (integer_truep (i_0));
15516 ASSERT_FALSE (integer_truep (wr_i_0));
15517 ASSERT_TRUE (integer_truep (i_1));
15518 ASSERT_TRUE (integer_truep (wr_i_1));
15519 ASSERT_FALSE (integer_truep (i_m1));
15520 ASSERT_FALSE (integer_truep (wr_i_m1));
15521 ASSERT_FALSE (integer_truep (f_0));
15522 ASSERT_FALSE (integer_truep (wr_f_0));
15523 ASSERT_FALSE (integer_truep (f_1));
15524 ASSERT_FALSE (integer_truep (wr_f_1));
15525 ASSERT_FALSE (integer_truep (f_m1));
15526 ASSERT_FALSE (integer_truep (wr_f_m1));
15527 ASSERT_FALSE (integer_truep (c_i_0));
15528 ASSERT_TRUE (integer_truep (c_i_1));
15529 ASSERT_FALSE (integer_truep (c_i_m1));
15530 ASSERT_FALSE (integer_truep (c_f_0));
15531 ASSERT_FALSE (integer_truep (c_f_1));
15532 ASSERT_FALSE (integer_truep (c_f_m1));
15534 /* Test integer_nonzerop. */
15535 ASSERT_FALSE (integer_nonzerop (i_0));
15536 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15537 ASSERT_TRUE (integer_nonzerop (i_1));
15538 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15539 ASSERT_TRUE (integer_nonzerop (i_m1));
15540 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15541 ASSERT_FALSE (integer_nonzerop (f_0));
15542 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15543 ASSERT_FALSE (integer_nonzerop (f_1));
15544 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15545 ASSERT_FALSE (integer_nonzerop (f_m1));
15546 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15547 ASSERT_FALSE (integer_nonzerop (c_i_0));
15548 ASSERT_TRUE (integer_nonzerop (c_i_1));
15549 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15550 ASSERT_FALSE (integer_nonzerop (c_f_0));
15551 ASSERT_FALSE (integer_nonzerop (c_f_1));
15552 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15554 /* Test real_zerop. */
15555 ASSERT_FALSE (real_zerop (i_0));
15556 ASSERT_FALSE (real_zerop (wr_i_0));
15557 ASSERT_FALSE (real_zerop (i_1));
15558 ASSERT_FALSE (real_zerop (wr_i_1));
15559 ASSERT_FALSE (real_zerop (i_m1));
15560 ASSERT_FALSE (real_zerop (wr_i_m1));
15561 ASSERT_TRUE (real_zerop (f_0));
15562 ASSERT_TRUE (real_zerop (wr_f_0));
15563 ASSERT_FALSE (real_zerop (f_1));
15564 ASSERT_FALSE (real_zerop (wr_f_1));
15565 ASSERT_FALSE (real_zerop (f_m1));
15566 ASSERT_FALSE (real_zerop (wr_f_m1));
15567 ASSERT_FALSE (real_zerop (c_i_0));
15568 ASSERT_FALSE (real_zerop (c_i_1));
15569 ASSERT_FALSE (real_zerop (c_i_m1));
15570 ASSERT_TRUE (real_zerop (c_f_0));
15571 ASSERT_FALSE (real_zerop (c_f_1));
15572 ASSERT_FALSE (real_zerop (c_f_m1));
15574 /* Test real_onep. */
15575 ASSERT_FALSE (real_onep (i_0));
15576 ASSERT_FALSE (real_onep (wr_i_0));
15577 ASSERT_FALSE (real_onep (i_1));
15578 ASSERT_FALSE (real_onep (wr_i_1));
15579 ASSERT_FALSE (real_onep (i_m1));
15580 ASSERT_FALSE (real_onep (wr_i_m1));
15581 ASSERT_FALSE (real_onep (f_0));
15582 ASSERT_FALSE (real_onep (wr_f_0));
15583 ASSERT_TRUE (real_onep (f_1));
15584 ASSERT_TRUE (real_onep (wr_f_1));
15585 ASSERT_FALSE (real_onep (f_m1));
15586 ASSERT_FALSE (real_onep (wr_f_m1));
15587 ASSERT_FALSE (real_onep (c_i_0));
15588 ASSERT_FALSE (real_onep (c_i_1));
15589 ASSERT_FALSE (real_onep (c_i_m1));
15590 ASSERT_FALSE (real_onep (c_f_0));
15591 ASSERT_TRUE (real_onep (c_f_1));
15592 ASSERT_FALSE (real_onep (c_f_m1));
15594 /* Test real_minus_onep. */
15595 ASSERT_FALSE (real_minus_onep (i_0));
15596 ASSERT_FALSE (real_minus_onep (wr_i_0));
15597 ASSERT_FALSE (real_minus_onep (i_1));
15598 ASSERT_FALSE (real_minus_onep (wr_i_1));
15599 ASSERT_FALSE (real_minus_onep (i_m1));
15600 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15601 ASSERT_FALSE (real_minus_onep (f_0));
15602 ASSERT_FALSE (real_minus_onep (wr_f_0));
15603 ASSERT_FALSE (real_minus_onep (f_1));
15604 ASSERT_FALSE (real_minus_onep (wr_f_1));
15605 ASSERT_TRUE (real_minus_onep (f_m1));
15606 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15607 ASSERT_FALSE (real_minus_onep (c_i_0));
15608 ASSERT_FALSE (real_minus_onep (c_i_1));
15609 ASSERT_FALSE (real_minus_onep (c_i_m1));
15610 ASSERT_FALSE (real_minus_onep (c_f_0));
15611 ASSERT_FALSE (real_minus_onep (c_f_1));
15612 ASSERT_TRUE (real_minus_onep (c_f_m1));
15614 /* Test zerop. */
15615 ASSERT_TRUE (zerop (i_0));
15616 ASSERT_TRUE (zerop (wr_i_0));
15617 ASSERT_FALSE (zerop (i_1));
15618 ASSERT_FALSE (zerop (wr_i_1));
15619 ASSERT_FALSE (zerop (i_m1));
15620 ASSERT_FALSE (zerop (wr_i_m1));
15621 ASSERT_TRUE (zerop (f_0));
15622 ASSERT_TRUE (zerop (wr_f_0));
15623 ASSERT_FALSE (zerop (f_1));
15624 ASSERT_FALSE (zerop (wr_f_1));
15625 ASSERT_FALSE (zerop (f_m1));
15626 ASSERT_FALSE (zerop (wr_f_m1));
15627 ASSERT_TRUE (zerop (c_i_0));
15628 ASSERT_FALSE (zerop (c_i_1));
15629 ASSERT_FALSE (zerop (c_i_m1));
15630 ASSERT_TRUE (zerop (c_f_0));
15631 ASSERT_FALSE (zerop (c_f_1));
15632 ASSERT_FALSE (zerop (c_f_m1));
15634 /* Test tree_expr_nonnegative_p. */
15635 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15636 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15637 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15638 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15639 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15640 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15641 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15642 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15643 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15644 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15645 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15646 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15647 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15648 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15649 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15650 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15651 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15652 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15654 /* Test tree_expr_nonzero_p. */
15655 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15656 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15657 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15658 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15659 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15660 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15662 /* Test integer_valued_real_p. */
15663 ASSERT_FALSE (integer_valued_real_p (i_0));
15664 ASSERT_TRUE (integer_valued_real_p (f_0));
15665 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15666 ASSERT_TRUE (integer_valued_real_p (f_1));
15667 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15669 /* Test integer_pow2p. */
15670 ASSERT_FALSE (integer_pow2p (i_0));
15671 ASSERT_TRUE (integer_pow2p (i_1));
15672 ASSERT_TRUE (integer_pow2p (wr_i_1));
15674 /* Test uniform_integer_cst_p. */
15675 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15676 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15677 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15678 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15679 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15680 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15681 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15682 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15683 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15684 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15685 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15686 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15687 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15688 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15689 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15690 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15691 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15692 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15693 }
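/* The predicates above deliberately look through location wrappers, so
   a constant that a front end wrapped for diagnostic purposes is still
   recognized by folding code.  A hypothetical caller might rely on
   that roughly as follows (OP0 and OP1 are placeholder operand names):

     if (integer_zerop (op1))
       return op0;
     if (integer_onep (op1))
       return op0;

   where the first simplification would be valid for an addition and
   the second for a multiplication.  */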
15695 /* Check that string escaping works correctly. */
15697 static void
15698 test_escaped_strings (void)
15699 {
15700 int saved_cutoff;
15701 escaped_string msg;
15703 msg.escape (NULL);
15704 /* ASSERT_STREQ does not accept NULL as a valid test
15705 result, so we have to use ASSERT_EQ instead. */
15706 ASSERT_EQ (NULL, (const char *) msg);
15708 msg.escape ("");
15709 ASSERT_STREQ ("", (const char *) msg);
15711 msg.escape ("foobar");
15712 ASSERT_STREQ ("foobar", (const char *) msg);
15714 /* Ensure that we have -fmessage-length set to 0. */
15715 saved_cutoff = pp_line_cutoff (global_dc->printer);
15716 pp_line_cutoff (global_dc->printer) = 0;
15718 msg.escape ("foo\nbar");
15719 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15721 msg.escape ("\a\b\f\n\r\t\v");
15722 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15724 /* Now repeat the tests with -fmessage-length set to 5. */
15725 pp_line_cutoff (global_dc->printer) = 5;
15727 /* Note that the newline is not translated into an escape. */
15728 msg.escape ("foo\nbar");
15729 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15731 msg.escape ("\a\b\f\n\r\t\v");
15732 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15734 /* Restore the original message length setting. */
15735 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15736 }
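/* escaped_string is typically used to sanitize user-supplied text,
   such as attribute messages, before it reaches a diagnostic.  A
   rough sketch of that pattern, with ATTR_TEXT standing in for the
   user-provided string and DECL for the declaration being reported:

     escaped_string msg;
     msg.escape (attr_text);
     if ((const char *) msg != NULL)
       warning (OPT_Wdeprecated_declarations,
                "%qD is deprecated: %s", decl, (const char *) msg);

   As the checks above show, whether a newline is rewritten as a
   two-character escape sequence depends on the -fmessage-length
   setting (pp_line_cutoff).  */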
15738 /* Run all of the selftests within this file. */
15740 void
15741 tree_cc_tests ()
15742 {
15743 test_integer_constants ();
15744 test_identifiers ();
15745 test_labels ();
15746 test_vector_cst_patterns ();
15747 test_location_wrappers ();
15748 test_predicates ();
15749 test_escaped_strings ();
15750 }
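/* These per-file suites are compiled only when CHECKING_P and are
   dispatched from selftest::run_tests (see selftest-run-tests.cc),
   which the compiler reaches via the -fself-test option used during
   the build, so a failure here stops the build rather than surfacing
   at user compile time.  */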
15752 } // namespace selftest
15754 #endif /* CHECKING_P */
15756 #include "gt-tree.h"