gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
72 /* Tree code classes. */
74 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
75 #define END_OF_BASE_TREE_CODES tcc_exceptional,
77 const enum tree_code_class tree_code_type[] = {
78 #include "all-tree.def"
81 #undef DEFTREECODE
82 #undef END_OF_BASE_TREE_CODES
84 /* Table indexed by tree code giving number of expression
85 operands beyond the fixed part of the node structure.
86 Not used for types or decls. */
88 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
89 #define END_OF_BASE_TREE_CODES 0,
91 const unsigned char tree_code_length[] = {
92 #include "all-tree.def"
95 #undef DEFTREECODE
96 #undef END_OF_BASE_TREE_CODES
98 /* Names of tree components.
99 Used for printing out the tree and error messages. */
100 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
101 #define END_OF_BASE_TREE_CODES "@dummy",
103 static const char *const tree_code_name[] = {
104 #include "all-tree.def"
107 #undef DEFTREECODE
108 #undef END_OF_BASE_TREE_CODES
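/* Illustrative note: all three tables above are generated from the same
   all-tree.def entries.  Assuming an entry of the form
       DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)
   the three expansions yield
       tree_code_type[PLUS_EXPR]   == tcc_binary
       tree_code_length[PLUS_EXPR] == 2
       tree_code_name[PLUS_EXPR]   == "plus_expr"
   so the class, operand count and printable name of a code are all found
   by indexing with the tree code itself.  */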
110 /* Each tree code class has an associated string representation.
111 These must correspond to the tree_code_class entries. */
113 const char *const tree_code_class_strings[] =
115 "exceptional",
116 "constant",
117 "type",
118 "declaration",
119 "reference",
120 "comparison",
121 "unary",
122 "binary",
123 "statement",
124 "vl_exp",
125 "expression"
128 /* obstack.[ch] explicitly declined to prototype this. */
129 extern int _obstack_allocated_p (struct obstack *h, void *obj);
131 /* Statistics-gathering stuff. */
133 static uint64_t tree_code_counts[MAX_TREE_CODES];
134 uint64_t tree_node_counts[(int) all_kinds];
135 uint64_t tree_node_sizes[(int) all_kinds];
137 /* Keep in sync with tree.h:enum tree_node_kind. */
138 static const char * const tree_node_kind_names[] = {
139 "decls",
140 "types",
141 "blocks",
142 "stmts",
143 "refs",
144 "exprs",
145 "constants",
146 "identifiers",
147 "vecs",
148 "binfos",
149 "ssa names",
150 "constructors",
151 "random kinds",
152 "lang_decl kinds",
153 "lang_type kinds",
154 "omp clauses",
157 /* Unique id for next decl created. */
158 static GTY(()) int next_decl_uid;
159 /* Unique id for next type created. */
160 static GTY(()) unsigned next_type_uid = 1;
161 /* Unique id for next debug decl created. Use negative numbers,
162 to catch erroneous uses. */
163 static GTY(()) int next_debug_decl_uid;
165 /* Since we cannot rehash a type after it is in the table, we have to
166 keep the hash code. */
168 struct GTY((for_user)) type_hash {
169 unsigned long hash;
170 tree type;
173 /* Initial size of the hash table (rounded to next prime). */
174 #define TYPE_HASH_INITIAL_SIZE 1000
176 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
178 static hashval_t hash (type_hash *t) { return t->hash; }
179 static bool equal (type_hash *a, type_hash *b);
181 static int
182 keep_cache_entry (type_hash *&t)
184 return ggc_marked_p (t->type);
188 /* Now here is the hash table. When recording a type, it is added to
189 the slot whose index is the hash code. Note that the hash table is
190 used for several kinds of types (function types, array types and
191 array index range types, for now). While all these live in the
192 same table, they are completely independent, and the hash code is
193 computed differently for each of these. */
195 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
197 /* Hash table and temporary node for larger integer const values. */
198 static GTY (()) tree int_cst_node;
200 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
202 static hashval_t hash (tree t);
203 static bool equal (tree x, tree y);
206 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
208 /* Class and variable for making sure that there is a single POLY_INT_CST
209 for a given value. */
210 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
212 typedef std::pair<tree, const poly_wide_int *> compare_type;
213 static hashval_t hash (tree t);
214 static bool equal (tree x, const compare_type &y);
217 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
219 /* Hash table for optimization flags and target option flags. Use the same
220 hash table for both sets of options. Nodes for building the current
221 optimization and target option nodes. The assumption is most of the time
222 the options created will already be in the hash table, so we avoid
223 allocating and freeing up a node repeatedly. */
224 static GTY (()) tree cl_optimization_node;
225 static GTY (()) tree cl_target_option_node;
227 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
229 static hashval_t hash (tree t);
230 static bool equal (tree x, tree y);
233 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
235 /* General tree->tree mapping structure for use in hash tables. */
238 static GTY ((cache))
239 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
241 static GTY ((cache))
242 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
244 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
246 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
248 static bool
249 equal (tree_vec_map *a, tree_vec_map *b)
251 return a->base.from == b->base.from;
254 static int
255 keep_cache_entry (tree_vec_map *&m)
257 return ggc_marked_p (m->base.from);
261 static GTY ((cache))
262 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
264 static void set_type_quals (tree, int);
265 static void print_type_hash_statistics (void);
266 static void print_debug_expr_statistics (void);
267 static void print_value_expr_statistics (void);
269 static tree build_array_type_1 (tree, tree, bool, bool, bool);
271 tree global_trees[TI_MAX];
272 tree integer_types[itk_none];
274 bool int_n_enabled_p[NUM_INT_N_ENTS];
275 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
277 bool tree_contains_struct[MAX_TREE_CODES][64];
279 /* Number of operands for each OpenMP clause. */
280 unsigned const char omp_clause_num_ops[] =
282 0, /* OMP_CLAUSE_ERROR */
283 1, /* OMP_CLAUSE_PRIVATE */
284 1, /* OMP_CLAUSE_SHARED */
285 1, /* OMP_CLAUSE_FIRSTPRIVATE */
286 2, /* OMP_CLAUSE_LASTPRIVATE */
287 5, /* OMP_CLAUSE_REDUCTION */
288 5, /* OMP_CLAUSE_TASK_REDUCTION */
289 5, /* OMP_CLAUSE_IN_REDUCTION */
290 1, /* OMP_CLAUSE_COPYIN */
291 1, /* OMP_CLAUSE_COPYPRIVATE */
292 3, /* OMP_CLAUSE_LINEAR */
293 2, /* OMP_CLAUSE_ALIGNED */
294 1, /* OMP_CLAUSE_DEPEND */
295 1, /* OMP_CLAUSE_NONTEMPORAL */
296 1, /* OMP_CLAUSE_UNIFORM */
297 1, /* OMP_CLAUSE_TO_DECLARE */
298 1, /* OMP_CLAUSE_LINK */
299 2, /* OMP_CLAUSE_FROM */
300 2, /* OMP_CLAUSE_TO */
301 2, /* OMP_CLAUSE_MAP */
302 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
303 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
304 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
305 1, /* OMP_CLAUSE_INCLUSIVE */
306 1, /* OMP_CLAUSE_EXCLUSIVE */
307 2, /* OMP_CLAUSE__CACHE_ */
308 2, /* OMP_CLAUSE_GANG */
309 1, /* OMP_CLAUSE_ASYNC */
310 1, /* OMP_CLAUSE_WAIT */
311 0, /* OMP_CLAUSE_AUTO */
312 0, /* OMP_CLAUSE_SEQ */
313 1, /* OMP_CLAUSE__LOOPTEMP_ */
314 1, /* OMP_CLAUSE__REDUCTEMP_ */
315 1, /* OMP_CLAUSE__CONDTEMP_ */
316 1, /* OMP_CLAUSE__SCANTEMP_ */
317 1, /* OMP_CLAUSE_IF */
318 1, /* OMP_CLAUSE_NUM_THREADS */
319 1, /* OMP_CLAUSE_SCHEDULE */
320 0, /* OMP_CLAUSE_NOWAIT */
321 1, /* OMP_CLAUSE_ORDERED */
322 0, /* OMP_CLAUSE_DEFAULT */
323 3, /* OMP_CLAUSE_COLLAPSE */
324 0, /* OMP_CLAUSE_UNTIED */
325 1, /* OMP_CLAUSE_FINAL */
326 0, /* OMP_CLAUSE_MERGEABLE */
327 1, /* OMP_CLAUSE_DEVICE */
328 1, /* OMP_CLAUSE_DIST_SCHEDULE */
329 0, /* OMP_CLAUSE_INBRANCH */
330 0, /* OMP_CLAUSE_NOTINBRANCH */
331 1, /* OMP_CLAUSE_NUM_TEAMS */
332 1, /* OMP_CLAUSE_THREAD_LIMIT */
333 0, /* OMP_CLAUSE_PROC_BIND */
334 1, /* OMP_CLAUSE_SAFELEN */
335 1, /* OMP_CLAUSE_SIMDLEN */
336 0, /* OMP_CLAUSE_DEVICE_TYPE */
337 0, /* OMP_CLAUSE_FOR */
338 0, /* OMP_CLAUSE_PARALLEL */
339 0, /* OMP_CLAUSE_SECTIONS */
340 0, /* OMP_CLAUSE_TASKGROUP */
341 1, /* OMP_CLAUSE_PRIORITY */
342 1, /* OMP_CLAUSE_GRAINSIZE */
343 1, /* OMP_CLAUSE_NUM_TASKS */
344 0, /* OMP_CLAUSE_NOGROUP */
345 0, /* OMP_CLAUSE_THREADS */
346 0, /* OMP_CLAUSE_SIMD */
347 1, /* OMP_CLAUSE_HINT */
348 0, /* OMP_CLAUSE_DEFAULTMAP */
349 0, /* OMP_CLAUSE_ORDER */
350 0, /* OMP_CLAUSE_BIND */
351 1, /* OMP_CLAUSE__SIMDUID_ */
352 0, /* OMP_CLAUSE__SIMT_ */
353 0, /* OMP_CLAUSE_INDEPENDENT */
354 1, /* OMP_CLAUSE_WORKER */
355 1, /* OMP_CLAUSE_VECTOR */
356 1, /* OMP_CLAUSE_NUM_GANGS */
357 1, /* OMP_CLAUSE_NUM_WORKERS */
358 1, /* OMP_CLAUSE_VECTOR_LENGTH */
359 3, /* OMP_CLAUSE_TILE */
360 2, /* OMP_CLAUSE__GRIDDIM_ */
361 0, /* OMP_CLAUSE_IF_PRESENT */
362 0, /* OMP_CLAUSE_FINALIZE */
365 const char * const omp_clause_code_name[] =
367 "error_clause",
368 "private",
369 "shared",
370 "firstprivate",
371 "lastprivate",
372 "reduction",
373 "task_reduction",
374 "in_reduction",
375 "copyin",
376 "copyprivate",
377 "linear",
378 "aligned",
379 "depend",
380 "nontemporal",
381 "uniform",
382 "to",
383 "link",
384 "from",
385 "to",
386 "map",
387 "use_device_ptr",
388 "use_device_addr",
389 "is_device_ptr",
390 "inclusive",
391 "exclusive",
392 "_cache_",
393 "gang",
394 "async",
395 "wait",
396 "auto",
397 "seq",
398 "_looptemp_",
399 "_reductemp_",
400 "_condtemp_",
401 "_scantemp_",
402 "if",
403 "num_threads",
404 "schedule",
405 "nowait",
406 "ordered",
407 "default",
408 "collapse",
409 "untied",
410 "final",
411 "mergeable",
412 "device",
413 "dist_schedule",
414 "inbranch",
415 "notinbranch",
416 "num_teams",
417 "thread_limit",
418 "proc_bind",
419 "safelen",
420 "simdlen",
421 "device_type",
422 "for",
423 "parallel",
424 "sections",
425 "taskgroup",
426 "priority",
427 "grainsize",
428 "num_tasks",
429 "nogroup",
430 "threads",
431 "simd",
432 "hint",
433 "defaultmap",
434 "order",
435 "bind",
436 "_simduid_",
437 "_simt_",
438 "independent",
439 "worker",
440 "vector",
441 "num_gangs",
442 "num_workers",
443 "vector_length",
444 "tile",
445 "_griddim_",
446 "if_present",
447 "finalize",
451 /* Return the tree node structure used by tree code CODE. */
453 static inline enum tree_node_structure_enum
454 tree_node_structure_for_code (enum tree_code code)
456 switch (TREE_CODE_CLASS (code))
458 case tcc_declaration:
459 switch (code)
461 case CONST_DECL: return TS_CONST_DECL;
462 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
463 case FIELD_DECL: return TS_FIELD_DECL;
464 case FUNCTION_DECL: return TS_FUNCTION_DECL;
465 case LABEL_DECL: return TS_LABEL_DECL;
466 case PARM_DECL: return TS_PARM_DECL;
467 case RESULT_DECL: return TS_RESULT_DECL;
468 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
469 case TYPE_DECL: return TS_TYPE_DECL;
470 case VAR_DECL: return TS_VAR_DECL;
471 default: return TS_DECL_NON_COMMON;
474 case tcc_type: return TS_TYPE_NON_COMMON;
476 case tcc_binary:
477 case tcc_comparison:
478 case tcc_expression:
479 case tcc_reference:
480 case tcc_statement:
481 case tcc_unary:
482 case tcc_vl_exp: return TS_EXP;
484 default: /* tcc_constant and tcc_exceptional */
485 break;
488 switch (code)
490 /* tcc_constant cases. */
491 case COMPLEX_CST: return TS_COMPLEX;
492 case FIXED_CST: return TS_FIXED_CST;
493 case INTEGER_CST: return TS_INT_CST;
494 case POLY_INT_CST: return TS_POLY_INT_CST;
495 case REAL_CST: return TS_REAL_CST;
496 case STRING_CST: return TS_STRING;
497 case VECTOR_CST: return TS_VECTOR;
498 case VOID_CST: return TS_TYPED;
500 /* tcc_exceptional cases. */
501 case BLOCK: return TS_BLOCK;
502 case CONSTRUCTOR: return TS_CONSTRUCTOR;
503 case ERROR_MARK: return TS_COMMON;
504 case IDENTIFIER_NODE: return TS_IDENTIFIER;
505 case OMP_CLAUSE: return TS_OMP_CLAUSE;
506 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
507 case PLACEHOLDER_EXPR: return TS_COMMON;
508 case SSA_NAME: return TS_SSA_NAME;
509 case STATEMENT_LIST: return TS_STATEMENT_LIST;
510 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
511 case TREE_BINFO: return TS_BINFO;
512 case TREE_LIST: return TS_LIST;
513 case TREE_VEC: return TS_VEC;
515 default:
516 gcc_unreachable ();
521 /* Initialize tree_contains_struct to describe the hierarchy of tree
522 nodes. */
524 static void
525 initialize_tree_contains_struct (void)
527 unsigned i;
529 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
531 enum tree_code code;
532 enum tree_node_structure_enum ts_code;
534 code = (enum tree_code) i;
535 ts_code = tree_node_structure_for_code (code);
537 /* Mark the TS structure itself. */
538 tree_contains_struct[code][ts_code] = 1;
540 /* Mark all the structures that TS is derived from. */
541 switch (ts_code)
543 case TS_TYPED:
544 case TS_BLOCK:
545 case TS_OPTIMIZATION:
546 case TS_TARGET_OPTION:
547 MARK_TS_BASE (code);
548 break;
550 case TS_COMMON:
551 case TS_INT_CST:
552 case TS_POLY_INT_CST:
553 case TS_REAL_CST:
554 case TS_FIXED_CST:
555 case TS_VECTOR:
556 case TS_STRING:
557 case TS_COMPLEX:
558 case TS_SSA_NAME:
559 case TS_CONSTRUCTOR:
560 case TS_EXP:
561 case TS_STATEMENT_LIST:
562 MARK_TS_TYPED (code);
563 break;
565 case TS_IDENTIFIER:
566 case TS_DECL_MINIMAL:
567 case TS_TYPE_COMMON:
568 case TS_LIST:
569 case TS_VEC:
570 case TS_BINFO:
571 case TS_OMP_CLAUSE:
572 MARK_TS_COMMON (code);
573 break;
575 case TS_TYPE_WITH_LANG_SPECIFIC:
576 MARK_TS_TYPE_COMMON (code);
577 break;
579 case TS_TYPE_NON_COMMON:
580 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
581 break;
583 case TS_DECL_COMMON:
584 MARK_TS_DECL_MINIMAL (code);
585 break;
587 case TS_DECL_WRTL:
588 case TS_CONST_DECL:
589 MARK_TS_DECL_COMMON (code);
590 break;
592 case TS_DECL_NON_COMMON:
593 MARK_TS_DECL_WITH_VIS (code);
594 break;
596 case TS_DECL_WITH_VIS:
597 case TS_PARM_DECL:
598 case TS_LABEL_DECL:
599 case TS_RESULT_DECL:
600 MARK_TS_DECL_WRTL (code);
601 break;
603 case TS_FIELD_DECL:
604 MARK_TS_DECL_COMMON (code);
605 break;
607 case TS_VAR_DECL:
608 MARK_TS_DECL_WITH_VIS (code);
609 break;
611 case TS_TYPE_DECL:
612 case TS_FUNCTION_DECL:
613 MARK_TS_DECL_NON_COMMON (code);
614 break;
616 case TS_TRANSLATION_UNIT_DECL:
617 MARK_TS_DECL_COMMON (code);
618 break;
620 default:
621 gcc_unreachable ();
625 /* Basic consistency checks for attributes used in fold. */
626 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
627 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
628 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
631 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
632 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
633 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
634 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
635 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
638 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
639 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
640 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
641 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
642 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
645 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
646 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
647 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
648 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
649 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
652 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
653 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
654 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
655 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
656 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
657 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
658 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
659 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
662 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
663 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
664 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
665 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
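/* Sketch of how the table is consumed elsewhere (illustrative): the usual
   accessor is the CODE_CONTAINS_STRUCT macro, so a test such as
       CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_WITH_VIS)
   asks whether a node of that code embeds the given TS_* structure, which
   is what the tree checking machinery relies on before touching the
   corresponding fields.  */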
669 /* Init tree.c. */
671 void
672 init_ttree (void)
674 /* Initialize the hash table of types. */
675 type_hash_table
676 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
678 debug_expr_for_decl
679 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
681 value_expr_for_decl
682 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
684 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
686 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
688 int_cst_node = make_int_cst (1, 1);
690 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
692 cl_optimization_node = make_node (OPTIMIZATION_NODE);
693 cl_target_option_node = make_node (TARGET_OPTION_NODE);
695 /* Initialize the tree_contains_struct array. */
696 initialize_tree_contains_struct ();
697 lang_hooks.init_ts ();
701 /* The name of the object as the assembler will see it (but before any
702 translations made by ASM_OUTPUT_LABELREF). Often this is the same
703 as DECL_NAME. It is an IDENTIFIER_NODE. */
704 tree
705 decl_assembler_name (tree decl)
707 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
708 lang_hooks.set_decl_assembler_name (decl);
709 return DECL_ASSEMBLER_NAME_RAW (decl);
712 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
713 (either of which may be NULL). Inform the FE if this changes the
714 name. */
716 void
717 overwrite_decl_assembler_name (tree decl, tree name)
719 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
720 lang_hooks.overwrite_decl_assembler_name (decl, name);
723 /* When the target supports COMDAT groups, this indicates which group the
724 DECL is associated with. This can be either an IDENTIFIER_NODE or a
725 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
726 tree
727 decl_comdat_group (const_tree node)
729 struct symtab_node *snode = symtab_node::get (node);
730 if (!snode)
731 return NULL;
732 return snode->get_comdat_group ();
735 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
736 tree
737 decl_comdat_group_id (const_tree node)
739 struct symtab_node *snode = symtab_node::get (node);
740 if (!snode)
741 return NULL;
742 return snode->get_comdat_group_id ();
745 /* When the target supports named sections, return the name of the section
746 NODE is placed in as a string, or NULL if it is in no section. */
747 const char *
748 decl_section_name (const_tree node)
750 struct symtab_node *snode = symtab_node::get (node);
751 if (!snode)
752 return NULL;
753 return snode->get_section ();
756 /* Set section name of NODE to VALUE (a string); a NULL VALUE clears
757 any previously set section. */
758 void
759 set_decl_section_name (tree node, const char *value)
761 struct symtab_node *snode;
763 if (value == NULL)
765 snode = symtab_node::get (node);
766 if (!snode)
767 return;
769 else if (VAR_P (node))
770 snode = varpool_node::get_create (node);
771 else
772 snode = cgraph_node::get_create (node);
773 snode->set_section (value);
776 /* Return TLS model of a variable NODE. */
777 enum tls_model
778 decl_tls_model (const_tree node)
780 struct varpool_node *snode = varpool_node::get (node);
781 if (!snode)
782 return TLS_MODEL_NONE;
783 return snode->tls_model;
786 /* Set TLS model of variable NODE to MODEL. */
787 void
788 set_decl_tls_model (tree node, enum tls_model model)
790 struct varpool_node *vnode;
792 if (model == TLS_MODEL_NONE)
794 vnode = varpool_node::get (node);
795 if (!vnode)
796 return;
798 else
799 vnode = varpool_node::get_create (node);
800 vnode->tls_model = model;
803 /* Compute the number of bytes occupied by a tree with code CODE.
804 This function cannot be used for nodes that have variable sizes,
805 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
806 size_t
807 tree_code_size (enum tree_code code)
809 switch (TREE_CODE_CLASS (code))
811 case tcc_declaration: /* A decl node */
812 switch (code)
814 case FIELD_DECL: return sizeof (tree_field_decl);
815 case PARM_DECL: return sizeof (tree_parm_decl);
816 case VAR_DECL: return sizeof (tree_var_decl);
817 case LABEL_DECL: return sizeof (tree_label_decl);
818 case RESULT_DECL: return sizeof (tree_result_decl);
819 case CONST_DECL: return sizeof (tree_const_decl);
820 case TYPE_DECL: return sizeof (tree_type_decl);
821 case FUNCTION_DECL: return sizeof (tree_function_decl);
822 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
823 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
824 case NAMESPACE_DECL:
825 case IMPORTED_DECL:
826 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
827 default:
828 gcc_checking_assert (code >= NUM_TREE_CODES);
829 return lang_hooks.tree_size (code);
832 case tcc_type: /* a type node */
833 switch (code)
835 case OFFSET_TYPE:
836 case ENUMERAL_TYPE:
837 case BOOLEAN_TYPE:
838 case INTEGER_TYPE:
839 case REAL_TYPE:
840 case POINTER_TYPE:
841 case REFERENCE_TYPE:
842 case NULLPTR_TYPE:
843 case FIXED_POINT_TYPE:
844 case COMPLEX_TYPE:
845 case VECTOR_TYPE:
846 case ARRAY_TYPE:
847 case RECORD_TYPE:
848 case UNION_TYPE:
849 case QUAL_UNION_TYPE:
850 case VOID_TYPE:
851 case FUNCTION_TYPE:
852 case METHOD_TYPE:
853 case LANG_TYPE: return sizeof (tree_type_non_common);
854 default:
855 gcc_checking_assert (code >= NUM_TREE_CODES);
856 return lang_hooks.tree_size (code);
859 case tcc_reference: /* a reference */
860 case tcc_expression: /* an expression */
861 case tcc_statement: /* an expression with side effects */
862 case tcc_comparison: /* a comparison expression */
863 case tcc_unary: /* a unary arithmetic expression */
864 case tcc_binary: /* a binary arithmetic expression */
865 return (sizeof (struct tree_exp)
866 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
868 case tcc_constant: /* a constant */
869 switch (code)
871 case VOID_CST: return sizeof (tree_typed);
872 case INTEGER_CST: gcc_unreachable ();
873 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
874 case REAL_CST: return sizeof (tree_real_cst);
875 case FIXED_CST: return sizeof (tree_fixed_cst);
876 case COMPLEX_CST: return sizeof (tree_complex);
877 case VECTOR_CST: gcc_unreachable ();
878 case STRING_CST: gcc_unreachable ();
879 default:
880 gcc_checking_assert (code >= NUM_TREE_CODES);
881 return lang_hooks.tree_size (code);
884 case tcc_exceptional: /* something random, like an identifier. */
885 switch (code)
887 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
888 case TREE_LIST: return sizeof (tree_list);
890 case ERROR_MARK:
891 case PLACEHOLDER_EXPR: return sizeof (tree_common);
893 case TREE_VEC: gcc_unreachable ();
894 case OMP_CLAUSE: gcc_unreachable ();
896 case SSA_NAME: return sizeof (tree_ssa_name);
898 case STATEMENT_LIST: return sizeof (tree_statement_list);
899 case BLOCK: return sizeof (struct tree_block);
900 case CONSTRUCTOR: return sizeof (tree_constructor);
901 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
902 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
904 default:
905 gcc_checking_assert (code >= NUM_TREE_CODES);
906 return lang_hooks.tree_size (code);
909 default:
910 gcc_unreachable ();
914 /* Compute the number of bytes occupied by NODE. This routine only
915 looks at TREE_CODE, except for those nodes that have variable sizes. */
916 size_t
917 tree_size (const_tree node)
919 const enum tree_code code = TREE_CODE (node);
920 switch (code)
922 case INTEGER_CST:
923 return (sizeof (struct tree_int_cst)
924 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
926 case TREE_BINFO:
927 return (offsetof (struct tree_binfo, base_binfos)
928 + vec<tree, va_gc>
929 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
931 case TREE_VEC:
932 return (sizeof (struct tree_vec)
933 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
935 case VECTOR_CST:
936 return (sizeof (struct tree_vector)
937 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
939 case STRING_CST:
940 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
942 case OMP_CLAUSE:
943 return (sizeof (struct tree_omp_clause)
944 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
945 * sizeof (tree));
947 default:
948 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
949 return (sizeof (struct tree_exp)
950 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
951 else
952 return tree_code_size (code);
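/* Worked example of the variable-size case above (sizes are host
   dependent, shown only to illustrate the formula): an INTEGER_CST that
   stores three HOST_WIDE_INT elements occupies
       sizeof (struct tree_int_cst) + (3 - 1) * sizeof (HOST_WIDE_INT)
   bytes; the "- 1" is because one element is already part of the fixed
   structure.  */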
956 /* Return tree node kind based on tree CODE. */
958 static tree_node_kind
959 get_stats_node_kind (enum tree_code code)
961 enum tree_code_class type = TREE_CODE_CLASS (code);
963 switch (type)
965 case tcc_declaration: /* A decl node */
966 return d_kind;
967 case tcc_type: /* a type node */
968 return t_kind;
969 case tcc_statement: /* an expression with side effects */
970 return s_kind;
971 case tcc_reference: /* a reference */
972 return r_kind;
973 case tcc_expression: /* an expression */
974 case tcc_comparison: /* a comparison expression */
975 case tcc_unary: /* a unary arithmetic expression */
976 case tcc_binary: /* a binary arithmetic expression */
977 return e_kind;
978 case tcc_constant: /* a constant */
979 return c_kind;
980 case tcc_exceptional: /* something random, like an identifier. */
981 switch (code)
983 case IDENTIFIER_NODE:
984 return id_kind;
985 case TREE_VEC:
986 return vec_kind;
987 case TREE_BINFO:
988 return binfo_kind;
989 case SSA_NAME:
990 return ssa_name_kind;
991 case BLOCK:
992 return b_kind;
993 case CONSTRUCTOR:
994 return constr_kind;
995 case OMP_CLAUSE:
996 return omp_clause_kind;
997 default:
998 return x_kind;
1000 break;
1001 case tcc_vl_exp:
1002 return e_kind;
1003 default:
1004 gcc_unreachable ();
1008 /* Record interesting allocation statistics for a tree node with CODE
1009 and LENGTH. */
1011 static void
1012 record_node_allocation_statistics (enum tree_code code, size_t length)
1014 if (!GATHER_STATISTICS)
1015 return;
1017 tree_node_kind kind = get_stats_node_kind (code);
1019 tree_code_counts[(int) code]++;
1020 tree_node_counts[(int) kind]++;
1021 tree_node_sizes[(int) kind] += length;
1024 /* Allocate and return a new UID from the DECL_UID namespace. */
1026 int
1027 allocate_decl_uid (void)
1029 return next_decl_uid++;
1032 /* Return a newly allocated node of code CODE. For decl and type
1033 nodes, some other fields are initialized. The rest of the node is
1034 initialized to zero. This function cannot be used for TREE_VEC,
1035 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1036 tree_code_size.
1038 Achoo! I got a code in the node. */
1040 tree
1041 make_node (enum tree_code code MEM_STAT_DECL)
1043 tree t;
1044 enum tree_code_class type = TREE_CODE_CLASS (code);
1045 size_t length = tree_code_size (code);
1047 record_node_allocation_statistics (code, length);
1049 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1050 TREE_SET_CODE (t, code);
1052 switch (type)
1054 case tcc_statement:
1055 if (code != DEBUG_BEGIN_STMT)
1056 TREE_SIDE_EFFECTS (t) = 1;
1057 break;
1059 case tcc_declaration:
1060 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1062 if (code == FUNCTION_DECL)
1064 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1065 SET_DECL_MODE (t, FUNCTION_MODE);
1067 else
1068 SET_DECL_ALIGN (t, 1);
1070 DECL_SOURCE_LOCATION (t) = input_location;
1071 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1072 DECL_UID (t) = --next_debug_decl_uid;
1073 else
1075 DECL_UID (t) = allocate_decl_uid ();
1076 SET_DECL_PT_UID (t, -1);
1078 if (TREE_CODE (t) == LABEL_DECL)
1079 LABEL_DECL_UID (t) = -1;
1081 break;
1083 case tcc_type:
1084 TYPE_UID (t) = next_type_uid++;
1085 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1086 TYPE_USER_ALIGN (t) = 0;
1087 TYPE_MAIN_VARIANT (t) = t;
1088 TYPE_CANONICAL (t) = t;
1090 /* Default to no attributes for type, but let target change that. */
1091 TYPE_ATTRIBUTES (t) = NULL_TREE;
1092 targetm.set_default_type_attributes (t);
1094 /* We have not yet computed the alias set for this type. */
1095 TYPE_ALIAS_SET (t) = -1;
1096 break;
1098 case tcc_constant:
1099 TREE_CONSTANT (t) = 1;
1100 break;
1102 case tcc_expression:
1103 switch (code)
1105 case INIT_EXPR:
1106 case MODIFY_EXPR:
1107 case VA_ARG_EXPR:
1108 case PREDECREMENT_EXPR:
1109 case PREINCREMENT_EXPR:
1110 case POSTDECREMENT_EXPR:
1111 case POSTINCREMENT_EXPR:
1112 /* All of these have side-effects, no matter what their
1113 operands are. */
1114 TREE_SIDE_EFFECTS (t) = 1;
1115 break;
1117 default:
1118 break;
1120 break;
1122 case tcc_exceptional:
1123 switch (code)
1125 case TARGET_OPTION_NODE:
1126 TREE_TARGET_OPTION(t)
1127 = ggc_cleared_alloc<struct cl_target_option> ();
1128 break;
1130 case OPTIMIZATION_NODE:
1131 TREE_OPTIMIZATION (t)
1132 = ggc_cleared_alloc<struct cl_optimization> ();
1133 break;
1135 default:
1136 break;
1138 break;
1140 default:
1141 /* Other classes need no special treatment. */
1142 break;
1145 return t;
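/* Usage sketch (illustrative): most callers reach this through wrappers
   such as build_decl or build1, but a bare node can be made directly, e.g.
       tree t = make_node (TREE_LIST);
   which returns a zeroed node of the right size with TREE_CODE (t) set.
   Variable-sized codes (TREE_VEC, INTEGER_CST, OMP_CLAUSE) must use their
   dedicated constructors instead, as noted above.  */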
1148 /* Free tree node. */
1150 void
1151 free_node (tree node)
1153 enum tree_code code = TREE_CODE (node);
1154 if (GATHER_STATISTICS)
1156 enum tree_node_kind kind = get_stats_node_kind (code);
1158 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1159 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1160 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1162 tree_code_counts[(int) TREE_CODE (node)]--;
1163 tree_node_counts[(int) kind]--;
1164 tree_node_sizes[(int) kind] -= tree_size (node);
1166 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1167 vec_free (CONSTRUCTOR_ELTS (node));
1168 else if (code == BLOCK)
1169 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1170 else if (code == TREE_BINFO)
1171 vec_free (BINFO_BASE_ACCESSES (node));
1172 else if (code == OPTIMIZATION_NODE)
1173 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1174 else if (code == TARGET_OPTION_NODE)
1175 cl_target_option_free (TREE_TARGET_OPTION (node));
1176 ggc_free (node);
1179 /* Return a new node with the same contents as NODE except that its
1180 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1182 tree
1183 copy_node (tree node MEM_STAT_DECL)
1185 tree t;
1186 enum tree_code code = TREE_CODE (node);
1187 size_t length;
1189 gcc_assert (code != STATEMENT_LIST);
1191 length = tree_size (node);
1192 record_node_allocation_statistics (code, length);
1193 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1194 memcpy (t, node, length);
1196 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1197 TREE_CHAIN (t) = 0;
1198 TREE_ASM_WRITTEN (t) = 0;
1199 TREE_VISITED (t) = 0;
1201 if (TREE_CODE_CLASS (code) == tcc_declaration)
1203 if (code == DEBUG_EXPR_DECL)
1204 DECL_UID (t) = --next_debug_decl_uid;
1205 else
1207 DECL_UID (t) = allocate_decl_uid ();
1208 if (DECL_PT_UID_SET_P (node))
1209 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1211 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1212 && DECL_HAS_VALUE_EXPR_P (node))
1214 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1215 DECL_HAS_VALUE_EXPR_P (t) = 1;
1217 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1218 if (VAR_P (node))
1220 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1221 t->decl_with_vis.symtab_node = NULL;
1223 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1225 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1226 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1228 if (TREE_CODE (node) == FUNCTION_DECL)
1230 DECL_STRUCT_FUNCTION (t) = NULL;
1231 t->decl_with_vis.symtab_node = NULL;
1234 else if (TREE_CODE_CLASS (code) == tcc_type)
1236 TYPE_UID (t) = next_type_uid++;
1237 /* The following is so that the debug code for
1238 the copy is different from the original type.
1239 The two statements usually duplicate each other
1240 (because they clear fields of the same union),
1241 but the optimizer should catch that. */
1242 TYPE_SYMTAB_ADDRESS (t) = 0;
1243 TYPE_SYMTAB_DIE (t) = 0;
1245 /* Do not copy the values cache. */
1246 if (TYPE_CACHED_VALUES_P (t))
1248 TYPE_CACHED_VALUES_P (t) = 0;
1249 TYPE_CACHED_VALUES (t) = NULL_TREE;
1252 else if (code == TARGET_OPTION_NODE)
1254 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1255 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1256 sizeof (struct cl_target_option));
1258 else if (code == OPTIMIZATION_NODE)
1260 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1261 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1262 sizeof (struct cl_optimization));
1265 return t;
1268 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1269 For example, this can copy a list made of TREE_LIST nodes. */
1271 tree
1272 copy_list (tree list)
1274 tree head;
1275 tree prev, next;
1277 if (list == 0)
1278 return 0;
1280 head = prev = copy_node (list);
1281 next = TREE_CHAIN (list);
1282 while (next)
1284 TREE_CHAIN (prev) = copy_node (next);
1285 prev = TREE_CHAIN (prev);
1286 next = TREE_CHAIN (next);
1288 return head;
1292 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1293 INTEGER_CST with value CST and type TYPE. */
1295 static unsigned int
1296 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1298 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1299 /* We need extra HWIs if CST is an unsigned integer with its
1300 upper bit set. */
1301 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1302 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1303 return cst.get_len ();
1306 /* Return a new INTEGER_CST with value CST and type TYPE. */
1308 static tree
1309 build_new_int_cst (tree type, const wide_int &cst)
1311 unsigned int len = cst.get_len ();
1312 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1313 tree nt = make_int_cst (len, ext_len);
1315 if (len < ext_len)
1317 --ext_len;
1318 TREE_INT_CST_ELT (nt, ext_len)
1319 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1320 for (unsigned int i = len; i < ext_len; ++i)
1321 TREE_INT_CST_ELT (nt, i) = -1;
1323 else if (TYPE_UNSIGNED (type)
1324 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1326 len--;
1327 TREE_INT_CST_ELT (nt, len)
1328 = zext_hwi (cst.elt (len),
1329 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1332 for (unsigned int i = 0; i < len; i++)
1333 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1334 TREE_TYPE (nt) = type;
1335 return nt;
1338 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1340 static tree
1341 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1342 CXX_MEM_STAT_INFO)
1344 size_t length = sizeof (struct tree_poly_int_cst);
1345 record_node_allocation_statistics (POLY_INT_CST, length);
1347 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1349 TREE_SET_CODE (t, POLY_INT_CST);
1350 TREE_CONSTANT (t) = 1;
1351 TREE_TYPE (t) = type;
1352 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1353 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1354 return t;
1357 /* Create a constant tree that contains CST sign-extended to TYPE. */
1359 tree
1360 build_int_cst (tree type, poly_int64 cst)
1362 /* Support legacy code. */
1363 if (!type)
1364 type = integer_type_node;
1366 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1369 /* Create a constant tree that contains CST zero-extended to TYPE. */
1371 tree
1372 build_int_cstu (tree type, poly_uint64 cst)
1374 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1377 /* Create a constant tree that contains CST sign-extended to TYPE. */
1379 tree
1380 build_int_cst_type (tree type, poly_int64 cst)
1382 gcc_assert (type);
1383 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
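/* Illustrative example (hypothetical 16-bit unsigned short): the calls
       build_int_cst  (short_unsigned_type_node, -1)
       build_int_cstu (short_unsigned_type_node, 0xffff)
   both reduce to the value 0xffff at the type's precision and so return
   the same shared INTEGER_CST, because wide_int_to_tree canonicalizes the
   value to the precision and sign of the type.  */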
1386 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1387 of CST is assumed to be the same as the signedness of TYPE. */
1389 tree
1390 double_int_to_tree (tree type, double_int cst)
1392 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1395 /* We force the wide_int CST to the range of the type TYPE by sign or
1396 zero extending it. OVERFLOWABLE indicates whether we are interested in
1397 overflow of the value: when >0 we are only interested in signed
1398 overflow, for <0 we are interested in any overflow. OVERFLOWED
1399 indicates whether overflow has already occurred. We force
1400 CST's value to be within range of TYPE (by setting to 0 or 1 all
1401 the bits outside the type's range). We set TREE_OVERFLOW if
1402 OVERFLOWED is nonzero,
1403 or OVERFLOWABLE is >0 and signed overflow occurs,
1404 or OVERFLOWABLE is <0 and any overflow occurs.
1405 We return a new tree node for the extended wide_int. The node
1406 is shared if no overflow flags are set. */
1410 tree
1411 force_fit_type (tree type, const poly_wide_int_ref &cst,
1412 int overflowable, bool overflowed)
1414 signop sign = TYPE_SIGN (type);
1416 /* If we need to set overflow flags, return a new unshared node. */
1417 if (overflowed || !wi::fits_to_tree_p (cst, type))
1419 if (overflowed
1420 || overflowable < 0
1421 || (overflowable > 0 && sign == SIGNED))
1423 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1424 sign);
1425 tree t;
1426 if (tmp.is_constant ())
1427 t = build_new_int_cst (type, tmp.coeffs[0]);
1428 else
1430 tree coeffs[NUM_POLY_INT_COEFFS];
1431 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1433 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1434 TREE_OVERFLOW (coeffs[i]) = 1;
1436 t = build_new_poly_int_cst (type, coeffs);
1438 TREE_OVERFLOW (t) = 1;
1439 return t;
1443 /* Else build a shared node. */
1444 return wide_int_to_tree (type, cst);
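/* Worked example (illustrative): forcing the value 300 into an 8-bit
   unsigned type with OVERFLOWABLE == -1 wraps the value to 44 (300 mod
   256) and returns an unshared INTEGER_CST with TREE_OVERFLOW set; the
   same request with OVERFLOWABLE > 0 treats unsigned wrap-around as
   uninteresting and simply returns the shared constant 44.  */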
1447 /* These are the hash table functions for the hash table of INTEGER_CST
1448 nodes of a sizetype. */
1450 /* Return the hash code X, an INTEGER_CST. */
1452 hashval_t
1453 int_cst_hasher::hash (tree x)
1455 const_tree const t = x;
1456 hashval_t code = TYPE_UID (TREE_TYPE (t));
1457 int i;
1459 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1460 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1462 return code;
1465 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1466 is the same as that given by *Y, which is also an INTEGER_CST. */
1468 bool
1469 int_cst_hasher::equal (tree x, tree y)
1471 const_tree const xt = x;
1472 const_tree const yt = y;
1474 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1475 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1476 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1477 return false;
1479 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1480 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1481 return false;
1483 return true;
1486 /* Create an INT_CST node of TYPE and value CST.
1487 The returned node is always shared. For small integers we use a
1488 per-type vector cache, for larger ones we use a single hash table.
1489 The value is extended from its precision according to the sign of
1490 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1491 the upper bits and ensures that hashing and value equality based
1492 upon the underlying HOST_WIDE_INTs works without masking. */
1494 static tree
1495 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1497 tree t;
1498 int ix = -1;
1499 int limit = 0;
1501 gcc_assert (type);
1502 unsigned int prec = TYPE_PRECISION (type);
1503 signop sgn = TYPE_SIGN (type);
1505 /* Verify that everything is canonical. */
1506 int l = pcst.get_len ();
1507 if (l > 1)
1509 if (pcst.elt (l - 1) == 0)
1510 gcc_checking_assert (pcst.elt (l - 2) < 0);
1511 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1512 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1515 wide_int cst = wide_int::from (pcst, prec, sgn);
1516 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1518 if (ext_len == 1)
1520 /* We just need to store a single HOST_WIDE_INT. */
1521 HOST_WIDE_INT hwi;
1522 if (TYPE_UNSIGNED (type))
1523 hwi = cst.to_uhwi ();
1524 else
1525 hwi = cst.to_shwi ();
1527 switch (TREE_CODE (type))
1529 case NULLPTR_TYPE:
1530 gcc_assert (hwi == 0);
1531 /* Fallthru. */
1533 case POINTER_TYPE:
1534 case REFERENCE_TYPE:
1535 /* Cache NULL pointer and zero bounds. */
1536 if (hwi == 0)
1538 limit = 1;
1539 ix = 0;
1541 break;
1543 case BOOLEAN_TYPE:
1544 /* Cache false or true. */
1545 limit = 2;
1546 if (IN_RANGE (hwi, 0, 1))
1547 ix = hwi;
1548 break;
1550 case INTEGER_TYPE:
1551 case OFFSET_TYPE:
1552 if (TYPE_SIGN (type) == UNSIGNED)
1554 /* Cache [0, N). */
1555 limit = param_integer_share_limit;
1556 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1557 ix = hwi;
1559 else
1561 /* Cache [-1, N). */
1562 limit = param_integer_share_limit + 1;
1563 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1564 ix = hwi + 1;
1566 break;
1568 case ENUMERAL_TYPE:
1569 break;
1571 default:
1572 gcc_unreachable ();
1575 if (ix >= 0)
1577 /* Look for it in the type's vector of small shared ints. */
1578 if (!TYPE_CACHED_VALUES_P (type))
1580 TYPE_CACHED_VALUES_P (type) = 1;
1581 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1584 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1585 if (t)
1586 /* Make sure no one is clobbering the shared constant. */
1587 gcc_checking_assert (TREE_TYPE (t) == type
1588 && TREE_INT_CST_NUNITS (t) == 1
1589 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1590 && TREE_INT_CST_EXT_NUNITS (t) == 1
1591 && TREE_INT_CST_ELT (t, 0) == hwi);
1592 else
1594 /* Create a new shared int. */
1595 t = build_new_int_cst (type, cst);
1596 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1599 else
1601 /* Use the cache of larger shared ints, using int_cst_node as
1602 a temporary. */
1604 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1605 TREE_TYPE (int_cst_node) = type;
1607 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1608 t = *slot;
1609 if (!t)
1611 /* Insert this one into the hash table. */
1612 t = int_cst_node;
1613 *slot = t;
1614 /* Make a new node for next time round. */
1615 int_cst_node = make_int_cst (1, 1);
1619 else
1621 /* The value either hashes properly or we drop it on the floor
1622 for the gc to take care of. There will not be enough of them
1623 to worry about. */
1625 tree nt = build_new_int_cst (type, cst);
1626 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1627 t = *slot;
1628 if (!t)
1630 /* Insert this one into the hash table. */
1631 t = nt;
1632 *slot = t;
1634 else
1635 ggc_free (nt);
1638 return t;
1641 hashval_t
1642 poly_int_cst_hasher::hash (tree t)
1644 inchash::hash hstate;
1646 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1647 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1648 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1650 return hstate.end ();
1653 bool
1654 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1656 if (TREE_TYPE (x) != y.first)
1657 return false;
1658 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1659 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1660 return false;
1661 return true;
1664 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1665 The elements must also have type TYPE. */
1667 tree
1668 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1670 unsigned int prec = TYPE_PRECISION (type);
1671 gcc_assert (prec <= values.coeffs[0].get_precision ());
1672 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1674 inchash::hash h;
1675 h.add_int (TYPE_UID (type));
1676 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1677 h.add_wide_int (c.coeffs[i]);
1678 poly_int_cst_hasher::compare_type comp (type, &c);
1679 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1680 INSERT);
1681 if (*slot == NULL_TREE)
1683 tree coeffs[NUM_POLY_INT_COEFFS];
1684 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1685 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1686 *slot = build_new_poly_int_cst (type, coeffs);
1688 return *slot;
1691 /* Create a constant tree with value VALUE in type TYPE. */
1693 tree
1694 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1696 if (value.is_constant ())
1697 return wide_int_to_tree_1 (type, value.coeffs[0]);
1698 return build_poly_int_cst (type, value);
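/* Sharing sketch (illustrative): because small values are cached per type
   and larger ones go through the hash table, two requests for the same
   value in the same type yield the same node, e.g.
       build_int_cst (integer_type_node, 7)
         == build_int_cst (integer_type_node, 7)
   holds as a pointer comparison.  */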
1701 void
1702 cache_integer_cst (tree t)
1704 tree type = TREE_TYPE (t);
1705 int ix = -1;
1706 int limit = 0;
1707 int prec = TYPE_PRECISION (type);
1709 gcc_assert (!TREE_OVERFLOW (t));
1711 switch (TREE_CODE (type))
1713 case NULLPTR_TYPE:
1714 gcc_assert (integer_zerop (t));
1715 /* Fallthru. */
1717 case POINTER_TYPE:
1718 case REFERENCE_TYPE:
1719 /* Cache NULL pointer. */
1720 if (integer_zerop (t))
1722 limit = 1;
1723 ix = 0;
1725 break;
1727 case BOOLEAN_TYPE:
1728 /* Cache false or true. */
1729 limit = 2;
1730 if (wi::ltu_p (wi::to_wide (t), 2))
1731 ix = TREE_INT_CST_ELT (t, 0);
1732 break;
1734 case INTEGER_TYPE:
1735 case OFFSET_TYPE:
1736 if (TYPE_UNSIGNED (type))
1738 /* Cache 0..N */
1739 limit = param_integer_share_limit;
1741 /* This is a little hokey, but if the prec is smaller than
1742 what is necessary to hold param_integer_share_limit, then the
1743 obvious test will not get the correct answer. */
1744 if (prec < HOST_BITS_PER_WIDE_INT)
1746 if (tree_to_uhwi (t)
1747 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1748 ix = tree_to_uhwi (t);
1750 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1751 ix = tree_to_uhwi (t);
1753 else
1755 /* Cache -1..N */
1756 limit = param_integer_share_limit + 1;
1758 if (integer_minus_onep (t))
1759 ix = 0;
1760 else if (!wi::neg_p (wi::to_wide (t)))
1762 if (prec < HOST_BITS_PER_WIDE_INT)
1764 if (tree_to_shwi (t) < param_integer_share_limit)
1765 ix = tree_to_shwi (t) + 1;
1767 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1768 ix = tree_to_shwi (t) + 1;
1771 break;
1773 case ENUMERAL_TYPE:
1774 break;
1776 default:
1777 gcc_unreachable ();
1780 if (ix >= 0)
1782 /* Look for it in the type's vector of small shared ints. */
1783 if (!TYPE_CACHED_VALUES_P (type))
1785 TYPE_CACHED_VALUES_P (type) = 1;
1786 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1789 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1790 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1792 else
1794 /* Use the cache of larger shared ints. */
1795 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1796 /* If there is already an entry for the number verify it's the
1797 same. */
1798 if (*slot)
1799 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1800 else
1801 /* Otherwise insert this one into the hash table. */
1802 *slot = t;
1807 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1808 and the rest are zeros. */
1810 tree
1811 build_low_bits_mask (tree type, unsigned bits)
1813 gcc_assert (bits <= TYPE_PRECISION (type));
1815 return wide_int_to_tree (type, wi::mask (bits, false,
1816 TYPE_PRECISION (type)));
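/* For example (illustrative), build_low_bits_mask (unsigned_type_node, 4)
   yields the constant 0xf: wi::mask (4, false, prec) produces a value
   whose low four bits are set and whose remaining bits are clear.  */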
1819 /* Checks that X is an integer constant that can be expressed in (unsigned)
1820 HOST_WIDE_INT without loss of precision. */
1822 bool
1823 cst_and_fits_in_hwi (const_tree x)
1825 return (TREE_CODE (x) == INTEGER_CST
1826 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1829 /* Build a newly constructed VECTOR_CST with the given values of
1830 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1832 tree
1833 make_vector (unsigned log2_npatterns,
1834 unsigned int nelts_per_pattern MEM_STAT_DECL)
1836 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1837 tree t;
1838 unsigned npatterns = 1 << log2_npatterns;
1839 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1840 unsigned length = (sizeof (struct tree_vector)
1841 + (encoded_nelts - 1) * sizeof (tree));
1843 record_node_allocation_statistics (VECTOR_CST, length);
1845 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1847 TREE_SET_CODE (t, VECTOR_CST);
1848 TREE_CONSTANT (t) = 1;
1849 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1850 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1852 return t;
1855 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1856 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1858 tree
1859 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1861 if (vec_safe_length (v) == 0)
1862 return build_zero_cst (type);
1864 unsigned HOST_WIDE_INT idx, nelts;
1865 tree value;
1867 /* We can't construct a VECTOR_CST for a variable number of elements. */
1868 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1869 tree_vector_builder vec (type, nelts, 1);
1870 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1872 if (TREE_CODE (value) == VECTOR_CST)
1874 /* If NELTS is constant then this must be too. */
1875 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1876 for (unsigned i = 0; i < sub_nelts; ++i)
1877 vec.quick_push (VECTOR_CST_ELT (value, i));
1879 else
1880 vec.quick_push (value);
1882 while (vec.length () < nelts)
1883 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1885 return vec.build ();
1888 /* Build a vector of type VECTYPE where all the elements are SCs. */
1889 tree
1890 build_vector_from_val (tree vectype, tree sc)
1892 unsigned HOST_WIDE_INT i, nunits;
1894 if (sc == error_mark_node)
1895 return sc;
1897 /* Verify that the vector type is suitable for SC. Note that there
1898 is some inconsistency in the type-system with respect to restrict
1899 qualifications of pointers. Vector types always have a main-variant
1900 element type and the qualification is applied to the vector-type.
1901 So TREE_TYPE (vector-type) does not return a properly qualified
1902 vector element-type. */
1903 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1904 TREE_TYPE (vectype)));
1906 if (CONSTANT_CLASS_P (sc))
1908 tree_vector_builder v (vectype, 1, 1);
1909 v.quick_push (sc);
1910 return v.build ();
1912 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1913 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1914 else
1916 vec<constructor_elt, va_gc> *v;
1917 vec_alloc (v, nunits);
1918 for (i = 0; i < nunits; ++i)
1919 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1920 return build_constructor (vectype, v);
1924 /* If TYPE is not a vector type, just return SC, otherwise return
1925 build_vector_from_val (TYPE, SC). */
1927 tree
1928 build_uniform_cst (tree type, tree sc)
1930 if (!VECTOR_TYPE_P (type))
1931 return sc;
1933 return build_vector_from_val (type, sc);
1936 /* Build a vector series of type TYPE in which element I has the value
1937 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1938 and a VEC_SERIES_EXPR otherwise. */
1940 tree
1941 build_vec_series (tree type, tree base, tree step)
1943 if (integer_zerop (step))
1944 return build_vector_from_val (type, base);
1945 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1947 tree_vector_builder builder (type, 1, 3);
1948 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1949 wi::to_wide (base) + wi::to_wide (step));
1950 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1951 wi::to_wide (elt1) + wi::to_wide (step));
1952 builder.quick_push (base);
1953 builder.quick_push (elt1);
1954 builder.quick_push (elt2);
1955 return builder.build ();
1957 return build2 (VEC_SERIES_EXPR, type, base, step);
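/* Encoding sketch (illustrative): for constant BASE and STEP the builder
   above uses a single pattern with three elements per pattern, so for
   BASE == 1 and STEP == 2 it pushes { 1, 3, 5 } and the VECTOR_CST
   encoding extends the series, each further element differing from its
   predecessor by the same step.  */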
1960 /* Return a vector with the same number of units and number of bits
1961 as VEC_TYPE, but in which the elements are a linear series of unsigned
1962 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1964 tree
1965 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1967 tree index_vec_type = vec_type;
1968 tree index_elt_type = TREE_TYPE (vec_type);
1969 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1970 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1972 index_elt_type = build_nonstandard_integer_type
1973 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1974 index_vec_type = build_vector_type (index_elt_type, nunits);
1977 tree_vector_builder v (index_vec_type, 1, 3);
1978 for (unsigned int i = 0; i < 3; ++i)
1979 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1980 return v.build ();
1983 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
1984 elements are A and the rest are B. */
1986 tree
1987 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
1989 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
1990 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
1991 /* Optimize the constant case. */
1992 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
1993 count /= 2;
1994 tree_vector_builder builder (vec_type, count, 2);
1995 for (unsigned int i = 0; i < count * 2; ++i)
1996 builder.quick_push (i < num_a ? a : b);
1997 return builder.build ();
2000 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2001 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2003 void
2004 recompute_constructor_flags (tree c)
2006 unsigned int i;
2007 tree val;
2008 bool constant_p = true;
2009 bool side_effects_p = false;
2010 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2012 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2014 /* Mostly ctors will have elts that don't have side-effects, so
2015 the usual case is to scan all the elements. Hence a single
2016 loop for both const and side effects, rather than one loop
2017 each (with early outs). */
2018 if (!TREE_CONSTANT (val))
2019 constant_p = false;
2020 if (TREE_SIDE_EFFECTS (val))
2021 side_effects_p = true;
2024 TREE_SIDE_EFFECTS (c) = side_effects_p;
2025 TREE_CONSTANT (c) = constant_p;
2028 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2029 CONSTRUCTOR C. */
2031 void
2032 verify_constructor_flags (tree c)
2034 unsigned int i;
2035 tree val;
2036 bool constant_p = TREE_CONSTANT (c);
2037 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2038 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2040 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2042 if (constant_p && !TREE_CONSTANT (val))
2043 internal_error ("non-constant element in constant CONSTRUCTOR");
2044 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2045 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2049 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2050 are in the vec pointed to by VALS. */
2051 tree
2052 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2054 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2056 TREE_TYPE (c) = type;
2057 CONSTRUCTOR_ELTS (c) = vals;
2059 recompute_constructor_flags (c);
2061 return c;
2064 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2065 INDEX and VALUE. */
2066 tree
2067 build_constructor_single (tree type, tree index, tree value)
2069 vec<constructor_elt, va_gc> *v;
2070 constructor_elt elt = {index, value};
2072 vec_alloc (v, 1);
2073 v->quick_push (elt);
2075 return build_constructor (type, v);
2079 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2080 are in a list pointed to by VALS. */
2081 tree
2082 build_constructor_from_list (tree type, tree vals)
2084 tree t;
2085 vec<constructor_elt, va_gc> *v = NULL;
2087 if (vals)
2089 vec_alloc (v, list_length (vals));
2090 for (t = vals; t; t = TREE_CHAIN (t))
2091 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2094 return build_constructor (type, v);
2097 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2098 of elements, provided as index/value pairs. */
2100 tree
2101 build_constructor_va (tree type, int nelts, ...)
2103 vec<constructor_elt, va_gc> *v = NULL;
2104 va_list p;
2106 va_start (p, nelts);
2107 vec_alloc (v, nelts);
2108 while (nelts--)
2110 tree index = va_arg (p, tree);
2111 tree value = va_arg (p, tree);
2112 CONSTRUCTOR_APPEND_ELT (v, index, value);
2114 va_end (p);
2115 return build_constructor (type, v);
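/* A minimal usage sketch (not from the GCC sources): a CONSTRUCTOR for an
   assumed three-element integer ARRAY_TYPE, giving the indices explicitly.
   The varargs are consumed as NELTS index/value pairs, matching the loop
   above.  */
static tree
example_array_ctor (tree arr_type)
{
  return build_constructor_va (arr_type, 3,
			       size_int (0), build_int_cst (integer_type_node, 10),
			       size_int (1), build_int_cst (integer_type_node, 20),
			       size_int (2), build_int_cst (integer_type_node, 30));
}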
2118 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2120 tree
2121 build_clobber (tree type)
2123 tree clobber = build_constructor (type, NULL);
2124 TREE_THIS_VOLATILE (clobber) = true;
2125 return clobber;
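/* A minimal usage sketch (not from the GCC sources): a clobber is normally
   used as the right-hand side of an assignment that marks the end of a
   variable's lifetime; VAR is some local VAR_DECL supplied by the caller.  */
static gimple *
example_clobber_stmt (tree var)
{
  return gimple_build_assign (var, build_clobber (TREE_TYPE (var)));
}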
2128 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2130 tree
2131 build_fixed (tree type, FIXED_VALUE_TYPE f)
2133 tree v;
2134 FIXED_VALUE_TYPE *fp;
2136 v = make_node (FIXED_CST);
2137 fp = ggc_alloc<fixed_value> ();
2138 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2140 TREE_TYPE (v) = type;
2141 TREE_FIXED_CST_PTR (v) = fp;
2142 return v;
2145 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2147 tree
2148 build_real (tree type, REAL_VALUE_TYPE d)
2150 tree v;
2151 REAL_VALUE_TYPE *dp;
2152 int overflow = 0;
2154 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2155 Consider doing it via real_convert now. */
2157 v = make_node (REAL_CST);
2158 dp = ggc_alloc<real_value> ();
2159 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2161 TREE_TYPE (v) = type;
2162 TREE_REAL_CST_PTR (v) = dp;
2163 TREE_OVERFLOW (v) = overflow;
2164 return v;
2167 /* Like build_real, but first truncate D to the type. */
2169 tree
2170 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2172 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2175 /* Return a new REAL_CST node whose type is TYPE
2176 and whose value is the integer value of the INTEGER_CST node I. */
2178 REAL_VALUE_TYPE
2179 real_value_from_int_cst (const_tree type, const_tree i)
2181 REAL_VALUE_TYPE d;
2183 /* Clear all bits of the real value type so that we can later do
2184 bitwise comparisons to see if two values are the same. */
2185 memset (&d, 0, sizeof d);
2187 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2188 TYPE_SIGN (TREE_TYPE (i)));
2189 return d;
2192 /* Given a tree representing an integer constant I, return a tree
2193 representing the same value as a floating-point constant of type TYPE. */
2195 tree
2196 build_real_from_int_cst (tree type, const_tree i)
2198 tree v;
2199 int overflow = TREE_OVERFLOW (i);
2201 v = build_real (type, real_value_from_int_cst (type, i));
2203 TREE_OVERFLOW (v) |= overflow;
2204 return v;
2207 /* Return a newly constructed STRING_CST node whose value is
2208 the LEN characters at STR.
2209 Note that for a C string literal, LEN should include the trailing NUL.
2210 The TREE_TYPE is not initialized. */
2212 tree
2213 build_string (int len, const char *str)
2215 tree s;
2216 size_t length;
2218 /* Do not waste bytes provided by padding of struct tree_string. */
2219 length = len + offsetof (struct tree_string, str) + 1;
2221 record_node_allocation_statistics (STRING_CST, length);
2223 s = (tree) ggc_internal_alloc (length);
2225 memset (s, 0, sizeof (struct tree_typed));
2226 TREE_SET_CODE (s, STRING_CST);
2227 TREE_CONSTANT (s) = 1;
2228 TREE_STRING_LENGTH (s) = len;
2229 memcpy (s->string.str, str, len);
2230 s->string.str[len] = '\0';
2232 return s;
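/* A minimal usage sketch (not from the GCC sources): a STRING_CST for the
   C literal "hi".  LEN counts the trailing NUL, and the caller still has to
   attach a type; build_array_type_nelts with char_type_node is used here as
   one plausible choice.  */
static tree
example_string_cst (void)
{
  tree s = build_string (3, "hi");
  TREE_TYPE (s) = build_array_type_nelts (char_type_node, 3);
  return s;
}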
2235 /* Return a newly constructed COMPLEX_CST node whose value is
2236 specified by the real and imaginary parts REAL and IMAG.
2237 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2238 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2240 tree
2241 build_complex (tree type, tree real, tree imag)
2243 gcc_assert (CONSTANT_CLASS_P (real));
2244 gcc_assert (CONSTANT_CLASS_P (imag));
2246 tree t = make_node (COMPLEX_CST);
2248 TREE_REALPART (t) = real;
2249 TREE_IMAGPART (t) = imag;
2250 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2251 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2252 return t;
2255 /* Build a complex (inf +- 0i), such as for the result of cproj.
2256 TYPE is the complex tree type of the result. If NEG is true, the
2257 imaginary zero is negative. */
2259 tree
2260 build_complex_inf (tree type, bool neg)
2262 REAL_VALUE_TYPE rinf, rzero = dconst0;
2264 real_inf (&rinf);
2265 rzero.sign = neg;
2266 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2267 build_real (TREE_TYPE (type), rzero));
2270 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2271 element is set to 1. In particular, this is 1 + i for complex types. */
2273 tree
2274 build_each_one_cst (tree type)
2276 if (TREE_CODE (type) == COMPLEX_TYPE)
2278 tree scalar = build_one_cst (TREE_TYPE (type));
2279 return build_complex (type, scalar, scalar);
2281 else
2282 return build_one_cst (type);
2285 /* Return a constant of arithmetic type TYPE which is the
2286 multiplicative identity of the set TYPE. */
2288 tree
2289 build_one_cst (tree type)
2291 switch (TREE_CODE (type))
2293 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2294 case POINTER_TYPE: case REFERENCE_TYPE:
2295 case OFFSET_TYPE:
2296 return build_int_cst (type, 1);
2298 case REAL_TYPE:
2299 return build_real (type, dconst1);
2301 case FIXED_POINT_TYPE:
2302 /* We can only generate 1 for accum types. */
2303 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2304 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2306 case VECTOR_TYPE:
2308 tree scalar = build_one_cst (TREE_TYPE (type));
2310 return build_vector_from_val (type, scalar);
2313 case COMPLEX_TYPE:
2314 return build_complex (type,
2315 build_one_cst (TREE_TYPE (type)),
2316 build_zero_cst (TREE_TYPE (type)));
2318 default:
2319 gcc_unreachable ();
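/* A minimal usage sketch (not from the GCC sources): for a complex type the
   two "one" builders differ -- build_one_cst yields the multiplicative
   identity 1 + 0i, while build_each_one_cst yields 1 + 1i.  */
static void
example_complex_ones (void)
{
  tree ctype = complex_double_type_node;
  tree mul_id = build_one_cst (ctype);	       /* 1.0 + 0.0i */
  tree each_one = build_each_one_cst (ctype);  /* 1.0 + 1.0i */
  (void) mul_id;
  (void) each_one;
}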
2323 /* Return an integer of type TYPE containing all 1's in as much precision as
2324 it contains, or a complex or vector whose subparts are such integers. */
2326 tree
2327 build_all_ones_cst (tree type)
2329 if (TREE_CODE (type) == COMPLEX_TYPE)
2331 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2332 return build_complex (type, scalar, scalar);
2334 else
2335 return build_minus_one_cst (type);
2338 /* Return a constant of arithmetic type TYPE which is the
2339 negative of the multiplicative identity of the set TYPE. */
2341 tree
2342 build_minus_one_cst (tree type)
2344 switch (TREE_CODE (type))
2346 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2347 case POINTER_TYPE: case REFERENCE_TYPE:
2348 case OFFSET_TYPE:
2349 return build_int_cst (type, -1);
2351 case REAL_TYPE:
2352 return build_real (type, dconstm1);
2354 case FIXED_POINT_TYPE:
2355 /* We can only generate 1 for accum types. */
2356 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2357 return build_fixed (type,
2358 fixed_from_double_int (double_int_minus_one,
2359 SCALAR_TYPE_MODE (type)));
2361 case VECTOR_TYPE:
2363 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2365 return build_vector_from_val (type, scalar);
2368 case COMPLEX_TYPE:
2369 return build_complex (type,
2370 build_minus_one_cst (TREE_TYPE (type)),
2371 build_zero_cst (TREE_TYPE (type)));
2373 default:
2374 gcc_unreachable ();
2378 /* Build 0 constant of type TYPE. This is used by constructor folding
2379 and thus the constant should be represented in memory by
2380 zero(es). */
2382 tree
2383 build_zero_cst (tree type)
2385 switch (TREE_CODE (type))
2387 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2388 case POINTER_TYPE: case REFERENCE_TYPE:
2389 case OFFSET_TYPE: case NULLPTR_TYPE:
2390 return build_int_cst (type, 0);
2392 case REAL_TYPE:
2393 return build_real (type, dconst0);
2395 case FIXED_POINT_TYPE:
2396 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2398 case VECTOR_TYPE:
2400 tree scalar = build_zero_cst (TREE_TYPE (type));
2402 return build_vector_from_val (type, scalar);
2405 case COMPLEX_TYPE:
2407 tree zero = build_zero_cst (TREE_TYPE (type));
2409 return build_complex (type, zero, zero);
2412 default:
2413 if (!AGGREGATE_TYPE_P (type))
2414 return fold_convert (type, integer_zero_node);
2415 return build_constructor (type, NULL);
2420 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2422 tree
2423 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2425 tree t;
2426 size_t length = (offsetof (struct tree_binfo, base_binfos)
2427 + vec<tree, va_gc>::embedded_size (base_binfos));
2429 record_node_allocation_statistics (TREE_BINFO, length);
2431 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2433 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2435 TREE_SET_CODE (t, TREE_BINFO);
2437 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2439 return t;
2442 /* Create a CASE_LABEL_EXPR tree node and return it. */
2444 tree
2445 build_case_label (tree low_value, tree high_value, tree label_decl)
2447 tree t = make_node (CASE_LABEL_EXPR);
2449 TREE_TYPE (t) = void_type_node;
2450 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2452 CASE_LOW (t) = low_value;
2453 CASE_HIGH (t) = high_value;
2454 CASE_LABEL (t) = label_decl;
2455 CASE_CHAIN (t) = NULL_TREE;
2457 return t;
2460 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2461 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2462 The latter determines the length of the HOST_WIDE_INT vector. */
2464 tree
2465 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2467 tree t;
2468 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2469 + sizeof (struct tree_int_cst));
2471 gcc_assert (len);
2472 record_node_allocation_statistics (INTEGER_CST, length);
2474 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2476 TREE_SET_CODE (t, INTEGER_CST);
2477 TREE_INT_CST_NUNITS (t) = len;
2478 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2479 /* to_offset can only be applied to trees that are offset_int-sized
2480 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2481 must be exactly the precision of offset_int and so LEN is correct. */
2482 if (ext_len <= OFFSET_INT_ELTS)
2483 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2484 else
2485 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2487 TREE_CONSTANT (t) = 1;
2489 return t;
2492 /* Build a newly constructed TREE_VEC node of length LEN. */
2494 tree
2495 make_tree_vec (int len MEM_STAT_DECL)
2497 tree t;
2498 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2500 record_node_allocation_statistics (TREE_VEC, length);
2502 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2504 TREE_SET_CODE (t, TREE_VEC);
2505 TREE_VEC_LENGTH (t) = len;
2507 return t;
2510 /* Grow a TREE_VEC node to new length LEN. */
2512 tree
2513 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2515 gcc_assert (TREE_CODE (v) == TREE_VEC);
2517 int oldlen = TREE_VEC_LENGTH (v);
2518 gcc_assert (len > oldlen);
2520 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2521 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2523 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2525 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2527 TREE_VEC_LENGTH (v) = len;
2529 return v;
2532 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2533 fixed, and scalar, complex or vector. */
2535 bool
2536 zerop (const_tree expr)
2538 return (integer_zerop (expr)
2539 || real_zerop (expr)
2540 || fixed_zerop (expr));
2543 /* Return 1 if EXPR is the integer constant zero or a complex constant
2544 of zero, or a location wrapper for such a constant. */
2546 bool
2547 integer_zerop (const_tree expr)
2549 STRIP_ANY_LOCATION_WRAPPER (expr);
2551 switch (TREE_CODE (expr))
2553 case INTEGER_CST:
2554 return wi::to_wide (expr) == 0;
2555 case COMPLEX_CST:
2556 return (integer_zerop (TREE_REALPART (expr))
2557 && integer_zerop (TREE_IMAGPART (expr)));
2558 case VECTOR_CST:
2559 return (VECTOR_CST_NPATTERNS (expr) == 1
2560 && VECTOR_CST_DUPLICATE_P (expr)
2561 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2562 default:
2563 return false;
2567 /* Return 1 if EXPR is the integer constant one or the corresponding
2568 complex constant, or a location wrapper for such a constant. */
2570 bool
2571 integer_onep (const_tree expr)
2573 STRIP_ANY_LOCATION_WRAPPER (expr);
2575 switch (TREE_CODE (expr))
2577 case INTEGER_CST:
2578 return wi::eq_p (wi::to_widest (expr), 1);
2579 case COMPLEX_CST:
2580 return (integer_onep (TREE_REALPART (expr))
2581 && integer_zerop (TREE_IMAGPART (expr)));
2582 case VECTOR_CST:
2583 return (VECTOR_CST_NPATTERNS (expr) == 1
2584 && VECTOR_CST_DUPLICATE_P (expr)
2585 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2586 default:
2587 return false;
2591 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2592 return 1 if every piece is the integer constant one.
2593 Also return 1 for location wrappers for such a constant. */
2595 bool
2596 integer_each_onep (const_tree expr)
2598 STRIP_ANY_LOCATION_WRAPPER (expr);
2600 if (TREE_CODE (expr) == COMPLEX_CST)
2601 return (integer_onep (TREE_REALPART (expr))
2602 && integer_onep (TREE_IMAGPART (expr)));
2603 else
2604 return integer_onep (expr);
2607 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2608 it contains, or a complex or vector whose subparts are such integers,
2609 or a location wrapper for such a constant. */
2611 bool
2612 integer_all_onesp (const_tree expr)
2614 STRIP_ANY_LOCATION_WRAPPER (expr);
2616 if (TREE_CODE (expr) == COMPLEX_CST
2617 && integer_all_onesp (TREE_REALPART (expr))
2618 && integer_all_onesp (TREE_IMAGPART (expr)))
2619 return true;
2621 else if (TREE_CODE (expr) == VECTOR_CST)
2622 return (VECTOR_CST_NPATTERNS (expr) == 1
2623 && VECTOR_CST_DUPLICATE_P (expr)
2624 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2626 else if (TREE_CODE (expr) != INTEGER_CST)
2627 return false;
2629 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2630 == wi::to_wide (expr));
2633 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2634 for such a constant. */
2636 bool
2637 integer_minus_onep (const_tree expr)
2639 STRIP_ANY_LOCATION_WRAPPER (expr);
2641 if (TREE_CODE (expr) == COMPLEX_CST)
2642 return (integer_all_onesp (TREE_REALPART (expr))
2643 && integer_zerop (TREE_IMAGPART (expr)));
2644 else
2645 return integer_all_onesp (expr);
2648 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2649 one bit on), or a location wrapper for such a constant. */
2651 bool
2652 integer_pow2p (const_tree expr)
2654 STRIP_ANY_LOCATION_WRAPPER (expr);
2656 if (TREE_CODE (expr) == COMPLEX_CST
2657 && integer_pow2p (TREE_REALPART (expr))
2658 && integer_zerop (TREE_IMAGPART (expr)))
2659 return true;
2661 if (TREE_CODE (expr) != INTEGER_CST)
2662 return false;
2664 return wi::popcount (wi::to_wide (expr)) == 1;
2667 /* Return 1 if EXPR is an integer constant other than zero or a
2668 complex constant other than zero, or a location wrapper for such a
2669 constant. */
2671 bool
2672 integer_nonzerop (const_tree expr)
2674 STRIP_ANY_LOCATION_WRAPPER (expr);
2676 return ((TREE_CODE (expr) == INTEGER_CST
2677 && wi::to_wide (expr) != 0)
2678 || (TREE_CODE (expr) == COMPLEX_CST
2679 && (integer_nonzerop (TREE_REALPART (expr))
2680 || integer_nonzerop (TREE_IMAGPART (expr)))));
2683 /* Return 1 if EXPR is the integer constant one. For vector,
2684 return 1 if every piece is the integer constant minus one
2685 (representing the value TRUE).
2686 Also return 1 for location wrappers for such a constant. */
2688 bool
2689 integer_truep (const_tree expr)
2691 STRIP_ANY_LOCATION_WRAPPER (expr);
2693 if (TREE_CODE (expr) == VECTOR_CST)
2694 return integer_all_onesp (expr);
2695 return integer_onep (expr);
2698 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2699 for such a constant. */
2701 bool
2702 fixed_zerop (const_tree expr)
2704 STRIP_ANY_LOCATION_WRAPPER (expr);
2706 return (TREE_CODE (expr) == FIXED_CST
2707 && TREE_FIXED_CST (expr).data.is_zero ());
2710 /* Return the power of two represented by a tree node known to be a
2711 power of two. */
2714 tree_log2 (const_tree expr)
2716 if (TREE_CODE (expr) == COMPLEX_CST)
2717 return tree_log2 (TREE_REALPART (expr));
2719 return wi::exact_log2 (wi::to_wide (expr));
2722 /* Similar, but return the largest integer Y such that 2 ** Y is less
2723 than or equal to EXPR. */
2726 tree_floor_log2 (const_tree expr)
2728 if (TREE_CODE (expr) == COMPLEX_CST)
2729 return tree_log2 (TREE_REALPART (expr));
2731 return wi::floor_log2 (wi::to_wide (expr));
2734 /* Return number of known trailing zero bits in EXPR, or, if the value of
2735 EXPR is known to be zero, the precision of its type. */
2737 unsigned int
2738 tree_ctz (const_tree expr)
2740 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2741 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2742 return 0;
2744 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2745 switch (TREE_CODE (expr))
2747 case INTEGER_CST:
2748 ret1 = wi::ctz (wi::to_wide (expr));
2749 return MIN (ret1, prec);
2750 case SSA_NAME:
2751 ret1 = wi::ctz (get_nonzero_bits (expr));
2752 return MIN (ret1, prec);
2753 case PLUS_EXPR:
2754 case MINUS_EXPR:
2755 case BIT_IOR_EXPR:
2756 case BIT_XOR_EXPR:
2757 case MIN_EXPR:
2758 case MAX_EXPR:
2759 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2760 if (ret1 == 0)
2761 return ret1;
2762 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2763 return MIN (ret1, ret2);
2764 case POINTER_PLUS_EXPR:
2765 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2766 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2767 /* Second operand is sizetype, which could in theory be
2768 wider than the pointer's precision. Make sure we never
2769 return more than prec. */
2770 ret2 = MIN (ret2, prec);
2771 return MIN (ret1, ret2);
2772 case BIT_AND_EXPR:
2773 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2774 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2775 return MAX (ret1, ret2);
2776 case MULT_EXPR:
2777 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2778 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2779 return MIN (ret1 + ret2, prec);
2780 case LSHIFT_EXPR:
2781 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2782 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2783 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2785 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2786 return MIN (ret1 + ret2, prec);
2788 return ret1;
2789 case RSHIFT_EXPR:
2790 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2791 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2793 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2794 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2795 if (ret1 > ret2)
2796 return ret1 - ret2;
2798 return 0;
2799 case TRUNC_DIV_EXPR:
2800 case CEIL_DIV_EXPR:
2801 case FLOOR_DIV_EXPR:
2802 case ROUND_DIV_EXPR:
2803 case EXACT_DIV_EXPR:
2804 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2805 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2807 int l = tree_log2 (TREE_OPERAND (expr, 1));
2808 if (l >= 0)
2810 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2811 ret2 = l;
2812 if (ret1 > ret2)
2813 return ret1 - ret2;
2816 return 0;
2817 CASE_CONVERT:
2818 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2819 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2820 ret1 = prec;
2821 return MIN (ret1, prec);
2822 case SAVE_EXPR:
2823 return tree_ctz (TREE_OPERAND (expr, 0));
2824 case COND_EXPR:
2825 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2826 if (ret1 == 0)
2827 return 0;
2828 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2829 return MIN (ret1, ret2);
2830 case COMPOUND_EXPR:
2831 return tree_ctz (TREE_OPERAND (expr, 1));
2832 case ADDR_EXPR:
2833 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2834 if (ret1 > BITS_PER_UNIT)
2836 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2837 return MIN (ret1, prec);
2839 return 0;
2840 default:
2841 return 0;
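/* A minimal worked sketch (not from the GCC sources): tree_ctz on constants
   and on a constant product.  40 is 0b101000, so it has three trailing zero
   bits; for the MULT_EXPR the operand counts add, capped at the precision,
   giving 3 + 2 = 5.  */
static void
example_tree_ctz (void)
{
  tree c40 = build_int_cst (integer_type_node, 40);
  tree c4 = build_int_cst (integer_type_node, 4);
  unsigned int z1 = tree_ctz (c40);				/* 3 */
  unsigned int z2 = tree_ctz (build2 (MULT_EXPR, integer_type_node,
				      c40, c4));		/* 5 */
  (void) z1;
  (void) z2;
}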
2845 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2846 decimal float constants, so don't return 1 for them.
2847 Also return 1 for location wrappers around such a constant. */
2849 bool
2850 real_zerop (const_tree expr)
2852 STRIP_ANY_LOCATION_WRAPPER (expr);
2854 switch (TREE_CODE (expr))
2856 case REAL_CST:
2857 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2858 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2859 case COMPLEX_CST:
2860 return real_zerop (TREE_REALPART (expr))
2861 && real_zerop (TREE_IMAGPART (expr));
2862 case VECTOR_CST:
2864 /* Don't simply check for a duplicate because the predicate
2865 accepts both +0.0 and -0.0. */
2866 unsigned count = vector_cst_encoded_nelts (expr);
2867 for (unsigned int i = 0; i < count; ++i)
2868 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2869 return false;
2870 return true;
2872 default:
2873 return false;
2877 /* Return 1 if EXPR is the real constant one in real or complex form.
2878 Trailing zeroes matter for decimal float constants, so don't return
2879 1 for them.
2880 Also return 1 for location wrappers around such a constant. */
2882 bool
2883 real_onep (const_tree expr)
2885 STRIP_ANY_LOCATION_WRAPPER (expr);
2887 switch (TREE_CODE (expr))
2889 case REAL_CST:
2890 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2891 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2892 case COMPLEX_CST:
2893 return real_onep (TREE_REALPART (expr))
2894 && real_zerop (TREE_IMAGPART (expr));
2895 case VECTOR_CST:
2896 return (VECTOR_CST_NPATTERNS (expr) == 1
2897 && VECTOR_CST_DUPLICATE_P (expr)
2898 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2899 default:
2900 return false;
2904 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2905 matter for decimal float constants, so don't return 1 for them.
2906 Also return 1 for location wrappers around such a constant. */
2908 bool
2909 real_minus_onep (const_tree expr)
2911 STRIP_ANY_LOCATION_WRAPPER (expr);
2913 switch (TREE_CODE (expr))
2915 case REAL_CST:
2916 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2917 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2918 case COMPLEX_CST:
2919 return real_minus_onep (TREE_REALPART (expr))
2920 && real_zerop (TREE_IMAGPART (expr));
2921 case VECTOR_CST:
2922 return (VECTOR_CST_NPATTERNS (expr) == 1
2923 && VECTOR_CST_DUPLICATE_P (expr)
2924 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2925 default:
2926 return false;
2930 /* Nonzero if EXP is a constant or a cast of a constant. */
2932 bool
2933 really_constant_p (const_tree exp)
2935 /* This is not quite the same as STRIP_NOPS. It does more. */
2936 while (CONVERT_EXPR_P (exp)
2937 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2938 exp = TREE_OPERAND (exp, 0);
2939 return TREE_CONSTANT (exp);
2942 /* Return true if T holds a polynomial pointer difference, storing it in
2943 *VALUE if so. A true return means that T's precision is no greater
2944 than 64 bits, which is the largest address space we support, so *VALUE
2945 never loses precision. However, the signedness of the result does
2946 not necessarily match the signedness of T: sometimes an unsigned type
2947 like sizetype is used to encode a value that is actually negative. */
2949 bool
2950 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2952 if (!t)
2953 return false;
2954 if (TREE_CODE (t) == INTEGER_CST)
2956 if (!cst_and_fits_in_hwi (t))
2957 return false;
2958 *value = int_cst_value (t);
2959 return true;
2961 if (POLY_INT_CST_P (t))
2963 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2964 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2965 return false;
2966 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2967 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2968 return true;
2970 return false;
2973 poly_int64
2974 tree_to_poly_int64 (const_tree t)
2976 gcc_assert (tree_fits_poly_int64_p (t));
2977 if (POLY_INT_CST_P (t))
2978 return poly_int_cst_value (t).force_shwi ();
2979 return TREE_INT_CST_LOW (t);
2982 poly_uint64
2983 tree_to_poly_uint64 (const_tree t)
2985 gcc_assert (tree_fits_poly_uint64_p (t));
2986 if (POLY_INT_CST_P (t))
2987 return poly_int_cst_value (t).force_uhwi ();
2988 return TREE_INT_CST_LOW (t);
2991 /* Return first list element whose TREE_VALUE is ELEM.
2992 Return 0 if ELEM is not in LIST. */
2994 tree
2995 value_member (tree elem, tree list)
2997 while (list)
2999 if (elem == TREE_VALUE (list))
3000 return list;
3001 list = TREE_CHAIN (list);
3003 return NULL_TREE;
3006 /* Return first list element whose TREE_PURPOSE is ELEM.
3007 Return 0 if ELEM is not in LIST. */
3009 tree
3010 purpose_member (const_tree elem, tree list)
3012 while (list)
3014 if (elem == TREE_PURPOSE (list))
3015 return list;
3016 list = TREE_CHAIN (list);
3018 return NULL_TREE;
3021 /* Return true if ELEM is in V. */
3023 bool
3024 vec_member (const_tree elem, vec<tree, va_gc> *v)
3026 unsigned ix;
3027 tree t;
3028 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3029 if (elem == t)
3030 return true;
3031 return false;
3034 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3035 NULL_TREE. */
3037 tree
3038 chain_index (int idx, tree chain)
3040 for (; chain && idx > 0; --idx)
3041 chain = TREE_CHAIN (chain);
3042 return chain;
3045 /* Return nonzero if ELEM is part of the chain CHAIN. */
3047 bool
3048 chain_member (const_tree elem, const_tree chain)
3050 while (chain)
3052 if (elem == chain)
3053 return true;
3054 chain = DECL_CHAIN (chain);
3057 return false;
3060 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3061 We expect a null pointer to mark the end of the chain.
3062 This is the Lisp primitive `length'. */
3065 list_length (const_tree t)
3067 const_tree p = t;
3068 #ifdef ENABLE_TREE_CHECKING
3069 const_tree q = t;
3070 #endif
3071 int len = 0;
3073 while (p)
3075 p = TREE_CHAIN (p);
3076 #ifdef ENABLE_TREE_CHECKING
3077 if (len % 2)
3078 q = TREE_CHAIN (q);
3079 gcc_assert (p != q);
3080 #endif
3081 len++;
3084 return len;
3087 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3088 UNION_TYPE TYPE, or NULL_TREE if none. */
3090 tree
3091 first_field (const_tree type)
3093 tree t = TYPE_FIELDS (type);
3094 while (t && TREE_CODE (t) != FIELD_DECL)
3095 t = TREE_CHAIN (t);
3096 return t;
3099 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3100 UNION_TYPE TYPE, or NULL_TREE if none. */
3102 tree
3103 last_field (const_tree type)
3105 tree last = NULL_TREE;
3107 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3109 if (TREE_CODE (fld) != FIELD_DECL)
3110 continue;
3112 last = fld;
3115 return last;
3118 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3119 by modifying the last node in chain 1 to point to chain 2.
3120 This is the Lisp primitive `nconc'. */
3122 tree
3123 chainon (tree op1, tree op2)
3125 tree t1;
3127 if (!op1)
3128 return op2;
3129 if (!op2)
3130 return op1;
3132 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3133 continue;
3134 TREE_CHAIN (t1) = op2;
3136 #ifdef ENABLE_TREE_CHECKING
3138 tree t2;
3139 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3140 gcc_assert (t2 != t1);
3142 #endif
3144 return op1;
3147 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3149 tree
3150 tree_last (tree chain)
3152 tree next;
3153 if (chain)
3154 while ((next = TREE_CHAIN (chain)))
3155 chain = next;
3156 return chain;
3159 /* Reverse the order of elements in the chain T,
3160 and return the new head of the chain (old last element). */
3162 tree
3163 nreverse (tree t)
3165 tree prev = 0, decl, next;
3166 for (decl = t; decl; decl = next)
3168 /* We shouldn't be using this function to reverse BLOCK chains; we
3169 have blocks_nreverse for that. */
3170 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3171 next = TREE_CHAIN (decl);
3172 TREE_CHAIN (decl) = prev;
3173 prev = decl;
3175 return prev;
3178 /* Return a newly created TREE_LIST node whose
3179 purpose and value fields are PARM and VALUE. */
3181 tree
3182 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3184 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3185 TREE_PURPOSE (t) = parm;
3186 TREE_VALUE (t) = value;
3187 return t;
3190 /* Build a chain of TREE_LIST nodes from a vector. */
3192 tree
3193 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3195 tree ret = NULL_TREE;
3196 tree *pp = &ret;
3197 unsigned int i;
3198 tree t;
3199 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3201 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3202 pp = &TREE_CHAIN (*pp);
3204 return ret;
3207 /* Return a newly created TREE_LIST node whose
3208 purpose and value fields are PURPOSE and VALUE
3209 and whose TREE_CHAIN is CHAIN. */
3211 tree
3212 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3214 tree node;
3216 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3217 memset (node, 0, sizeof (struct tree_common));
3219 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3221 TREE_SET_CODE (node, TREE_LIST);
3222 TREE_CHAIN (node) = chain;
3223 TREE_PURPOSE (node) = purpose;
3224 TREE_VALUE (node) = value;
3225 return node;
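/* A minimal usage sketch (not from the GCC sources): the TREE_LIST
   primitives above compose in the usual Lisp-like way.  */
static void
example_tree_list (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  /* tree_cons pushes on the front: first (1), then (2 1).  */
  tree l = tree_cons (NULL_TREE, one, NULL_TREE);
  l = tree_cons (NULL_TREE, two, l);
  gcc_assert (list_length (l) == 2);
  gcc_assert (TREE_VALUE (tree_last (l)) == one);
  /* Reverse in place: the chain is now (1 2).  */
  l = nreverse (l);
  gcc_assert (TREE_VALUE (l) == one);
}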
3228 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3229 trees. */
3231 vec<tree, va_gc> *
3232 ctor_to_vec (tree ctor)
3234 vec<tree, va_gc> *vec;
3235 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3236 unsigned int ix;
3237 tree val;
3239 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3240 vec->quick_push (val);
3242 return vec;
3245 /* Return the size nominally occupied by an object of type TYPE
3246 when it resides in memory. The value is measured in units of bytes,
3247 and its data type is that normally used for type sizes
3248 (which is the first type created by make_signed_type or
3249 make_unsigned_type). */
3251 tree
3252 size_in_bytes_loc (location_t loc, const_tree type)
3254 tree t;
3256 if (type == error_mark_node)
3257 return integer_zero_node;
3259 type = TYPE_MAIN_VARIANT (type);
3260 t = TYPE_SIZE_UNIT (type);
3262 if (t == 0)
3264 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3265 return size_zero_node;
3268 return t;
3271 /* Return the size of TYPE (in bytes) as a wide integer
3272 or return -1 if the size can vary or is larger than an integer. */
3274 HOST_WIDE_INT
3275 int_size_in_bytes (const_tree type)
3277 tree t;
3279 if (type == error_mark_node)
3280 return 0;
3282 type = TYPE_MAIN_VARIANT (type);
3283 t = TYPE_SIZE_UNIT (type);
3285 if (t && tree_fits_uhwi_p (t))
3286 return TREE_INT_CST_LOW (t);
3287 else
3288 return -1;
3291 /* Return the maximum size of TYPE (in bytes) as a wide integer
3292 or return -1 if the size can vary or is larger than an integer. */
3294 HOST_WIDE_INT
3295 max_int_size_in_bytes (const_tree type)
3297 HOST_WIDE_INT size = -1;
3298 tree size_tree;
3300 /* If this is an array type, check for a possible MAX_SIZE attached. */
3302 if (TREE_CODE (type) == ARRAY_TYPE)
3304 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3306 if (size_tree && tree_fits_uhwi_p (size_tree))
3307 size = tree_to_uhwi (size_tree);
3310 /* If we still haven't been able to get a size, see if the language
3311 can compute a maximum size. */
3313 if (size == -1)
3315 size_tree = lang_hooks.types.max_size (type);
3317 if (size_tree && tree_fits_uhwi_p (size_tree))
3318 size = tree_to_uhwi (size_tree);
3321 return size;
3324 /* Return the bit position of FIELD, in bits from the start of the record.
3325 This is a tree of type bitsizetype. */
3327 tree
3328 bit_position (const_tree field)
3330 return bit_from_pos (DECL_FIELD_OFFSET (field),
3331 DECL_FIELD_BIT_OFFSET (field));
3334 /* Return the byte position of FIELD, in bytes from the start of the record.
3335 This is a tree of type sizetype. */
3337 tree
3338 byte_position (const_tree field)
3340 return byte_from_pos (DECL_FIELD_OFFSET (field),
3341 DECL_FIELD_BIT_OFFSET (field));
3344 /* Likewise, but return as an integer. It must be representable in
3345 that way (since it could be a signed value, we don't have the
3346 option of returning -1 like int_size_in_bytes can). */
3348 HOST_WIDE_INT
3349 int_byte_position (const_tree field)
3351 return tree_to_shwi (byte_position (field));
3354 /* Return the strictest alignment, in bits, that T is known to have. */
3356 unsigned int
3357 expr_align (const_tree t)
3359 unsigned int align0, align1;
3361 switch (TREE_CODE (t))
3363 CASE_CONVERT: case NON_LVALUE_EXPR:
3364 /* If we have conversions, we know that the alignment of the
3365 object must meet each of the alignments of the types. */
3366 align0 = expr_align (TREE_OPERAND (t, 0));
3367 align1 = TYPE_ALIGN (TREE_TYPE (t));
3368 return MAX (align0, align1);
3370 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3371 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3372 case CLEANUP_POINT_EXPR:
3373 /* These don't change the alignment of an object. */
3374 return expr_align (TREE_OPERAND (t, 0));
3376 case COND_EXPR:
3377 /* The best we can do is say that the alignment is the least aligned
3378 of the two arms. */
3379 align0 = expr_align (TREE_OPERAND (t, 1));
3380 align1 = expr_align (TREE_OPERAND (t, 2));
3381 return MIN (align0, align1);
3383 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3384 meaningfully, it's always 1. */
3385 case LABEL_DECL: case CONST_DECL:
3386 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3387 case FUNCTION_DECL:
3388 gcc_assert (DECL_ALIGN (t) != 0);
3389 return DECL_ALIGN (t);
3391 default:
3392 break;
3395 /* Otherwise take the alignment from that of the type. */
3396 return TYPE_ALIGN (TREE_TYPE (t));
3399 /* Return, as a tree node, the number of elements for TYPE (which is an
3400 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3402 tree
3403 array_type_nelts (const_tree type)
3405 tree index_type, min, max;
3407 /* If they did it with unspecified bounds, then we should have already
3408 given an error about it before we got here. */
3409 if (! TYPE_DOMAIN (type))
3410 return error_mark_node;
3412 index_type = TYPE_DOMAIN (type);
3413 min = TYPE_MIN_VALUE (index_type);
3414 max = TYPE_MAX_VALUE (index_type);
3416 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3417 if (!max)
3418 return error_mark_node;
3420 return (integer_zerop (min)
3421 ? max
3422 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3425 /* If arg is static -- a reference to an object in static storage -- then
3426 return the object. This is not the same as the C meaning of `static'.
3427 If arg isn't static, return NULL. */
3429 tree
3430 staticp (tree arg)
3432 switch (TREE_CODE (arg))
3434 case FUNCTION_DECL:
3435 /* Nested functions are static, even though taking their address will
3436 involve a trampoline as we unnest the nested function and create
3437 the trampoline on the tree level. */
3438 return arg;
3440 case VAR_DECL:
3441 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3442 && ! DECL_THREAD_LOCAL_P (arg)
3443 && ! DECL_DLLIMPORT_P (arg)
3444 ? arg : NULL);
3446 case CONST_DECL:
3447 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3448 ? arg : NULL);
3450 case CONSTRUCTOR:
3451 return TREE_STATIC (arg) ? arg : NULL;
3453 case LABEL_DECL:
3454 case STRING_CST:
3455 return arg;
3457 case COMPONENT_REF:
3458 /* If the thing being referenced is not a field, then it is
3459 something language specific. */
3460 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3462 /* If we are referencing a bitfield, we can't evaluate an
3463 ADDR_EXPR at compile time and so it isn't a constant. */
3464 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3465 return NULL;
3467 return staticp (TREE_OPERAND (arg, 0));
3469 case BIT_FIELD_REF:
3470 return NULL;
3472 case INDIRECT_REF:
3473 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3475 case ARRAY_REF:
3476 case ARRAY_RANGE_REF:
3477 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3478 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3479 return staticp (TREE_OPERAND (arg, 0));
3480 else
3481 return NULL;
3483 case COMPOUND_LITERAL_EXPR:
3484 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3486 default:
3487 return NULL;
3494 /* Return whether OP is a DECL whose address is function-invariant. */
3496 bool
3497 decl_address_invariant_p (const_tree op)
3499 /* The conditions below are slightly less strict than the one in
3500 staticp. */
3502 switch (TREE_CODE (op))
3504 case PARM_DECL:
3505 case RESULT_DECL:
3506 case LABEL_DECL:
3507 case FUNCTION_DECL:
3508 return true;
3510 case VAR_DECL:
3511 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3512 || DECL_THREAD_LOCAL_P (op)
3513 || DECL_CONTEXT (op) == current_function_decl
3514 || decl_function_context (op) == current_function_decl)
3515 return true;
3516 break;
3518 case CONST_DECL:
3519 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3520 || decl_function_context (op) == current_function_decl)
3521 return true;
3522 break;
3524 default:
3525 break;
3528 return false;
3531 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3533 bool
3534 decl_address_ip_invariant_p (const_tree op)
3536 /* The conditions below are slightly less strict than the one in
3537 staticp. */
3539 switch (TREE_CODE (op))
3541 case LABEL_DECL:
3542 case FUNCTION_DECL:
3543 case STRING_CST:
3544 return true;
3546 case VAR_DECL:
3547 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3548 && !DECL_DLLIMPORT_P (op))
3549 || DECL_THREAD_LOCAL_P (op))
3550 return true;
3551 break;
3553 case CONST_DECL:
3554 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3555 return true;
3556 break;
3558 default:
3559 break;
3562 return false;
3566 /* Return true if T is function-invariant (internal function, does
3567 not handle arithmetic; that's handled in skip_simple_arithmetic and
3568 tree_invariant_p). */
3570 static bool
3571 tree_invariant_p_1 (tree t)
3573 tree op;
3575 if (TREE_CONSTANT (t)
3576 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3577 return true;
3579 switch (TREE_CODE (t))
3581 case SAVE_EXPR:
3582 return true;
3584 case ADDR_EXPR:
3585 op = TREE_OPERAND (t, 0);
3586 while (handled_component_p (op))
3588 switch (TREE_CODE (op))
3590 case ARRAY_REF:
3591 case ARRAY_RANGE_REF:
3592 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3593 || TREE_OPERAND (op, 2) != NULL_TREE
3594 || TREE_OPERAND (op, 3) != NULL_TREE)
3595 return false;
3596 break;
3598 case COMPONENT_REF:
3599 if (TREE_OPERAND (op, 2) != NULL_TREE)
3600 return false;
3601 break;
3603 default:;
3605 op = TREE_OPERAND (op, 0);
3608 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3610 default:
3611 break;
3614 return false;
3617 /* Return true if T is function-invariant. */
3619 bool
3620 tree_invariant_p (tree t)
3622 tree inner = skip_simple_arithmetic (t);
3623 return tree_invariant_p_1 (inner);
3626 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3627 Do this to any expression which may be used in more than one place,
3628 but must be evaluated only once.
3630 Normally, expand_expr would reevaluate the expression each time.
3631 Calling save_expr produces something that is evaluated and recorded
3632 the first time expand_expr is called on it. Subsequent calls to
3633 expand_expr just reuse the recorded value.
3635 The call to expand_expr that generates code that actually computes
3636 the value is the first call *at compile time*. Subsequent calls
3637 *at compile time* generate code to use the saved value.
3638 This produces correct result provided that *at run time* control
3639 always flows through the insns made by the first expand_expr
3640 before reaching the other places where the save_expr was evaluated.
3641 You, the caller of save_expr, must make sure this is so.
3643 Constants, and certain read-only nodes, are returned with no
3644 SAVE_EXPR because that is safe. Expressions containing placeholders
3645 are not touched; see tree.def for an explanation of what these
3646 are used for. */
3648 tree
3649 save_expr (tree expr)
3651 tree inner;
3653 /* If the tree evaluates to a constant, then we don't want to hide that
3654 fact (i.e. this allows further folding, and direct checks for constants).
3655 However, a read-only object that has side effects cannot be bypassed.
3656 Since it is no problem to reevaluate literals, we just return the
3657 literal node. */
3658 inner = skip_simple_arithmetic (expr);
3659 if (TREE_CODE (inner) == ERROR_MARK)
3660 return inner;
3662 if (tree_invariant_p_1 (inner))
3663 return expr;
3665 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3666 it means that the size or offset of some field of an object depends on
3667 the value within another field.
3669 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3670 and some variable since it would then need to be both evaluated once and
3671 evaluated more than once. Front-ends must assure this case cannot
3672 happen by surrounding any such subexpressions in their own SAVE_EXPR
3673 and forcing evaluation at the proper time. */
3674 if (contains_placeholder_p (inner))
3675 return expr;
3677 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3679 /* This expression might be placed ahead of a jump to ensure that the
3680 value was computed on both sides of the jump. So make sure it isn't
3681 eliminated as dead. */
3682 TREE_SIDE_EFFECTS (expr) = 1;
3683 return expr;
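/* A minimal usage sketch (not from the GCC sources): wrapping a caller
   supplied expression in a SAVE_EXPR so that using it twice in the result
   still evaluates it only once at run time.  */
static tree
example_square (tree expr)
{
  tree once = save_expr (expr);
  return build2 (MULT_EXPR, TREE_TYPE (once), once, once);
}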
3686 /* Look inside EXPR into any simple arithmetic operations. Return the
3687 outermost non-arithmetic or non-invariant node. */
3689 tree
3690 skip_simple_arithmetic (tree expr)
3692 /* We don't care about whether this can be used as an lvalue in this
3693 context. */
3694 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3695 expr = TREE_OPERAND (expr, 0);
3697 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3698 a constant, it will be more efficient to not make another SAVE_EXPR since
3699 it will allow better simplification and GCSE will be able to merge the
3700 computations if they actually occur. */
3701 while (true)
3703 if (UNARY_CLASS_P (expr))
3704 expr = TREE_OPERAND (expr, 0);
3705 else if (BINARY_CLASS_P (expr))
3707 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3708 expr = TREE_OPERAND (expr, 0);
3709 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3710 expr = TREE_OPERAND (expr, 1);
3711 else
3712 break;
3714 else
3715 break;
3718 return expr;
3721 /* Look inside EXPR into simple arithmetic operations involving constants.
3722 Return the outermost non-arithmetic or non-constant node. */
3724 tree
3725 skip_simple_constant_arithmetic (tree expr)
3727 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3728 expr = TREE_OPERAND (expr, 0);
3730 while (true)
3732 if (UNARY_CLASS_P (expr))
3733 expr = TREE_OPERAND (expr, 0);
3734 else if (BINARY_CLASS_P (expr))
3736 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3737 expr = TREE_OPERAND (expr, 0);
3738 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3739 expr = TREE_OPERAND (expr, 1);
3740 else
3741 break;
3743 else
3744 break;
3747 return expr;
3750 /* Return which tree structure is used by T. */
3752 enum tree_node_structure_enum
3753 tree_node_structure (const_tree t)
3755 const enum tree_code code = TREE_CODE (t);
3756 return tree_node_structure_for_code (code);
3759 /* Set various status flags when building a CALL_EXPR object T. */
3761 static void
3762 process_call_operands (tree t)
3764 bool side_effects = TREE_SIDE_EFFECTS (t);
3765 bool read_only = false;
3766 int i = call_expr_flags (t);
3768 /* Calls have side-effects, except those to const or pure functions. */
3769 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3770 side_effects = true;
3771 /* Propagate TREE_READONLY of arguments for const functions. */
3772 if (i & ECF_CONST)
3773 read_only = true;
3775 if (!side_effects || read_only)
3776 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3778 tree op = TREE_OPERAND (t, i);
3779 if (op && TREE_SIDE_EFFECTS (op))
3780 side_effects = true;
3781 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3782 read_only = false;
3785 TREE_SIDE_EFFECTS (t) = side_effects;
3786 TREE_READONLY (t) = read_only;
3789 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3790 size or offset that depends on a field within a record. */
3792 bool
3793 contains_placeholder_p (const_tree exp)
3795 enum tree_code code;
3797 if (!exp)
3798 return 0;
3800 code = TREE_CODE (exp);
3801 if (code == PLACEHOLDER_EXPR)
3802 return 1;
3804 switch (TREE_CODE_CLASS (code))
3806 case tcc_reference:
3807 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3808 position computations since they will be converted into a
3809 WITH_RECORD_EXPR involving the reference, which we assume
3810 here will be valid. */
3811 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3813 case tcc_exceptional:
3814 if (code == TREE_LIST)
3815 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3816 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3817 break;
3819 case tcc_unary:
3820 case tcc_binary:
3821 case tcc_comparison:
3822 case tcc_expression:
3823 switch (code)
3825 case COMPOUND_EXPR:
3826 /* Ignoring the first operand isn't quite right, but works best. */
3827 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3829 case COND_EXPR:
3830 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3831 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3832 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3834 case SAVE_EXPR:
3835 /* The save_expr function never wraps anything containing
3836 a PLACEHOLDER_EXPR. */
3837 return 0;
3839 default:
3840 break;
3843 switch (TREE_CODE_LENGTH (code))
3845 case 1:
3846 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3847 case 2:
3848 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3849 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3850 default:
3851 return 0;
3854 case tcc_vl_exp:
3855 switch (code)
3857 case CALL_EXPR:
3859 const_tree arg;
3860 const_call_expr_arg_iterator iter;
3861 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3862 if (CONTAINS_PLACEHOLDER_P (arg))
3863 return 1;
3864 return 0;
3866 default:
3867 return 0;
3870 default:
3871 return 0;
3873 return 0;
3876 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3877 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3878 field positions. */
3880 static bool
3881 type_contains_placeholder_1 (const_tree type)
3883 /* If the size contains a placeholder or the parent type (component type in
3884 the case of arrays) type involves a placeholder, this type does. */
3885 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3886 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3887 || (!POINTER_TYPE_P (type)
3888 && TREE_TYPE (type)
3889 && type_contains_placeholder_p (TREE_TYPE (type))))
3890 return true;
3892 /* Now do type-specific checks. Note that the last part of the check above
3893 greatly limits what we have to do below. */
3894 switch (TREE_CODE (type))
3896 case VOID_TYPE:
3897 case COMPLEX_TYPE:
3898 case ENUMERAL_TYPE:
3899 case BOOLEAN_TYPE:
3900 case POINTER_TYPE:
3901 case OFFSET_TYPE:
3902 case REFERENCE_TYPE:
3903 case METHOD_TYPE:
3904 case FUNCTION_TYPE:
3905 case VECTOR_TYPE:
3906 case NULLPTR_TYPE:
3907 return false;
3909 case INTEGER_TYPE:
3910 case REAL_TYPE:
3911 case FIXED_POINT_TYPE:
3912 /* Here we just check the bounds. */
3913 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3914 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3916 case ARRAY_TYPE:
3917 /* We have already checked the component type above, so just check
3918 the domain type. Flexible array members have a null domain. */
3919 return TYPE_DOMAIN (type) ?
3920 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3922 case RECORD_TYPE:
3923 case UNION_TYPE:
3924 case QUAL_UNION_TYPE:
3926 tree field;
3928 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3929 if (TREE_CODE (field) == FIELD_DECL
3930 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3931 || (TREE_CODE (type) == QUAL_UNION_TYPE
3932 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3933 || type_contains_placeholder_p (TREE_TYPE (field))))
3934 return true;
3936 return false;
3939 default:
3940 gcc_unreachable ();
3944 /* Wrapper around above function used to cache its result. */
3946 bool
3947 type_contains_placeholder_p (tree type)
3949 bool result;
3951 /* If the contains_placeholder_bits field has been initialized,
3952 then we know the answer. */
3953 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3954 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3956 /* Indicate that we've seen this type node, and the answer is false.
3957 This is what we want to return if we run into recursion via fields. */
3958 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3960 /* Compute the real value. */
3961 result = type_contains_placeholder_1 (type);
3963 /* Store the real value. */
3964 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3966 return result;
3969 /* Push tree EXP onto vector QUEUE if it is not already present. */
3971 static void
3972 push_without_duplicates (tree exp, vec<tree> *queue)
3974 unsigned int i;
3975 tree iter;
3977 FOR_EACH_VEC_ELT (*queue, i, iter)
3978 if (simple_cst_equal (iter, exp) == 1)
3979 break;
3981 if (!iter)
3982 queue->safe_push (exp);
3985 /* Given a tree EXP, find all occurrences of references to fields
3986 in a PLACEHOLDER_EXPR and place them in vector REFS without
3987 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3988 we assume here that EXP contains only arithmetic expressions
3989 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3990 argument list. */
3992 void
3993 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3995 enum tree_code code = TREE_CODE (exp);
3996 tree inner;
3997 int i;
3999 /* We handle TREE_LIST and COMPONENT_REF separately. */
4000 if (code == TREE_LIST)
4002 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4003 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4005 else if (code == COMPONENT_REF)
4007 for (inner = TREE_OPERAND (exp, 0);
4008 REFERENCE_CLASS_P (inner);
4009 inner = TREE_OPERAND (inner, 0))
4012 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4013 push_without_duplicates (exp, refs);
4014 else
4015 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4017 else
4018 switch (TREE_CODE_CLASS (code))
4020 case tcc_constant:
4021 break;
4023 case tcc_declaration:
4024 /* Variables allocated to static storage can stay. */
4025 if (!TREE_STATIC (exp))
4026 push_without_duplicates (exp, refs);
4027 break;
4029 case tcc_expression:
4030 /* This is the pattern built in ada/make_aligning_type. */
4031 if (code == ADDR_EXPR
4032 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4034 push_without_duplicates (exp, refs);
4035 break;
4038 /* Fall through. */
4040 case tcc_exceptional:
4041 case tcc_unary:
4042 case tcc_binary:
4043 case tcc_comparison:
4044 case tcc_reference:
4045 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4046 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4047 break;
4049 case tcc_vl_exp:
4050 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4051 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4052 break;
4054 default:
4055 gcc_unreachable ();
4059 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4060 return a tree with all occurrences of references to F in a
4061 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4062 CONST_DECLs. Note that we assume here that EXP contains only
4063 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4064 occurring only in their argument list. */
4066 tree
4067 substitute_in_expr (tree exp, tree f, tree r)
4069 enum tree_code code = TREE_CODE (exp);
4070 tree op0, op1, op2, op3;
4071 tree new_tree;
4073 /* We handle TREE_LIST and COMPONENT_REF separately. */
4074 if (code == TREE_LIST)
4076 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4077 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4078 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4079 return exp;
4081 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4083 else if (code == COMPONENT_REF)
4085 tree inner;
4087 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4088 and it is the right field, replace it with R. */
4089 for (inner = TREE_OPERAND (exp, 0);
4090 REFERENCE_CLASS_P (inner);
4091 inner = TREE_OPERAND (inner, 0))
4094 /* The field. */
4095 op1 = TREE_OPERAND (exp, 1);
4097 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4098 return r;
4100 /* If this expression hasn't been completed yet, leave it alone. */
4101 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4102 return exp;
4104 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4105 if (op0 == TREE_OPERAND (exp, 0))
4106 return exp;
4108 new_tree
4109 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4111 else
4112 switch (TREE_CODE_CLASS (code))
4114 case tcc_constant:
4115 return exp;
4117 case tcc_declaration:
4118 if (exp == f)
4119 return r;
4120 else
4121 return exp;
4123 case tcc_expression:
4124 if (exp == f)
4125 return r;
4127 /* Fall through. */
4129 case tcc_exceptional:
4130 case tcc_unary:
4131 case tcc_binary:
4132 case tcc_comparison:
4133 case tcc_reference:
4134 switch (TREE_CODE_LENGTH (code))
4136 case 0:
4137 return exp;
4139 case 1:
4140 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4141 if (op0 == TREE_OPERAND (exp, 0))
4142 return exp;
4144 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4145 break;
4147 case 2:
4148 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4149 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4151 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4152 return exp;
4154 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4155 break;
4157 case 3:
4158 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4159 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4160 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4162 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4163 && op2 == TREE_OPERAND (exp, 2))
4164 return exp;
4166 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4167 break;
4169 case 4:
4170 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4171 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4172 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4173 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4175 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4176 && op2 == TREE_OPERAND (exp, 2)
4177 && op3 == TREE_OPERAND (exp, 3))
4178 return exp;
4180 new_tree
4181 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4182 break;
4184 default:
4185 gcc_unreachable ();
4187 break;
4189 case tcc_vl_exp:
4191 int i;
4193 new_tree = NULL_TREE;
4195 /* If we are trying to replace F with a constant or with another
4196 instance of one of the arguments of the call, inline back
4197 functions which do nothing else than computing a value from
4198 the arguments they are passed. This makes it possible to
4199 fold partially or entirely the replacement expression. */
4200 if (code == CALL_EXPR)
4202 bool maybe_inline = false;
4203 if (CONSTANT_CLASS_P (r))
4204 maybe_inline = true;
4205 else
4206 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4207 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4209 maybe_inline = true;
4210 break;
4212 if (maybe_inline)
4214 tree t = maybe_inline_call_in_expr (exp);
4215 if (t)
4216 return SUBSTITUTE_IN_EXPR (t, f, r);
4220 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4222 tree op = TREE_OPERAND (exp, i);
4223 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4224 if (new_op != op)
4226 if (!new_tree)
4227 new_tree = copy_node (exp);
4228 TREE_OPERAND (new_tree, i) = new_op;
4232 if (new_tree)
4234 new_tree = fold (new_tree);
4235 if (TREE_CODE (new_tree) == CALL_EXPR)
4236 process_call_operands (new_tree);
4238 else
4239 return exp;
4241 break;
4243 default:
4244 gcc_unreachable ();
4247 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4249 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4250 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4252 return new_tree;
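/* Illustrative sketch only (not compiled): a typical use of the
   SUBSTITUTE_IN_EXPR wrapper from tree.h.  The variables `size_expr',
   `field' and `repl' are hypothetical.  */
#if 0
  /* Rewrite a self-referential size expression, replacing every reference
     to FIELD (reached through a PLACEHOLDER_EXPR) with REPL; the result is
     folded as it is rebuilt.  */
  tree folded_size = SUBSTITUTE_IN_EXPR (size_expr, field, repl);
#endif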
4255 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4256 for it within OBJ, a tree that is an object or a chain of references. */
4258 tree
4259 substitute_placeholder_in_expr (tree exp, tree obj)
4261 enum tree_code code = TREE_CODE (exp);
4262 tree op0, op1, op2, op3;
4263 tree new_tree;
4265 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4266 in the chain of OBJ. */
4267 if (code == PLACEHOLDER_EXPR)
4269 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4270 tree elt;
4272 for (elt = obj; elt != 0;
4273 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4274 || TREE_CODE (elt) == COND_EXPR)
4275 ? TREE_OPERAND (elt, 1)
4276 : (REFERENCE_CLASS_P (elt)
4277 || UNARY_CLASS_P (elt)
4278 || BINARY_CLASS_P (elt)
4279 || VL_EXP_CLASS_P (elt)
4280 || EXPRESSION_CLASS_P (elt))
4281 ? TREE_OPERAND (elt, 0) : 0))
4282 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4283 return elt;
4285 for (elt = obj; elt != 0;
4286 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4287 || TREE_CODE (elt) == COND_EXPR)
4288 ? TREE_OPERAND (elt, 1)
4289 : (REFERENCE_CLASS_P (elt)
4290 || UNARY_CLASS_P (elt)
4291 || BINARY_CLASS_P (elt)
4292 || VL_EXP_CLASS_P (elt)
4293 || EXPRESSION_CLASS_P (elt))
4294 ? TREE_OPERAND (elt, 0) : 0))
4295 if (POINTER_TYPE_P (TREE_TYPE (elt))
4296 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4297 == need_type))
4298 return fold_build1 (INDIRECT_REF, need_type, elt);
4300 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4301 survives until RTL generation, there will be an error. */
4302 return exp;
4305 /* TREE_LIST is special because we need to look at TREE_VALUE
4306 and TREE_CHAIN, not TREE_OPERANDS. */
4307 else if (code == TREE_LIST)
4309 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4310 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4311 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4312 return exp;
4314 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4316 else
4317 switch (TREE_CODE_CLASS (code))
4319 case tcc_constant:
4320 case tcc_declaration:
4321 return exp;
4323 case tcc_exceptional:
4324 case tcc_unary:
4325 case tcc_binary:
4326 case tcc_comparison:
4327 case tcc_expression:
4328 case tcc_reference:
4329 case tcc_statement:
4330 switch (TREE_CODE_LENGTH (code))
4332 case 0:
4333 return exp;
4335 case 1:
4336 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4337 if (op0 == TREE_OPERAND (exp, 0))
4338 return exp;
4340 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4341 break;
4343 case 2:
4344 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4345 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4347 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4348 return exp;
4350 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4351 break;
4353 case 3:
4354 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4355 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4356 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4358 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4359 && op2 == TREE_OPERAND (exp, 2))
4360 return exp;
4362 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4363 break;
4365 case 4:
4366 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4367 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4368 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4369 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4371 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4372 && op2 == TREE_OPERAND (exp, 2)
4373 && op3 == TREE_OPERAND (exp, 3))
4374 return exp;
4376 new_tree
4377 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4378 break;
4380 default:
4381 gcc_unreachable ();
4383 break;
4385 case tcc_vl_exp:
4387 int i;
4389 new_tree = NULL_TREE;
4391 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4393 tree op = TREE_OPERAND (exp, i);
4394 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4395 if (new_op != op)
4397 if (!new_tree)
4398 new_tree = copy_node (exp);
4399 TREE_OPERAND (new_tree, i) = new_op;
4403 if (new_tree)
4405 new_tree = fold (new_tree);
4406 if (TREE_CODE (new_tree) == CALL_EXPR)
4407 process_call_operands (new_tree);
4409 else
4410 return exp;
4412 break;
4414 default:
4415 gcc_unreachable ();
4418 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4420 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4421 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4423 return new_tree;
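/* Illustrative sketch only (not compiled): resolving PLACEHOLDER_EXPRs
   against a concrete object.  The variables `type' and `obj' are
   hypothetical.  */
#if 0
  /* For a variable-sized TYPE whose TYPE_SIZE refers to the containing
     object via PLACEHOLDER_EXPRs, compute the size of the particular
     object OBJ.  */
  tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);
#endif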
4427 /* Subroutine of stabilize_reference; this is called for subtrees of
4428 references. Any expression with side-effects must be put in a SAVE_EXPR
4429 to ensure that it is only evaluated once.
4431 We don't put SAVE_EXPR nodes around everything, because assigning very
4432 simple expressions to temporaries causes us to miss good opportunities
4433 for optimizations. Among other things, the opportunity to fold in the
4434 addition of a constant into an addressing mode often gets lost, e.g.
4435 "y[i+1] += x;". In general, we take the approach that we should not make
4436 an assignment unless we are forced into it - i.e., that any non-side effect
4437 operator should be allowed, and that cse should take care of coalescing
4438 multiple utterances of the same expression should that prove fruitful. */
4440 static tree
4441 stabilize_reference_1 (tree e)
4443 tree result;
4444 enum tree_code code = TREE_CODE (e);
4446 /* We cannot ignore const expressions because it might be a reference
4447      to a const array whose index contains side-effects.  But we can
4448      ignore things that are actual constants or that have already been
4449 handled by this function. */
4451 if (tree_invariant_p (e))
4452 return e;
4454 switch (TREE_CODE_CLASS (code))
4456 case tcc_exceptional:
4457 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4458 have side-effects. */
4459 if (code == STATEMENT_LIST)
4460 return save_expr (e);
4461 /* FALLTHRU */
4462 case tcc_type:
4463 case tcc_declaration:
4464 case tcc_comparison:
4465 case tcc_statement:
4466 case tcc_expression:
4467 case tcc_reference:
4468 case tcc_vl_exp:
4469 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4470 so that it will only be evaluated once. */
4471 /* The reference (r) and comparison (<) classes could be handled as
4472 below, but it is generally faster to only evaluate them once. */
4473 if (TREE_SIDE_EFFECTS (e))
4474 return save_expr (e);
4475 return e;
4477 case tcc_constant:
4478 /* Constants need no processing. In fact, we should never reach
4479 here. */
4480 return e;
4482 case tcc_binary:
4483 /* Division is slow and tends to be compiled with jumps,
4484 especially the division by powers of 2 that is often
4485 found inside of an array reference. So do it just once. */
4486 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4487 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4488 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4489 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4490 return save_expr (e);
4491 /* Recursively stabilize each operand. */
4492 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4493 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4494 break;
4496 case tcc_unary:
4497 /* Recursively stabilize each operand. */
4498 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4499 break;
4501 default:
4502 gcc_unreachable ();
4505 TREE_TYPE (result) = TREE_TYPE (e);
4506 TREE_READONLY (result) = TREE_READONLY (e);
4507 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4508 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4510 return result;
4513 /* Stabilize a reference so that we can use it any number of times
4514 without causing its operands to be evaluated more than once.
4515 Returns the stabilized reference. This works by means of save_expr,
4516 so see the caveats in the comments about save_expr.
4518 Also allows conversion expressions whose operands are references.
4519 Any other kind of expression is returned unchanged. */
4521 tree
4522 stabilize_reference (tree ref)
4524 tree result;
4525 enum tree_code code = TREE_CODE (ref);
4527 switch (code)
4529 case VAR_DECL:
4530 case PARM_DECL:
4531 case RESULT_DECL:
4532 /* No action is needed in this case. */
4533 return ref;
4535 CASE_CONVERT:
4536 case FLOAT_EXPR:
4537 case FIX_TRUNC_EXPR:
4538 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4539 break;
4541 case INDIRECT_REF:
4542 result = build_nt (INDIRECT_REF,
4543 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4544 break;
4546 case COMPONENT_REF:
4547 result = build_nt (COMPONENT_REF,
4548 stabilize_reference (TREE_OPERAND (ref, 0)),
4549 TREE_OPERAND (ref, 1), NULL_TREE);
4550 break;
4552 case BIT_FIELD_REF:
4553 result = build_nt (BIT_FIELD_REF,
4554 stabilize_reference (TREE_OPERAND (ref, 0)),
4555 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4556 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4557 break;
4559 case ARRAY_REF:
4560 result = build_nt (ARRAY_REF,
4561 stabilize_reference (TREE_OPERAND (ref, 0)),
4562 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4563 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4564 break;
4566 case ARRAY_RANGE_REF:
4567 result = build_nt (ARRAY_RANGE_REF,
4568 stabilize_reference (TREE_OPERAND (ref, 0)),
4569 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4570 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4571 break;
4573 case COMPOUND_EXPR:
4574 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4575 it wouldn't be ignored. This matters when dealing with
4576 volatiles. */
4577 return stabilize_reference_1 (ref);
4579 /* If arg isn't a kind of lvalue we recognize, make no change.
4580 Caller should recognize the error for an invalid lvalue. */
4581 default:
4582 return ref;
4584 case ERROR_MARK:
4585 return error_mark_node;
4588 TREE_TYPE (result) = TREE_TYPE (ref);
4589 TREE_READONLY (result) = TREE_READONLY (ref);
4590 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4591 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4593 return result;
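/* Illustrative sketch only (not compiled): stabilizing an lvalue such as
   a[i++] so it can appear on both sides of an assignment without
   re-evaluating the side-effecting index.  `ref' is hypothetical.  */
#if 0
  tree stable = stabilize_reference (ref);
  tree incr = build2 (PLUS_EXPR, TREE_TYPE (stable), stable,
		      build_one_cst (TREE_TYPE (stable)));
  tree assign = build2 (MODIFY_EXPR, TREE_TYPE (stable), stable, incr);
#endif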
4596 /* Low-level constructors for expressions. */
4598 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4599 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4601 void
4602 recompute_tree_invariant_for_addr_expr (tree t)
4604 tree node;
4605 bool tc = true, se = false;
4607 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4609   /* We start out assuming this address is both invariant and constant and
4610      does not have side effects.  Now go down any handled components and see if
4611 any of them involve offsets that are either non-constant or non-invariant.
4612 Also check for side-effects.
4614 ??? Note that this code makes no attempt to deal with the case where
4615 taking the address of something causes a copy due to misalignment. */
4617 #define UPDATE_FLAGS(NODE) \
4618 do { tree _node = (NODE); \
4619 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4620 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4622 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4623 node = TREE_OPERAND (node, 0))
4625 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4626 array reference (probably made temporarily by the G++ front end),
4627 so ignore all the operands. */
4628 if ((TREE_CODE (node) == ARRAY_REF
4629 || TREE_CODE (node) == ARRAY_RANGE_REF)
4630 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4632 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4633 if (TREE_OPERAND (node, 2))
4634 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4635 if (TREE_OPERAND (node, 3))
4636 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4638 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4639 FIELD_DECL, apparently. The G++ front end can put something else
4640 there, at least temporarily. */
4641 else if (TREE_CODE (node) == COMPONENT_REF
4642 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4644 if (TREE_OPERAND (node, 2))
4645 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4649 node = lang_hooks.expr_to_decl (node, &tc, &se);
4651 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4652 the address, since &(*a)->b is a form of addition. If it's a constant, the
4653 address is constant too. If it's a decl, its address is constant if the
4654 decl is static. Everything else is not constant and, furthermore,
4655 taking the address of a volatile variable is not volatile. */
4656 if (TREE_CODE (node) == INDIRECT_REF
4657 || TREE_CODE (node) == MEM_REF)
4658 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4659 else if (CONSTANT_CLASS_P (node))
4661 else if (DECL_P (node))
4662 tc &= (staticp (node) != NULL_TREE);
4663 else
4665 tc = false;
4666 se |= TREE_SIDE_EFFECTS (node);
4670 TREE_CONSTANT (t) = tc;
4671 TREE_SIDE_EFFECTS (t) = se;
4672 #undef UPDATE_FLAGS
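/* Illustrative sketch only (not compiled): after changing the operand of an
   existing ADDR_EXPR in place, its TREE_CONSTANT and TREE_SIDE_EFFECTS bits
   may be stale and should be recomputed.  `addr' and `new_base' are
   hypothetical.  */
#if 0
  TREE_OPERAND (addr, 0) = new_base;
  recompute_tree_invariant_for_addr_expr (addr);
#endif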
4675 /* Build an expression of code CODE, data type TYPE, and operands as
4676 specified. Expressions and reference nodes can be created this way.
4677 Constants, decls, types and misc nodes cannot be.
4679 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4680 enough for all extant tree codes. */
4682 tree
4683 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4685 tree t;
4687 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4689 t = make_node (code PASS_MEM_STAT);
4690 TREE_TYPE (t) = tt;
4692 return t;
4695 tree
4696 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4698 int length = sizeof (struct tree_exp);
4699 tree t;
4701 record_node_allocation_statistics (code, length);
4703 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4705 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4707 memset (t, 0, sizeof (struct tree_common));
4709 TREE_SET_CODE (t, code);
4711 TREE_TYPE (t) = type;
4712 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4713 TREE_OPERAND (t, 0) = node;
4714 if (node && !TYPE_P (node))
4716 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4717 TREE_READONLY (t) = TREE_READONLY (node);
4720 if (TREE_CODE_CLASS (code) == tcc_statement)
4722 if (code != DEBUG_BEGIN_STMT)
4723 TREE_SIDE_EFFECTS (t) = 1;
4725 else switch (code)
4727 case VA_ARG_EXPR:
4728 /* All of these have side-effects, no matter what their
4729 operands are. */
4730 TREE_SIDE_EFFECTS (t) = 1;
4731 TREE_READONLY (t) = 0;
4732 break;
4734 case INDIRECT_REF:
4735 /* Whether a dereference is readonly has nothing to do with whether
4736 its operand is readonly. */
4737 TREE_READONLY (t) = 0;
4738 break;
4740 case ADDR_EXPR:
4741 if (node)
4742 recompute_tree_invariant_for_addr_expr (t);
4743 break;
4745 default:
4746 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4747 && node && !TYPE_P (node)
4748 && TREE_CONSTANT (node))
4749 TREE_CONSTANT (t) = 1;
4750 if (TREE_CODE_CLASS (code) == tcc_reference
4751 && node && TREE_THIS_VOLATILE (node))
4752 TREE_THIS_VOLATILE (t) = 1;
4753 break;
4756 return t;
4759 #define PROCESS_ARG(N) \
4760 do { \
4761 TREE_OPERAND (t, N) = arg##N; \
4762 if (arg##N &&!TYPE_P (arg##N)) \
4764 if (TREE_SIDE_EFFECTS (arg##N)) \
4765 side_effects = 1; \
4766 if (!TREE_READONLY (arg##N) \
4767 && !CONSTANT_CLASS_P (arg##N)) \
4768 (void) (read_only = 0); \
4769 if (!TREE_CONSTANT (arg##N)) \
4770 (void) (constant = 0); \
4772 } while (0)
4774 tree
4775 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4777 bool constant, read_only, side_effects, div_by_zero;
4778 tree t;
4780 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4782 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4783 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4784 /* When sizetype precision doesn't match that of pointers
4785 we need to be able to build explicit extensions or truncations
4786 of the offset argument. */
4787 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4788 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4789 && TREE_CODE (arg1) == INTEGER_CST);
4791 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4792 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4793 && ptrofftype_p (TREE_TYPE (arg1)));
4795 t = make_node (code PASS_MEM_STAT);
4796 TREE_TYPE (t) = tt;
4798 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4799 result based on those same flags for the arguments. But if the
4800 arguments aren't really even `tree' expressions, we shouldn't be trying
4801 to do this. */
4803 /* Expressions without side effects may be constant if their
4804 arguments are as well. */
4805 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4806 || TREE_CODE_CLASS (code) == tcc_binary);
4807 read_only = 1;
4808 side_effects = TREE_SIDE_EFFECTS (t);
4810 switch (code)
4812 case TRUNC_DIV_EXPR:
4813 case CEIL_DIV_EXPR:
4814 case FLOOR_DIV_EXPR:
4815 case ROUND_DIV_EXPR:
4816 case EXACT_DIV_EXPR:
4817 case CEIL_MOD_EXPR:
4818 case FLOOR_MOD_EXPR:
4819 case ROUND_MOD_EXPR:
4820 case TRUNC_MOD_EXPR:
4821 div_by_zero = integer_zerop (arg1);
4822 break;
4823 default:
4824 div_by_zero = false;
4827 PROCESS_ARG (0);
4828 PROCESS_ARG (1);
4830 TREE_SIDE_EFFECTS (t) = side_effects;
4831 if (code == MEM_REF)
4833 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4835 tree o = TREE_OPERAND (arg0, 0);
4836 TREE_READONLY (t) = TREE_READONLY (o);
4837 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4840 else
4842 TREE_READONLY (t) = read_only;
4843 /* Don't mark X / 0 as constant. */
4844 TREE_CONSTANT (t) = constant && !div_by_zero;
4845 TREE_THIS_VOLATILE (t)
4846 = (TREE_CODE_CLASS (code) == tcc_reference
4847 && arg0 && TREE_THIS_VOLATILE (arg0));
4850 return t;
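/* Illustrative sketch only (not compiled): typical uses of the fixed-arity
   constructors.  `ptr' and `base_off' are hypothetical; note the
   POINTER_PLUS_EXPR invariants asserted above (pointer-typed result and
   first operand, ptrofftype offset).  */
#if 0
  tree off = build2 (PLUS_EXPR, sizetype, base_off, size_int (4));
  tree ptr2 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
#endif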
4854 tree
4855 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4856 tree arg2 MEM_STAT_DECL)
4858 bool constant, read_only, side_effects;
4859 tree t;
4861 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4862 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4864 t = make_node (code PASS_MEM_STAT);
4865 TREE_TYPE (t) = tt;
4867 read_only = 1;
4869 /* As a special exception, if COND_EXPR has NULL branches, we
4870 assume that it is a gimple statement and always consider
4871 it to have side effects. */
4872 if (code == COND_EXPR
4873 && tt == void_type_node
4874 && arg1 == NULL_TREE
4875 && arg2 == NULL_TREE)
4876 side_effects = true;
4877 else
4878 side_effects = TREE_SIDE_EFFECTS (t);
4880 PROCESS_ARG (0);
4881 PROCESS_ARG (1);
4882 PROCESS_ARG (2);
4884 if (code == COND_EXPR)
4885 TREE_READONLY (t) = read_only;
4887 TREE_SIDE_EFFECTS (t) = side_effects;
4888 TREE_THIS_VOLATILE (t)
4889 = (TREE_CODE_CLASS (code) == tcc_reference
4890 && arg0 && TREE_THIS_VOLATILE (arg0));
4892 return t;
4895 tree
4896 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4897 tree arg2, tree arg3 MEM_STAT_DECL)
4899 bool constant, read_only, side_effects;
4900 tree t;
4902 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4904 t = make_node (code PASS_MEM_STAT);
4905 TREE_TYPE (t) = tt;
4907 side_effects = TREE_SIDE_EFFECTS (t);
4909 PROCESS_ARG (0);
4910 PROCESS_ARG (1);
4911 PROCESS_ARG (2);
4912 PROCESS_ARG (3);
4914 TREE_SIDE_EFFECTS (t) = side_effects;
4915 TREE_THIS_VOLATILE (t)
4916 = (TREE_CODE_CLASS (code) == tcc_reference
4917 && arg0 && TREE_THIS_VOLATILE (arg0));
4919 return t;
4922 tree
4923 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4924 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4926 bool constant, read_only, side_effects;
4927 tree t;
4929 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4931 t = make_node (code PASS_MEM_STAT);
4932 TREE_TYPE (t) = tt;
4934 side_effects = TREE_SIDE_EFFECTS (t);
4936 PROCESS_ARG (0);
4937 PROCESS_ARG (1);
4938 PROCESS_ARG (2);
4939 PROCESS_ARG (3);
4940 PROCESS_ARG (4);
4942 TREE_SIDE_EFFECTS (t) = side_effects;
4943 if (code == TARGET_MEM_REF)
4945 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4947 tree o = TREE_OPERAND (arg0, 0);
4948 TREE_READONLY (t) = TREE_READONLY (o);
4949 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4952 else
4953 TREE_THIS_VOLATILE (t)
4954 = (TREE_CODE_CLASS (code) == tcc_reference
4955 && arg0 && TREE_THIS_VOLATILE (arg0));
4957 return t;
4960 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4961 on the pointer PTR. */
4963 tree
4964 build_simple_mem_ref_loc (location_t loc, tree ptr)
4966 poly_int64 offset = 0;
4967 tree ptype = TREE_TYPE (ptr);
4968 tree tem;
4969 /* For convenience allow addresses that collapse to a simple base
4970 and offset. */
4971 if (TREE_CODE (ptr) == ADDR_EXPR
4972 && (handled_component_p (TREE_OPERAND (ptr, 0))
4973 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4975 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4976 gcc_assert (ptr);
4977 if (TREE_CODE (ptr) == MEM_REF)
4979 offset += mem_ref_offset (ptr).force_shwi ();
4980 ptr = TREE_OPERAND (ptr, 0);
4982 else
4983 ptr = build_fold_addr_expr (ptr);
4984 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4986 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4987 ptr, build_int_cst (ptype, offset));
4988 SET_EXPR_LOCATION (tem, loc);
4989 return tem;
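/* Illustrative sketch only (not compiled): dereference a pointer as if by a
   plain *PTR; an ADDR_EXPR base collapses to a base-plus-offset MEM_REF.
   `ptr' is hypothetical.  */
#if 0
  tree deref = build_simple_mem_ref_loc (input_location, ptr);
#endif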
4992 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4994 poly_offset_int
4995 mem_ref_offset (const_tree t)
4997 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4998 SIGNED);
5001 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5002 offsetted by OFFSET units. */
5004 tree
5005 build_invariant_address (tree type, tree base, poly_int64 offset)
5007 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5008 build_fold_addr_expr (base),
5009 build_int_cst (ptr_type_node, offset));
5010 tree addr = build1 (ADDR_EXPR, type, ref);
5011 recompute_tree_invariant_for_addr_expr (addr);
5012 return addr;
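/* Illustrative sketch only (not compiled): take the invariant address of
   BASE at a byte offset, with the given pointer TYPE.  `type' and `base'
   are hypothetical.  */
#if 0
  tree addr = build_invariant_address (type, base, 8);
#endif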
5015 /* Similar except don't specify the TREE_TYPE
5016 and leave the TREE_SIDE_EFFECTS as 0.
5017 It is permissible for arguments to be null,
5018 or even garbage if their values do not matter. */
5020 tree
5021 build_nt (enum tree_code code, ...)
5023 tree t;
5024 int length;
5025 int i;
5026 va_list p;
5028 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5030 va_start (p, code);
5032 t = make_node (code);
5033 length = TREE_CODE_LENGTH (code);
5035 for (i = 0; i < length; i++)
5036 TREE_OPERAND (t, i) = va_arg (p, tree);
5038 va_end (p);
5039 return t;
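/* Illustrative sketch only (not compiled): build_nt leaves TREE_TYPE and the
   flag bookkeeping to the caller, which is how stabilize_reference above
   uses it.  `array', `index' and `elem_type' are hypothetical.  */
#if 0
  tree ref = build_nt (ARRAY_REF, array, index, NULL_TREE, NULL_TREE);
  TREE_TYPE (ref) = elem_type;
#endif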
5042 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5043 tree vec. */
5045 tree
5046 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5048 tree ret, t;
5049 unsigned int ix;
5051 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5052 CALL_EXPR_FN (ret) = fn;
5053 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5054 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5055 CALL_EXPR_ARG (ret, ix) = t;
5056 return ret;
5059 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5060 and data type TYPE.
5061 We do NOT enter this node in any sort of symbol table.
5063 LOC is the location of the decl.
5065 layout_decl is used to set up the decl's storage layout.
5066 Other slots are initialized to 0 or null pointers. */
5068 tree
5069 build_decl (location_t loc, enum tree_code code, tree name,
5070 tree type MEM_STAT_DECL)
5072 tree t;
5074 t = make_node (code PASS_MEM_STAT);
5075 DECL_SOURCE_LOCATION (t) = loc;
5077 /* if (type == error_mark_node)
5078 type = integer_type_node; */
5079 /* That is not done, deliberately, so that having error_mark_node
5080 as the type can suppress useless errors in the use of this variable. */
5082 DECL_NAME (t) = name;
5083 TREE_TYPE (t) = type;
5085 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5086 layout_decl (t, 0);
5088 return t;
5091 /* Builds and returns function declaration with NAME and TYPE. */
5093 tree
5094 build_fn_decl (const char *name, tree type)
5096 tree id = get_identifier (name);
5097 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5099 DECL_EXTERNAL (decl) = 1;
5100 TREE_PUBLIC (decl) = 1;
5101 DECL_ARTIFICIAL (decl) = 1;
5102 TREE_NOTHROW (decl) = 1;
5104 return decl;
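/* Illustrative sketch only (not compiled): creating a local variable and an
   external function declaration.  The names "tmp" and "helper" are
   hypothetical.  */
#if 0
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("tmp"), integer_type_node);
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  tree fndecl = build_fn_decl ("helper", fntype);
#endif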
5107 vec<tree, va_gc> *all_translation_units;
5109 /* Builds a new translation-unit decl with name NAME, queues it in the
5110 global list of translation-unit decls and returns it. */
5112 tree
5113 build_translation_unit_decl (tree name)
5115 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5116 name, NULL_TREE);
5117 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5118 vec_safe_push (all_translation_units, tu);
5119 return tu;
5123 /* BLOCK nodes are used to represent the structure of binding contours
5124 and declarations, once those contours have been exited and their contents
5125 compiled. This information is used for outputting debugging info. */
5127 tree
5128 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5130 tree block = make_node (BLOCK);
5132 BLOCK_VARS (block) = vars;
5133 BLOCK_SUBBLOCKS (block) = subblocks;
5134 BLOCK_SUPERCONTEXT (block) = supercontext;
5135 BLOCK_CHAIN (block) = chain;
5136 return block;
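/* Illustrative sketch only (not compiled): wrap a chain of local VAR_DECLs
   into a BLOCK for debug output; the supercontext is typically the
   enclosing BLOCK or FUNCTION_DECL.  `vars' and `fndecl' are
   hypothetical.  */
#if 0
  tree blk = build_block (vars, NULL_TREE, fndecl, NULL_TREE);
#endif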
5140 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5142 LOC is the location to use in tree T. */
5144 void
5145 protected_set_expr_location (tree t, location_t loc)
5147 if (CAN_HAVE_LOCATION_P (t))
5148 SET_EXPR_LOCATION (t, loc);
5149 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5151 t = expr_single (t);
5152 if (t && CAN_HAVE_LOCATION_P (t))
5153 SET_EXPR_LOCATION (t, loc);
5157 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5158 UNKNOWN_LOCATION. */
5160 void
5161 protected_set_expr_location_if_unset (tree t, location_t loc)
5163 t = expr_single (t);
5164 if (t && !EXPR_HAS_LOCATION (t))
5165 protected_set_expr_location (t, loc);
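/* Illustrative sketch only (not compiled): attach a location to T only when
   it can carry one and none has been set yet.  `t' and `rhs' are
   hypothetical.  */
#if 0
  protected_set_expr_location_if_unset (t, EXPR_LOCATION (rhs));
#endif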
5168 /* Data used when collecting DECLs and TYPEs for language data removal. */
5170 class free_lang_data_d
5172 public:
5173 free_lang_data_d () : decls (100), types (100) {}
5175 /* Worklist to avoid excessive recursion. */
5176 auto_vec<tree> worklist;
5178 /* Set of traversed objects. Used to avoid duplicate visits. */
5179 hash_set<tree> pset;
5181 /* Array of symbols to process with free_lang_data_in_decl. */
5182 auto_vec<tree> decls;
5184 /* Array of types to process with free_lang_data_in_type. */
5185 auto_vec<tree> types;
5189 /* Add type or decl T to one of the list of tree nodes that need their
5190 language data removed. The lists are held inside FLD. */
5192 static void
5193 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5195 if (DECL_P (t))
5196 fld->decls.safe_push (t);
5197 else if (TYPE_P (t))
5198 fld->types.safe_push (t);
5199 else
5200 gcc_unreachable ();
5203 /* Push tree node T into FLD->WORKLIST. */
5205 static inline void
5206 fld_worklist_push (tree t, class free_lang_data_d *fld)
5208 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5209 fld->worklist.safe_push ((t));
5214 /* Return simplified TYPE_NAME of TYPE. */
5216 static tree
5217 fld_simplified_type_name (tree type)
5219 if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5220 return TYPE_NAME (type);
5221 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5222 TYPE_DECL if the type doesn't have linkage.
5223      This must match fld_ */
5224 if (type != TYPE_MAIN_VARIANT (type)
5225 || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
5226 && (TREE_CODE (type) != RECORD_TYPE
5227 || !TYPE_BINFO (type)
5228 || !BINFO_VTABLE (TYPE_BINFO (type)))))
5229 return DECL_NAME (TYPE_NAME (type));
5230 return TYPE_NAME (type);
5233 /* Do the same comparison as check_qualified_type, skipping the lang part of
5234    the type, and be more permissive about type names: we only care that the
5235    names are the same (for diagnostics) and that the ODR names are the same.
5236    If INNER_TYPE is non-NULL, be sure that TREE_TYPE matches it.  */
5238 static bool
5239 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5241 if (TYPE_QUALS (t) != TYPE_QUALS (v)
5242 /* We want to match incomplete variants with complete types.
5243 In this case we need to ignore alignment. */
5244 || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5245 && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5246 || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5247 || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5248 || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5249 TYPE_ATTRIBUTES (v))
5250 || (inner_type && TREE_TYPE (v) != inner_type))
5251 return false;
5253 return true;
5256 /* Find variant of FIRST that match T and create new one if necessary.
5257 Set TREE_TYPE to INNER_TYPE if non-NULL. */
5259 static tree
5260 fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
5261 tree inner_type = NULL)
5263 if (first == TYPE_MAIN_VARIANT (t))
5264 return t;
5265 for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5266 if (fld_type_variant_equal_p (t, v, inner_type))
5267 return v;
5268 tree v = build_variant_type_copy (first);
5269 TYPE_READONLY (v) = TYPE_READONLY (t);
5270 TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5271 TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5272 TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5273 TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5274 TYPE_NAME (v) = TYPE_NAME (t);
5275 TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5276 TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5277 /* Variants of incomplete types should have alignment
5278 set to BITS_PER_UNIT. Do not copy the actual alignment. */
5279 if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5281 SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5282 TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5284 if (inner_type)
5285 TREE_TYPE (v) = inner_type;
5286 gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
5287 if (!fld->pset.add (v))
5288 add_tree_to_fld_list (v, fld);
5289 return v;
5292 /* Map complete types to incomplete types. */
5294 static hash_map<tree, tree> *fld_incomplete_types;
5296 /* Map types to simplified types. */
5298 static hash_map<tree, tree> *fld_simplified_types;
5300 /* Produce a variant of T whose TREE_TYPE is T2.  If it is the main variant,
5301 use MAP to prevent duplicates. */
5303 static tree
5304 fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
5305 class free_lang_data_d *fld)
5307 if (TREE_TYPE (t) == t2)
5308 return t;
5310 if (TYPE_MAIN_VARIANT (t) != t)
5312 return fld_type_variant
5313 (fld_process_array_type (TYPE_MAIN_VARIANT (t),
5314 TYPE_MAIN_VARIANT (t2), map, fld),
5315 t, fld, t2);
5318 bool existed;
5319 tree &array
5320 = map->get_or_insert (t, &existed);
5321 if (!existed)
5323 array
5324 = build_array_type_1 (t2, TYPE_DOMAIN (t), TYPE_TYPELESS_STORAGE (t),
5325 false, false);
5326 TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
5327 if (!fld->pset.add (array))
5328 add_tree_to_fld_list (array, fld);
5330 return array;
5333 /* Return CTX after removal of contexts that are not relevant.  */
5335 static tree
5336 fld_decl_context (tree ctx)
5338 /* Variably modified types are needed for tree_is_indexable to decide
5339    whether the type needs to go to the local or global section.
5340 This code is semi-broken but for now it is easiest to keep contexts
5341 as expected. */
5342 if (ctx && TYPE_P (ctx)
5343 && !variably_modified_type_p (ctx, NULL_TREE))
5345 while (ctx && TYPE_P (ctx))
5346 ctx = TYPE_CONTEXT (ctx);
5348 return ctx;
5351 /* For T being an aggregate type, try to turn it into an incomplete variant.
5352 Return T if no simplification is possible. */
5354 static tree
5355 fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
5357 if (!t)
5358 return NULL;
5359 if (POINTER_TYPE_P (t))
5361 tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5362 if (t2 != TREE_TYPE (t))
5364 tree first;
5365 if (TREE_CODE (t) == POINTER_TYPE)
5366 first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5367 TYPE_REF_CAN_ALIAS_ALL (t));
5368 else
5369 first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5370 TYPE_REF_CAN_ALIAS_ALL (t));
5371 gcc_assert (TYPE_CANONICAL (t2) != t2
5372 && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5373 if (!fld->pset.add (first))
5374 add_tree_to_fld_list (first, fld);
5375 return fld_type_variant (first, t, fld);
5377 return t;
5379 if (TREE_CODE (t) == ARRAY_TYPE)
5380 return fld_process_array_type (t,
5381 fld_incomplete_type_of (TREE_TYPE (t), fld),
5382 fld_incomplete_types, fld);
5383 if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
5384 || !COMPLETE_TYPE_P (t))
5385 return t;
5386 if (TYPE_MAIN_VARIANT (t) == t)
5388 bool existed;
5389 tree &copy
5390 = fld_incomplete_types->get_or_insert (t, &existed);
5392 if (!existed)
5394 copy = build_distinct_type_copy (t);
5396 /* It is possible that type was not seen by free_lang_data yet. */
5397 if (!fld->pset.add (copy))
5398 add_tree_to_fld_list (copy, fld);
5399 TYPE_SIZE (copy) = NULL;
5400 TYPE_USER_ALIGN (copy) = 0;
5401 TYPE_SIZE_UNIT (copy) = NULL;
5402 TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5403 TREE_ADDRESSABLE (copy) = 0;
5404 if (AGGREGATE_TYPE_P (t))
5406 SET_TYPE_MODE (copy, VOIDmode);
5407 SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5408 TYPE_TYPELESS_STORAGE (copy) = 0;
5409 TYPE_FIELDS (copy) = NULL;
5410 TYPE_BINFO (copy) = NULL;
5411 TYPE_FINAL_P (copy) = 0;
5412 TYPE_EMPTY_P (copy) = 0;
5414 else
5416 TYPE_VALUES (copy) = NULL;
5417 ENUM_IS_OPAQUE (copy) = 0;
5418 ENUM_IS_SCOPED (copy) = 0;
5421 /* Build copy of TYPE_DECL in TYPE_NAME if necessary.
5422 This is needed for ODR violation warnings to come out right (we
5423 want duplicate TYPE_DECLs whenever the type is duplicated because
5424 	 of ODR violation).  Because lang data in the TYPE_DECL may not
5425 have been freed yet, rebuild it from scratch and copy relevant
5426 fields. */
5427 TYPE_NAME (copy) = fld_simplified_type_name (copy);
5428 tree name = TYPE_NAME (copy);
5430 if (name && TREE_CODE (name) == TYPE_DECL)
5432 gcc_checking_assert (TREE_TYPE (name) == t);
5433 tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
5434 DECL_NAME (name), copy);
5435 if (DECL_ASSEMBLER_NAME_SET_P (name))
5436 SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
5437 SET_DECL_ALIGN (name2, 0);
5438 DECL_CONTEXT (name2) = fld_decl_context
5439 (DECL_CONTEXT (name));
5440 TYPE_NAME (copy) = name2;
5443 return copy;
5445 return (fld_type_variant
5446 (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
5449 /* Simplify type T for scenarios where we do not need complete pointer
5450 types. */
5452 static tree
5453 fld_simplified_type (tree t, class free_lang_data_d *fld)
5455 if (!t)
5456 return t;
5457 if (POINTER_TYPE_P (t))
5458 return fld_incomplete_type_of (t, fld);
5459 /* FIXME: This triggers verification error, see PR88140. */
5460 if (TREE_CODE (t) == ARRAY_TYPE && 0)
5461 return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
5462 fld_simplified_types, fld);
5463 return t;
5466 /* Reset the expression *EXPR_P, a size or position.
5468 ??? We could reset all non-constant sizes or positions. But it's cheap
5469 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5471 We need to reset self-referential sizes or positions because they cannot
5472 be gimplified and thus can contain a CALL_EXPR after the gimplification
5473 is finished, which will run afoul of LTO streaming. And they need to be
5474 reset to something essentially dummy but not constant, so as to preserve
5475 the properties of the object they are attached to. */
5477 static inline void
5478 free_lang_data_in_one_sizepos (tree *expr_p)
5480 tree expr = *expr_p;
5481 if (CONTAINS_PLACEHOLDER_P (expr))
5482 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5486 /* Reset all the fields in a binfo node BINFO. We only keep
5487 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5489 static void
5490 free_lang_data_in_binfo (tree binfo)
5492 unsigned i;
5493 tree t;
5495 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5497 BINFO_VIRTUALS (binfo) = NULL_TREE;
5498 BINFO_BASE_ACCESSES (binfo) = NULL;
5499 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5500 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5501 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5502 TREE_PUBLIC (binfo) = 0;
5504 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5505 free_lang_data_in_binfo (t);
5509 /* Reset all language specific information still present in TYPE. */
5511 static void
5512 free_lang_data_in_type (tree type, class free_lang_data_d *fld)
5514 gcc_assert (TYPE_P (type));
5516 /* Give the FE a chance to remove its own data first. */
5517 lang_hooks.free_lang_data (type);
5519 TREE_LANG_FLAG_0 (type) = 0;
5520 TREE_LANG_FLAG_1 (type) = 0;
5521 TREE_LANG_FLAG_2 (type) = 0;
5522 TREE_LANG_FLAG_3 (type) = 0;
5523 TREE_LANG_FLAG_4 (type) = 0;
5524 TREE_LANG_FLAG_5 (type) = 0;
5525 TREE_LANG_FLAG_6 (type) = 0;
5527 TYPE_NEEDS_CONSTRUCTING (type) = 0;
5529 /* Purge non-marked variants from the variants chain, so that they
5530 don't reappear in the IL after free_lang_data. */
5531 while (TYPE_NEXT_VARIANT (type)
5532 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
5534 tree t = TYPE_NEXT_VARIANT (type);
5535 TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
5536 /* Turn the removed types into distinct types. */
5537 TYPE_MAIN_VARIANT (t) = t;
5538 TYPE_NEXT_VARIANT (t) = NULL_TREE;
5541 if (TREE_CODE (type) == FUNCTION_TYPE)
5543 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5544 /* Remove the const and volatile qualifiers from arguments. The
5545 C++ front end removes them, but the C front end does not,
5546 leading to false ODR violation errors when merging two
5547 instances of the same function signature compiled by
5548 different front ends. */
5549 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5551 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5552 tree arg_type = TREE_VALUE (p);
5554 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5556 int quals = TYPE_QUALS (arg_type)
5557 & ~TYPE_QUAL_CONST
5558 & ~TYPE_QUAL_VOLATILE;
5559 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5560 if (!fld->pset.add (TREE_VALUE (p)))
5561 free_lang_data_in_type (TREE_VALUE (p), fld);
5563 /* C++ FE uses TREE_PURPOSE to store initial values. */
5564 TREE_PURPOSE (p) = NULL;
5567 else if (TREE_CODE (type) == METHOD_TYPE)
5569 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5570 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5572 /* C++ FE uses TREE_PURPOSE to store initial values. */
5573 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5574 TREE_PURPOSE (p) = NULL;
5577 else if (RECORD_OR_UNION_TYPE_P (type))
5579 /* Remove members that are not FIELD_DECLs from the field list
5580 of an aggregate. These occur in C++. */
5581 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5582 if (TREE_CODE (member) == FIELD_DECL)
5583 prev = &DECL_CHAIN (member);
5584 else
5585 *prev = DECL_CHAIN (member);
5587 TYPE_VFIELD (type) = NULL_TREE;
5589 if (TYPE_BINFO (type))
5591 free_lang_data_in_binfo (TYPE_BINFO (type));
5592       /* We need to preserve the link to bases and the virtual table for all
5593 	 polymorphic types to keep the devirtualization machinery working.  */
5594 if (!BINFO_VTABLE (TYPE_BINFO (type)))
5595 TYPE_BINFO (type) = NULL;
5598 else if (INTEGRAL_TYPE_P (type)
5599 || SCALAR_FLOAT_TYPE_P (type)
5600 || FIXED_POINT_TYPE_P (type))
5602 if (TREE_CODE (type) == ENUMERAL_TYPE)
5604 ENUM_IS_OPAQUE (type) = 0;
5605 ENUM_IS_SCOPED (type) = 0;
5606 /* Type values are used only for C++ ODR checking. Drop them
5607 for all type variants and non-ODR types.
5608 For ODR types the data is freed in free_odr_warning_data. */
5609 if (TYPE_MAIN_VARIANT (type) != type
5610 || !type_with_linkage_p (type))
5611 TYPE_VALUES (type) = NULL;
5612 else
5613 /* Simplify representation by recording only values rather
5614 than const decls. */
5615 for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
5616 if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
5617 TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
5619 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5620 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5623 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5625 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5626 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5628 if (TYPE_CONTEXT (type)
5629 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5631 tree ctx = TYPE_CONTEXT (type);
5634 ctx = BLOCK_SUPERCONTEXT (ctx);
5636 while (ctx && TREE_CODE (ctx) == BLOCK);
5637 TYPE_CONTEXT (type) = ctx;
5640 TYPE_STUB_DECL (type) = NULL;
5641 TYPE_NAME (type) = fld_simplified_type_name (type);
5645 /* Return true if DECL may need an assembler name to be set. */
5647 static inline bool
5648 need_assembler_name_p (tree decl)
5650 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5651    Rule merging.  This makes type_odr_p return true on those types during
5652    LTO, and by comparing the mangled names we can tell which types are intended
5653    to be equivalent across compilation units.
5655    We do not store names of type_in_anonymous_namespace_p.
5657    Record, union and enumeration types have linkage that allows us
5658    to check type_in_anonymous_namespace_p.  We do not mangle compound types
5659    that always can be compared structurally.
5661    Similarly for builtin types, we compare properties of their main variant.
5662    A special case is integer types, where mangling does make a difference
5663    between char/signed char/unsigned char etc.  Storing the name for these allows
5664    e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5665 See cp/mangle.c:write_builtin_type for details. */
5667 if (TREE_CODE (decl) == TYPE_DECL)
5669 if (DECL_NAME (decl)
5670 && decl == TYPE_NAME (TREE_TYPE (decl))
5671 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5672 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5673 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
5674 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
5675 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
5676 && (type_with_linkage_p (TREE_TYPE (decl))
5677 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5678 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5679 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5680 return false;
5682 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5683 if (!VAR_OR_FUNCTION_DECL_P (decl))
5684 return false;
5686 /* If DECL already has its assembler name set, it does not need a
5687 new one. */
5688 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5689 || DECL_ASSEMBLER_NAME_SET_P (decl))
5690 return false;
5692 /* Abstract decls do not need an assembler name. */
5693 if (DECL_ABSTRACT_P (decl))
5694 return false;
5696 /* For VAR_DECLs, only static, public and external symbols need an
5697 assembler name. */
5698 if (VAR_P (decl)
5699 && !TREE_STATIC (decl)
5700 && !TREE_PUBLIC (decl)
5701 && !DECL_EXTERNAL (decl))
5702 return false;
5704 if (TREE_CODE (decl) == FUNCTION_DECL)
5706 /* Do not set assembler name on builtins. Allow RTL expansion to
5707 decide whether to expand inline or via a regular call. */
5708 if (fndecl_built_in_p (decl)
5709 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5710 return false;
5712 /* Functions represented in the callgraph need an assembler name. */
5713 if (cgraph_node::get (decl) != NULL)
5714 return true;
5716 /* Unused and not public functions don't need an assembler name. */
5717 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5718 return false;
5721 return true;
5725 /* Reset all language specific information still present in symbol
5726 DECL. */
5728 static void
5729 free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
5731 gcc_assert (DECL_P (decl));
5733 /* Give the FE a chance to remove its own data first. */
5734 lang_hooks.free_lang_data (decl);
5736 TREE_LANG_FLAG_0 (decl) = 0;
5737 TREE_LANG_FLAG_1 (decl) = 0;
5738 TREE_LANG_FLAG_2 (decl) = 0;
5739 TREE_LANG_FLAG_3 (decl) = 0;
5740 TREE_LANG_FLAG_4 (decl) = 0;
5741 TREE_LANG_FLAG_5 (decl) = 0;
5742 TREE_LANG_FLAG_6 (decl) = 0;
5744 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5745 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5746 if (TREE_CODE (decl) == FIELD_DECL)
5748 DECL_FCONTEXT (decl) = NULL;
5749 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5750 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5751 DECL_QUALIFIER (decl) = NULL_TREE;
5754 if (TREE_CODE (decl) == FUNCTION_DECL)
5756 struct cgraph_node *node;
5757 /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5758 	 the address may be taken in another unit, so this flag has no practical
5759 	 use for the middle-end.
5761 	 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5762 	 for public objects that indeed cannot be addressed, but it is not
5763 	 the case.  Set the flag to true so we do not get merge failures for
5764 	 e.g. virtual tables between units that take the address of them and
5765 units that don't. */
5766 if (TREE_PUBLIC (decl))
5767 TREE_ADDRESSABLE (decl) = true;
5768 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5769 if (!(node = cgraph_node::get (decl))
5770 || (!node->definition && !node->clones))
5772 if (node)
5773 node->release_body ();
5774 else
5776 release_function_body (decl);
5777 DECL_ARGUMENTS (decl) = NULL;
5778 DECL_RESULT (decl) = NULL;
5779 DECL_INITIAL (decl) = error_mark_node;
5782 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5784 tree t;
5786 /* If DECL has a gimple body, then the context for its
5787 arguments must be DECL. Otherwise, it doesn't really
5788 matter, as we will not be emitting any code for DECL. In
5789 general, there may be other instances of DECL created by
5790 the front end and since PARM_DECLs are generally shared,
5791 their DECL_CONTEXT changes as the replicas of DECL are
5792 created. The only time where DECL_CONTEXT is important
5793 is for the FUNCTION_DECLs that have a gimple body (since
5794 the PARM_DECL will be used in the function's body). */
5795 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5796 DECL_CONTEXT (t) = decl;
5797 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5798 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5799 = target_option_default_node;
5800 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5801 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5802 = optimization_default_node;
5805 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5806 At this point, it is not needed anymore. */
5807 DECL_SAVED_TREE (decl) = NULL_TREE;
5809 /* Clear the abstract origin if it refers to a method.
5810 Otherwise dwarf2out.c will ICE as we splice functions out of
5811 TYPE_FIELDS and thus the origin will not be output
5812 correctly. */
5813 if (DECL_ABSTRACT_ORIGIN (decl)
5814 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5815 && RECORD_OR_UNION_TYPE_P
5816 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5817 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5819 DECL_VINDEX (decl) = NULL_TREE;
5821 else if (VAR_P (decl))
5823 /* See comment above why we set the flag for functions. */
5824 if (TREE_PUBLIC (decl))
5825 TREE_ADDRESSABLE (decl) = true;
5826 if ((DECL_EXTERNAL (decl)
5827 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5828 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5829 DECL_INITIAL (decl) = NULL_TREE;
5831 else if (TREE_CODE (decl) == TYPE_DECL)
5833 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5834 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5835 TREE_PUBLIC (decl) = 0;
5836 TREE_PRIVATE (decl) = 0;
5837 DECL_ARTIFICIAL (decl) = 0;
5838 TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5839 DECL_INITIAL (decl) = NULL_TREE;
5840 DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5841 DECL_MODE (decl) = VOIDmode;
5842 SET_DECL_ALIGN (decl, 0);
5843 /* TREE_TYPE is cleared at WPA time in free_odr_warning_data. */
5845 else if (TREE_CODE (decl) == FIELD_DECL)
5847 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5848 DECL_INITIAL (decl) = NULL_TREE;
5850 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5851 && DECL_INITIAL (decl)
5852 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5854 /* Strip builtins from the translation-unit BLOCK. We still have targets
5855 without builtin_decl_explicit support and also builtins are shared
5856 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5857 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5858 while (*nextp)
5860 tree var = *nextp;
5861 if (TREE_CODE (var) == FUNCTION_DECL
5862 && fndecl_built_in_p (var))
5863 *nextp = TREE_CHAIN (var);
5864 else
5865 nextp = &TREE_CHAIN (var);
5868 /* We need to keep field decls associated with their trees. Otherwise tree
5869      merging may merge some fields and keep others disjoint, which in turn will
5870 not do well with TREE_CHAIN pointers linking them.
5872 Also do not drop containing types for virtual methods and tables because
5873 these are needed by devirtualization.
5874      C++ destructors are special because C++ frontends sometimes produce a
5875      virtual destructor as an alias of a non-virtual destructor.  In
5876      devirtualization code we always walk through aliases and we need
5877      the context to be preserved too.  See PR89335.  */
5878 if (TREE_CODE (decl) != FIELD_DECL
5879 && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5880 || (!DECL_VIRTUAL_P (decl)
5881 && (TREE_CODE (decl) != FUNCTION_DECL
5882 || !DECL_CXX_DESTRUCTOR_P (decl)))))
5883 DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
5887 /* Operand callback helper for free_lang_data_in_node. *TP is the
5888 subtree operand being considered. */
5890 static tree
5891 find_decls_types_r (tree *tp, int *ws, void *data)
5893 tree t = *tp;
5894 class free_lang_data_d *fld = (class free_lang_data_d *) data;
5896 if (TREE_CODE (t) == TREE_LIST)
5897 return NULL_TREE;
5899 /* Language specific nodes will be removed, so there is no need
5900 to gather anything under them. */
5901 if (is_lang_specific (t))
5903 *ws = 0;
5904 return NULL_TREE;
5907 if (DECL_P (t))
5909 /* Note that walk_tree does not traverse every possible field in
5910 decls, so we have to do our own traversals here. */
5911 add_tree_to_fld_list (t, fld);
5913 fld_worklist_push (DECL_NAME (t), fld);
5914 fld_worklist_push (DECL_CONTEXT (t), fld);
5915 fld_worklist_push (DECL_SIZE (t), fld);
5916 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5918 /* We are going to remove everything under DECL_INITIAL for
5919 TYPE_DECLs. No point walking them. */
5920 if (TREE_CODE (t) != TYPE_DECL)
5921 fld_worklist_push (DECL_INITIAL (t), fld);
5923 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5924 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5926 if (TREE_CODE (t) == FUNCTION_DECL)
5928 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5929 fld_worklist_push (DECL_RESULT (t), fld);
5931 else if (TREE_CODE (t) == FIELD_DECL)
5933 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5934 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5935 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5936 fld_worklist_push (DECL_FCONTEXT (t), fld);
5939 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5940 && DECL_HAS_VALUE_EXPR_P (t))
5941 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5943 if (TREE_CODE (t) != FIELD_DECL
5944 && TREE_CODE (t) != TYPE_DECL)
5945 fld_worklist_push (TREE_CHAIN (t), fld);
5946 *ws = 0;
5948 else if (TYPE_P (t))
5950 /* Note that walk_tree does not traverse every possible field in
5951 types, so we have to do our own traversals here. */
5952 add_tree_to_fld_list (t, fld);
5954 if (!RECORD_OR_UNION_TYPE_P (t))
5955 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5956 fld_worklist_push (TYPE_SIZE (t), fld);
5957 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5958 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5959 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5960 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5961 fld_worklist_push (TYPE_NAME (t), fld);
5962 /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5963 lists, we may look types up in these lists and use them while
5964 optimizing the function body. Thus we need to free lang data
5965 in them. */
5966 if (TREE_CODE (t) == POINTER_TYPE)
5967 fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
5968 if (TREE_CODE (t) == REFERENCE_TYPE)
5969 fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
5970 if (!POINTER_TYPE_P (t))
5971 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5972 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5973 if (!RECORD_OR_UNION_TYPE_P (t))
5974 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5975 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5976 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5977 	 do not, and do not want to, reach unused variants this way.  */
5978 if (TYPE_CONTEXT (t))
5980 tree ctx = TYPE_CONTEXT (t);
5981 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5982 So push that instead. */
5983 while (ctx && TREE_CODE (ctx) == BLOCK)
5984 ctx = BLOCK_SUPERCONTEXT (ctx);
5985 fld_worklist_push (ctx, fld);
5987 fld_worklist_push (TYPE_CANONICAL (t), fld);
5989 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5991 unsigned i;
5992 tree tem;
5993 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5994 fld_worklist_push (TREE_TYPE (tem), fld);
5995 fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
5996 fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
5998 if (RECORD_OR_UNION_TYPE_P (t))
6000 tree tem;
6001 /* Push all TYPE_FIELDS - there can be interleaving interesting
6002 and non-interesting things. */
6003 tem = TYPE_FIELDS (t);
6004 while (tem)
6006 if (TREE_CODE (tem) == FIELD_DECL)
6007 fld_worklist_push (tem, fld);
6008 tem = TREE_CHAIN (tem);
6011 if (FUNC_OR_METHOD_TYPE_P (t))
6012 fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
6014 fld_worklist_push (TYPE_STUB_DECL (t), fld);
6015 *ws = 0;
6017 else if (TREE_CODE (t) == BLOCK)
6019 for (tree *tem = &BLOCK_VARS (t); *tem; )
6021 if (TREE_CODE (*tem) != LABEL_DECL
6022 && (TREE_CODE (*tem) != VAR_DECL
6023 || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
6025 gcc_assert (TREE_CODE (*tem) != RESULT_DECL
6026 && TREE_CODE (*tem) != PARM_DECL);
6027 *tem = TREE_CHAIN (*tem);
6029 else
6031 fld_worklist_push (*tem, fld);
6032 tem = &TREE_CHAIN (*tem);
6035 for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
6036 fld_worklist_push (tem, fld);
6037 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
6040 if (TREE_CODE (t) != IDENTIFIER_NODE
6041 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
6042 fld_worklist_push (TREE_TYPE (t), fld);
6044 return NULL_TREE;
6048 /* Find decls and types in T. */
6050 static void
6051 find_decls_types (tree t, class free_lang_data_d *fld)
6053 while (1)
6055 if (!fld->pset.contains (t))
6056 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6057 if (fld->worklist.is_empty ())
6058 break;
6059 t = fld->worklist.pop ();
6063 /* Translate all the types in LIST with the corresponding runtime
6064 types. */
6066 static tree
6067 get_eh_types_for_runtime (tree list)
6069 tree head, prev;
6071 if (list == NULL_TREE)
6072 return NULL_TREE;
6074 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6075 prev = head;
6076 list = TREE_CHAIN (list);
6077 while (list)
6079 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6080 TREE_CHAIN (prev) = n;
6081 prev = TREE_CHAIN (prev);
6082 list = TREE_CHAIN (list);
6085 return head;
6089 /* Find decls and types referenced in EH region R and store them in
6090 FLD->DECLS and FLD->TYPES. */
6092 static void
6093 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6095 switch (r->type)
6097 case ERT_CLEANUP:
6098 break;
6100 case ERT_TRY:
6102 eh_catch c;
6104 /* The types referenced in each catch must first be changed to the
6105 EH types used at runtime. This removes references to FE types
6106 in the region. */
6107 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6109 c->type_list = get_eh_types_for_runtime (c->type_list);
6110 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6113 break;
6115 case ERT_ALLOWED_EXCEPTIONS:
6116 r->u.allowed.type_list
6117 = get_eh_types_for_runtime (r->u.allowed.type_list);
6118 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6119 break;
6121 case ERT_MUST_NOT_THROW:
6122 walk_tree (&r->u.must_not_throw.failure_decl,
6123 find_decls_types_r, fld, &fld->pset);
6124 break;
6129 /* Find decls and types referenced in cgraph node N and store them in
6130 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6131 look for *every* kind of DECL and TYPE node reachable from N,
6132 including those embedded inside types and decls (i.e., TYPE_DECLs,
6133 NAMESPACE_DECLs, etc). */
6135 static void
6136 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6138 basic_block bb;
6139 struct function *fn;
6140 unsigned ix;
6141 tree t;
6143 find_decls_types (n->decl, fld);
6145 if (!gimple_has_body_p (n->decl))
6146 return;
6148 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6150 fn = DECL_STRUCT_FUNCTION (n->decl);
6152 /* Traverse locals. */
6153 FOR_EACH_LOCAL_DECL (fn, ix, t)
6154 find_decls_types (t, fld);
6156 /* Traverse EH regions in FN. */
6158 eh_region r;
6159 FOR_ALL_EH_REGION_FN (r, fn)
6160 find_decls_types_in_eh_region (r, fld);
6163 /* Traverse every statement in FN. */
6164 FOR_EACH_BB_FN (bb, fn)
6166 gphi_iterator psi;
6167 gimple_stmt_iterator si;
6168 unsigned i;
6170 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6172 gphi *phi = psi.phi ();
6174 for (i = 0; i < gimple_phi_num_args (phi); i++)
6176 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6177 find_decls_types (*arg_p, fld);
6181 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6183 gimple *stmt = gsi_stmt (si);
6185 if (is_gimple_call (stmt))
6186 find_decls_types (gimple_call_fntype (stmt), fld);
6188 for (i = 0; i < gimple_num_ops (stmt); i++)
6190 tree arg = gimple_op (stmt, i);
6191 find_decls_types (arg, fld);
6192 /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
6193 which we need for asm stmts. */
6194 if (arg
6195 && TREE_CODE (arg) == TREE_LIST
6196 && TREE_PURPOSE (arg)
6197 && gimple_code (stmt) == GIMPLE_ASM)
6198 find_decls_types (TREE_PURPOSE (arg), fld);
6205 /* Find decls and types referenced in varpool node N and store them in
6206 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6207 look for *every* kind of DECL and TYPE node reachable from N,
6208 including those embedded inside types and decls (i.e., TYPE_DECLs,
6209 NAMESPACE_DECLs, etc). */
6211 static void
6212 find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
6214 find_decls_types (v->decl, fld);
6217 /* If T needs an assembler name, have one created for it. */
6219 void
6220 assign_assembler_name_if_needed (tree t)
6222 if (need_assembler_name_p (t))
6224 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6225 diagnostics that use input_location to show locus
6226 information. The problem here is that, at this point,
6227 input_location is generally anchored to the end of the file
6228 (since the parser is long gone), so we don't have a good
6229 position to pin it to.
6231 To alleviate this problem, this uses the location of T's
6232 declaration. Examples of this are
6233 testsuite/g++.dg/template/cond2.C and
6234 testsuite/g++.dg/template/pr35240.C. */
6235 location_t saved_location = input_location;
6236 input_location = DECL_SOURCE_LOCATION (t);
6238 decl_assembler_name (t);
6240 input_location = saved_location;
6245 /* Free language specific information for every operand and expression
6246 in every node of the call graph. This process operates in three stages:
6248 1- Every callgraph node and varpool node is traversed looking for
6249 decls and types embedded in them. This is a more exhaustive
6250 search than that done by find_referenced_vars, because it will
6251 also collect individual fields, decls embedded in types, etc.
6253 2- All the decls found are sent to free_lang_data_in_decl.
6255 3- All the types found are sent to free_lang_data_in_type.
6257 The ordering between decls and types is important because
6258 free_lang_data_in_decl sets assembler names, which includes
6259 mangling. So types cannot be freed up until assembler names have
6260 been set up. */
6262 static void
6263 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6265 struct cgraph_node *n;
6266 varpool_node *v;
6267 tree t;
6268 unsigned i;
6269 alias_pair *p;
6271 /* Find decls and types in the body of every function in the callgraph. */
6272 FOR_EACH_FUNCTION (n)
6273 find_decls_types_in_node (n, fld);
6275 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6276 find_decls_types (p->decl, fld);
6278 /* Find decls and types in every varpool symbol. */
6279 FOR_EACH_VARIABLE (v)
6280 find_decls_types_in_var (v, fld);
6282 /* Set the assembler name on every decl found. We need to do this
6283 now because free_lang_data_in_decl will invalidate data needed
6284 for mangling. This breaks mangling on interdependent decls. */
6285 FOR_EACH_VEC_ELT (fld->decls, i, t)
6286 assign_assembler_name_if_needed (t);
6288 /* Traverse every decl found freeing its language data. */
6289 FOR_EACH_VEC_ELT (fld->decls, i, t)
6290 free_lang_data_in_decl (t, fld);
6292 /* Traverse every type found freeing its language data. */
6293 FOR_EACH_VEC_ELT (fld->types, i, t)
6294 free_lang_data_in_type (t, fld);
6298 /* Free resources that are used by the FE but are not needed once it is done. */
6300 static unsigned
6301 free_lang_data (void)
6303 unsigned i;
6304 class free_lang_data_d fld;
6306 /* If we are the LTO frontend we have freed lang-specific data already. */
6307 if (in_lto_p
6308 || (!flag_generate_lto && !flag_generate_offload))
6310 /* Rebuild type inheritance graph even when not doing LTO to get
6311 consistent profile data. */
6312 rebuild_type_inheritance_graph ();
6313 return 0;
6316 fld_incomplete_types = new hash_map<tree, tree>;
6317 fld_simplified_types = new hash_map<tree, tree>;
6319 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
6320 if (vec_safe_is_empty (all_translation_units))
6321 build_translation_unit_decl (NULL_TREE);
6323 /* Allocate and assign alias sets to the standard integer types
6324 while the slots still hold the types as the frontends generated them. */
6325 for (i = 0; i < itk_none; ++i)
6326 if (integer_types[i])
6327 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6329 /* Traverse the IL resetting language specific information for
6330 operands, expressions, etc. */
6331 free_lang_data_in_cgraph (&fld);
6333 /* Create gimple variants for common types. */
6334 for (unsigned i = 0;
6335 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6336 ++i)
6337 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6339 /* Reset some langhooks. Do not reset types_compatible_p, it may
6340 still be used indirectly via the get_alias_set langhook. */
6341 lang_hooks.dwarf_name = lhd_dwarf_name;
6342 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6343 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6344 lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6345 lang_hooks.print_xnode = lhd_print_tree_nothing;
6346 lang_hooks.print_decl = lhd_print_tree_nothing;
6347 lang_hooks.print_type = lhd_print_tree_nothing;
6348 lang_hooks.print_identifier = lhd_print_tree_nothing;
6350 lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6352 if (flag_checking)
6354 int i;
6355 tree t;
6357 FOR_EACH_VEC_ELT (fld.types, i, t)
6358 verify_type (t);
6361 /* We do not want the default decl_assembler_name implementation,
6362 rather if we have fixed everything we want a wrapper around it
6363 asserting that all non-local symbols already got their assembler
6364 name and only produce assembler names for local symbols. Or rather
6365 make sure we never call decl_assembler_name on local symbols and
6366 devise a separate, middle-end private scheme for it. */
6368 /* Reset diagnostic machinery. */
6369 tree_diagnostics_defaults (global_dc);
6371 rebuild_type_inheritance_graph ();
6373 delete fld_incomplete_types;
6374 delete fld_simplified_types;
6376 return 0;
6380 namespace {
6382 const pass_data pass_data_ipa_free_lang_data =
6384 SIMPLE_IPA_PASS, /* type */
6385 "*free_lang_data", /* name */
6386 OPTGROUP_NONE, /* optinfo_flags */
6387 TV_IPA_FREE_LANG_DATA, /* tv_id */
6388 0, /* properties_required */
6389 0, /* properties_provided */
6390 0, /* properties_destroyed */
6391 0, /* todo_flags_start */
6392 0, /* todo_flags_finish */
6395 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6397 public:
6398 pass_ipa_free_lang_data (gcc::context *ctxt)
6399 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6402 /* opt_pass methods: */
6403 virtual unsigned int execute (function *) { return free_lang_data (); }
6405 }; // class pass_ipa_free_lang_data
6407 } // anon namespace
6409 simple_ipa_opt_pass *
6410 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6412 return new pass_ipa_free_lang_data (ctxt);
6415 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6416 of the various TYPE_QUAL values. */
6418 static void
6419 set_type_quals (tree type, int type_quals)
6421 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6422 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6423 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6424 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6425 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6428 /* Returns true iff CAND and BASE have equivalent language-specific
6429 qualifiers. */
6431 bool
6432 check_lang_type (const_tree cand, const_tree base)
6434 if (lang_hooks.types.type_hash_eq == NULL)
6435 return true;
6436 /* type_hash_eq currently only applies to these types. */
6437 if (TREE_CODE (cand) != FUNCTION_TYPE
6438 && TREE_CODE (cand) != METHOD_TYPE)
6439 return true;
6440 return lang_hooks.types.type_hash_eq (cand, base);
6443 /* This function checks to see if TYPE matches the size of one of the built-in
6444 atomic types, and returns that core atomic type. */
6446 static tree
6447 find_atomic_core_type (const_tree type)
6449 tree base_atomic_type;
6451 /* Only handle complete types. */
6452 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6453 return NULL_TREE;
6455 switch (tree_to_uhwi (TYPE_SIZE (type)))
6457 case 8:
6458 base_atomic_type = atomicQI_type_node;
6459 break;
6461 case 16:
6462 base_atomic_type = atomicHI_type_node;
6463 break;
6465 case 32:
6466 base_atomic_type = atomicSI_type_node;
6467 break;
6469 case 64:
6470 base_atomic_type = atomicDI_type_node;
6471 break;
6473 case 128:
6474 base_atomic_type = atomicTI_type_node;
6475 break;
6477 default:
6478 base_atomic_type = NULL_TREE;
6481 return base_atomic_type;
6484 /* Returns true iff unqualified CAND and BASE are equivalent. */
6486 bool
6487 check_base_type (const_tree cand, const_tree base)
6489 if (TYPE_NAME (cand) != TYPE_NAME (base)
6490 /* Apparently this is needed for Objective-C. */
6491 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6492 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6493 TYPE_ATTRIBUTES (base)))
6494 return false;
6495 /* Check alignment. */
6496 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6497 return true;
6498 /* Atomic types increase minimal alignment. We must do so as well
6499 or we get duplicated canonical types. See PR88686. */
6500 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6502 /* See if this object can map to a basic atomic type. */
6503 tree atomic_type = find_atomic_core_type (cand);
6504 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6505 return true;
6507 return false;
6510 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6512 bool
6513 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6515 return (TYPE_QUALS (cand) == type_quals
6516 && check_base_type (cand, base)
6517 && check_lang_type (cand, base));
6520 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6522 static bool
6523 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6525 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6526 && TYPE_NAME (cand) == TYPE_NAME (base)
6527 /* Apparently this is needed for Objective-C. */
6528 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6529 /* Check alignment. */
6530 && TYPE_ALIGN (cand) == align
6531 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6532 TYPE_ATTRIBUTES (base))
6533 && check_lang_type (cand, base));
6536 /* Return a version of the TYPE, qualified as indicated by the
6537 TYPE_QUALS, if one exists. If no qualified version exists yet,
6538 return NULL_TREE. */
6540 tree
6541 get_qualified_type (tree type, int type_quals)
6543 if (TYPE_QUALS (type) == type_quals)
6544 return type;
6546 tree mv = TYPE_MAIN_VARIANT (type);
6547 if (check_qualified_type (mv, type, type_quals))
6548 return mv;
6550 /* Search the chain of variants to see if there is already one there just
6551 like the one we need to have. If so, use that existing one. We must
6552 preserve the TYPE_NAME, since there is code that depends on this. */
6553 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6554 if (check_qualified_type (*tp, type, type_quals))
6556 /* Put the found variant at the head of the variant list so
6557 frequently searched variants get found faster. The C++ FE
6558 benefits greatly from this. */
6559 tree t = *tp;
6560 *tp = TYPE_NEXT_VARIANT (t);
6561 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6562 TYPE_NEXT_VARIANT (mv) = t;
6563 return t;
6566 return NULL_TREE;
6569 /* Like get_qualified_type, but creates the type if it does not
6570 exist. This function never returns NULL_TREE. */
6572 tree
6573 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6575 tree t;
6577 /* See if we already have the appropriate qualified variant. */
6578 t = get_qualified_type (type, type_quals);
6580 /* If not, build it. */
6581 if (!t)
6583 t = build_variant_type_copy (type PASS_MEM_STAT);
6584 set_type_quals (t, type_quals);
6586 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6588 /* See if this object can map to a basic atomic type. */
6589 tree atomic_type = find_atomic_core_type (type);
6590 if (atomic_type)
6592 /* Ensure the alignment of this type is compatible with
6593 the required alignment of the atomic type. */
6594 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6595 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6599 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6600 /* Propagate structural equality. */
6601 SET_TYPE_STRUCTURAL_EQUALITY (t);
6602 else if (TYPE_CANONICAL (type) != type)
6603 /* Build the underlying canonical type, since it is different
6604 from TYPE. */
6606 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6607 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6609 else
6610 /* T is its own canonical type. */
6611 TYPE_CANONICAL (t) = t;
6615 return t;
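
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  It shows the
   usual way a caller obtains a qualified variant: combine TYPE_QUAL_*
   flags into a bitmask and let build_qualified_type either find an
   existing variant or create one.  integer_type_node is the standard
   "int" node and is normally its own main variant.  */

static tree
example_build_const_volatile_int (void)
{
  /* Request a "const volatile int" variant.  */
  tree cv_int = build_qualified_type (integer_type_node,
				      TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

  /* The variant shares the main variant with plain "int" and records the
     qualifiers that set_type_quals stored on it.  */
  gcc_assert (TYPE_MAIN_VARIANT (cv_int) == integer_type_node);
  gcc_assert (TYPE_READONLY (cv_int) && TYPE_VOLATILE (cv_int));

  /* A second request finds the cached variant via get_qualified_type.  */
  gcc_assert (build_qualified_type (integer_type_node,
				    TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)
	      == cv_int);
  return cv_int;
}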
6618 /* Create a variant of type T with alignment ALIGN. */
6620 tree
6621 build_aligned_type (tree type, unsigned int align)
6623 tree t;
6625 if (TYPE_PACKED (type)
6626 || TYPE_ALIGN (type) == align)
6627 return type;
6629 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6630 if (check_aligned_type (t, type, align))
6631 return t;
6633 t = build_variant_type_copy (type);
6634 SET_TYPE_ALIGN (t, align);
6635 TYPE_USER_ALIGN (t) = 1;
6637 return t;
6640 /* Create a new distinct copy of TYPE. The new type is made its own
6641 MAIN_VARIANT. If TYPE requires structural equality checks, the
6642 resulting type requires structural equality checks; otherwise, its
6643 TYPE_CANONICAL points to itself. */
6645 tree
6646 build_distinct_type_copy (tree type MEM_STAT_DECL)
6648 tree t = copy_node (type PASS_MEM_STAT);
6650 TYPE_POINTER_TO (t) = 0;
6651 TYPE_REFERENCE_TO (t) = 0;
6653 /* Set the canonical type either to a new equivalence class, or
6654 propagate the need for structural equality checks. */
6655 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6656 SET_TYPE_STRUCTURAL_EQUALITY (t);
6657 else
6658 TYPE_CANONICAL (t) = t;
6660 /* Make it its own variant. */
6661 TYPE_MAIN_VARIANT (t) = t;
6662 TYPE_NEXT_VARIANT (t) = 0;
6664 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6665 whose TREE_TYPE is not t. This can also happen in the Ada
6666 frontend when using subtypes. */
6668 return t;
6671 /* Create a new variant of TYPE, equivalent but distinct. This is so
6672 the caller can modify it. TYPE_CANONICAL for the return type will
6673 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6674 are considered equal by the language itself (or that both types
6675 require structural equality checks). */
6677 tree
6678 build_variant_type_copy (tree type MEM_STAT_DECL)
6680 tree t, m = TYPE_MAIN_VARIANT (type);
6682 t = build_distinct_type_copy (type PASS_MEM_STAT);
6684 /* Since we're building a variant, assume that it is a non-semantic
6685 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6686 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6687 /* Type variants have no alias set defined. */
6688 TYPE_ALIAS_SET (t) = -1;
6690 /* Add the new type to the chain of variants of TYPE. */
6691 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6692 TYPE_NEXT_VARIANT (m) = t;
6693 TYPE_MAIN_VARIANT (t) = m;
6695 return t;
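
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  It contrasts
   the two copy primitives above: build_distinct_type_copy yields a new
   main variant (a semantically distinct type), while
   build_variant_type_copy yields a variant that stays equivalent to the
   original for canonical-type purposes.  */

static void
example_type_copies (tree type)
{
  tree distinct = build_distinct_type_copy (type);
  tree variant = build_variant_type_copy (type);

  /* The distinct copy starts its own variant chain.  */
  gcc_assert (TYPE_MAIN_VARIANT (distinct) == distinct);

  /* The variant copy stays on TYPE's variant chain and inherits
     TYPE_CANONICAL, so the middle end treats it as the same type.  */
  gcc_assert (TYPE_MAIN_VARIANT (variant) == TYPE_MAIN_VARIANT (type));
  gcc_assert (TYPE_CANONICAL (variant) == TYPE_CANONICAL (type));
}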
6698 /* Return true if the from trees in both tree maps are equal. */
6701 tree_map_base_eq (const void *va, const void *vb)
6703 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6704 *const b = (const struct tree_map_base *) vb;
6705 return (a->from == b->from);
6708 /* Hash a from tree in a tree_map_base. */
6710 unsigned int
6711 tree_map_base_hash (const void *item)
6713 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6716 /* Return true if this tree map structure is marked for garbage collection
6717 purposes. We simply return true if the from tree is marked, so that this
6718 structure goes away when the from tree goes away. */
6721 tree_map_base_marked_p (const void *p)
6723 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6726 /* Hash a from tree in a tree_map. */
6728 unsigned int
6729 tree_map_hash (const void *item)
6731 return (((const struct tree_map *) item)->hash);
6734 /* Hash a from tree in a tree_decl_map. */
6736 unsigned int
6737 tree_decl_map_hash (const void *item)
6739 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6742 /* Return the initialization priority for DECL. */
6744 priority_type
6745 decl_init_priority_lookup (tree decl)
6747 symtab_node *snode = symtab_node::get (decl);
6749 if (!snode)
6750 return DEFAULT_INIT_PRIORITY;
6751 return
6752 snode->get_init_priority ();
6755 /* Return the finalization priority for DECL. */
6757 priority_type
6758 decl_fini_priority_lookup (tree decl)
6760 cgraph_node *node = cgraph_node::get (decl);
6762 if (!node)
6763 return DEFAULT_INIT_PRIORITY;
6764 return
6765 node->get_fini_priority ();
6768 /* Set the initialization priority for DECL to PRIORITY. */
6770 void
6771 decl_init_priority_insert (tree decl, priority_type priority)
6773 struct symtab_node *snode;
6775 if (priority == DEFAULT_INIT_PRIORITY)
6777 snode = symtab_node::get (decl);
6778 if (!snode)
6779 return;
6781 else if (VAR_P (decl))
6782 snode = varpool_node::get_create (decl);
6783 else
6784 snode = cgraph_node::get_create (decl);
6785 snode->set_init_priority (priority);
6788 /* Set the finalization priority for DECL to PRIORITY. */
6790 void
6791 decl_fini_priority_insert (tree decl, priority_type priority)
6793 struct cgraph_node *node;
6795 if (priority == DEFAULT_INIT_PRIORITY)
6797 node = cgraph_node::get (decl);
6798 if (!node)
6799 return;
6801 else
6802 node = cgraph_node::get_create (decl);
6803 node->set_fini_priority (priority);
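
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  It shows the
   insert/lookup pairing for constructor and destructor priorities on a
   FUNCTION_DECL: storing DEFAULT_INIT_PRIORITY is a no-op unless a symtab
   node already exists, and lookup falls back to DEFAULT_INIT_PRIORITY for
   decls with no node.  */

static void
example_set_ctor_priority (tree ctor_fndecl)
{
  /* Record a non-default priority; this creates the cgraph node if
     needed and stores the value there.  */
  decl_init_priority_insert (ctor_fndecl, 101);
  gcc_assert (decl_init_priority_lookup (ctor_fndecl) == 101);

  /* Destructor priorities use the parallel fini API.  */
  decl_fini_priority_insert (ctor_fndecl, 101);
  gcc_assert (decl_fini_priority_lookup (ctor_fndecl) == 101);
}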
6806 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6808 static void
6809 print_debug_expr_statistics (void)
6811 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6812 (long) debug_expr_for_decl->size (),
6813 (long) debug_expr_for_decl->elements (),
6814 debug_expr_for_decl->collisions ());
6817 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6819 static void
6820 print_value_expr_statistics (void)
6822 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6823 (long) value_expr_for_decl->size (),
6824 (long) value_expr_for_decl->elements (),
6825 value_expr_for_decl->collisions ());
6828 /* Lookup a debug expression for FROM, and return it if we find one. */
6830 tree
6831 decl_debug_expr_lookup (tree from)
6833 struct tree_decl_map *h, in;
6834 in.base.from = from;
6836 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6837 if (h)
6838 return h->to;
6839 return NULL_TREE;
6842 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6844 void
6845 decl_debug_expr_insert (tree from, tree to)
6847 struct tree_decl_map *h;
6849 h = ggc_alloc<tree_decl_map> ();
6850 h->base.from = from;
6851 h->to = to;
6852 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6855 /* Lookup a value expression for FROM, and return it if we find one. */
6857 tree
6858 decl_value_expr_lookup (tree from)
6860 struct tree_decl_map *h, in;
6861 in.base.from = from;
6863 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6864 if (h)
6865 return h->to;
6866 return NULL_TREE;
6869 /* Insert a mapping FROM->TO in the value expression hashtable. */
6871 void
6872 decl_value_expr_insert (tree from, tree to)
6874 struct tree_decl_map *h;
6876 h = ggc_alloc<tree_decl_map> ();
6877 h->base.from = from;
6878 h->to = to;
6879 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
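
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  DECL_DEBUG_EXPR
   and DECL_VALUE_EXPR are stored out-of-band in the two hash tables above
   rather than in the decl itself; the sketch shows the basic insert/lookup
   round trip for a value expression.  (The flag that the tree.h accessors
   check before doing a lookup is managed by the callers, not by these
   helpers.)  */

static void
example_record_value_expr (tree decl, tree replacement_expr)
{
  /* Map DECL to the expression it should be rewritten to.  */
  decl_value_expr_insert (decl, replacement_expr);

  /* Later consumers fetch the mapping, hashed by DECL_UID.  */
  gcc_assert (decl_value_expr_lookup (decl) == replacement_expr);
}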
6882 /* Lookup a vector of debug arguments for FROM, and return it if we
6883 find one. */
6885 vec<tree, va_gc> **
6886 decl_debug_args_lookup (tree from)
6888 struct tree_vec_map *h, in;
6890 if (!DECL_HAS_DEBUG_ARGS_P (from))
6891 return NULL;
6892 gcc_checking_assert (debug_args_for_decl != NULL);
6893 in.base.from = from;
6894 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6895 if (h)
6896 return &h->to;
6897 return NULL;
6900 /* Insert a mapping FROM->empty vector of debug arguments in the value
6901 expression hashtable. */
6903 vec<tree, va_gc> **
6904 decl_debug_args_insert (tree from)
6906 struct tree_vec_map *h;
6907 tree_vec_map **loc;
6909 if (DECL_HAS_DEBUG_ARGS_P (from))
6910 return decl_debug_args_lookup (from);
6911 if (debug_args_for_decl == NULL)
6912 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6913 h = ggc_alloc<tree_vec_map> ();
6914 h->base.from = from;
6915 h->to = NULL;
6916 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6917 *loc = h;
6918 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6919 return &h->to;
6922 /* Hashing of types so that we don't make duplicates.
6923 The entry point is `type_hash_canon'. */
6925 /* Generate the default hash code for TYPE. This is designed for
6926 speed, rather than maximum entropy. */
6928 hashval_t
6929 type_hash_canon_hash (tree type)
6931 inchash::hash hstate;
6933 hstate.add_int (TREE_CODE (type));
6935 if (TREE_TYPE (type))
6936 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6938 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6939 /* Just the identifier is adequate to distinguish. */
6940 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6942 switch (TREE_CODE (type))
6944 case METHOD_TYPE:
6945 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6946 /* FALLTHROUGH. */
6947 case FUNCTION_TYPE:
6948 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6949 if (TREE_VALUE (t) != error_mark_node)
6950 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6951 break;
6953 case OFFSET_TYPE:
6954 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6955 break;
6957 case ARRAY_TYPE:
6959 if (TYPE_DOMAIN (type))
6960 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6961 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6963 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6964 hstate.add_object (typeless);
6967 break;
6969 case INTEGER_TYPE:
6971 tree t = TYPE_MAX_VALUE (type);
6972 if (!t)
6973 t = TYPE_MIN_VALUE (type);
6974 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6975 hstate.add_object (TREE_INT_CST_ELT (t, i));
6976 break;
6979 case REAL_TYPE:
6980 case FIXED_POINT_TYPE:
6982 unsigned prec = TYPE_PRECISION (type);
6983 hstate.add_object (prec);
6984 break;
6987 case VECTOR_TYPE:
6988 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6989 break;
6991 default:
6992 break;
6995 return hstate.end ();
6998 /* These are the Hashtable callback functions. */
7000 /* Returns true iff the types are equivalent. */
7002 bool
7003 type_cache_hasher::equal (type_hash *a, type_hash *b)
7005 /* First test the things that are the same for all types. */
7006 if (a->hash != b->hash
7007 || TREE_CODE (a->type) != TREE_CODE (b->type)
7008 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7009 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7010 TYPE_ATTRIBUTES (b->type))
7011 || (TREE_CODE (a->type) != COMPLEX_TYPE
7012 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7013 return 0;
7015 /* Be careful about comparing arrays before and after the element type
7016 has been completed; don't compare TYPE_ALIGN unless both types are
7017 complete. */
7018 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7019 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7020 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7021 return 0;
7023 switch (TREE_CODE (a->type))
7025 case VOID_TYPE:
7026 case COMPLEX_TYPE:
7027 case POINTER_TYPE:
7028 case REFERENCE_TYPE:
7029 case NULLPTR_TYPE:
7030 return 1;
7032 case VECTOR_TYPE:
7033 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
7034 TYPE_VECTOR_SUBPARTS (b->type));
7036 case ENUMERAL_TYPE:
7037 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7038 && !(TYPE_VALUES (a->type)
7039 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7040 && TYPE_VALUES (b->type)
7041 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7042 && type_list_equal (TYPE_VALUES (a->type),
7043 TYPE_VALUES (b->type))))
7044 return 0;
7046 /* fall through */
7048 case INTEGER_TYPE:
7049 case REAL_TYPE:
7050 case BOOLEAN_TYPE:
7051 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7052 return false;
7053 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7054 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7055 TYPE_MAX_VALUE (b->type)))
7056 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7057 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7058 TYPE_MIN_VALUE (b->type))));
7060 case FIXED_POINT_TYPE:
7061 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7063 case OFFSET_TYPE:
7064 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7066 case METHOD_TYPE:
7067 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7068 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7069 || (TYPE_ARG_TYPES (a->type)
7070 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7071 && TYPE_ARG_TYPES (b->type)
7072 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7073 && type_list_equal (TYPE_ARG_TYPES (a->type),
7074 TYPE_ARG_TYPES (b->type)))))
7075 break;
7076 return 0;
7077 case ARRAY_TYPE:
7078 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7079 where the flag should be inherited from the element type
7080 and can change after ARRAY_TYPEs are created; on non-aggregates
7081 compare it and hash it, scalars will never have that flag set
7082 and we need to differentiate between arrays created by different
7083 front-ends or middle-end created arrays. */
7084 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
7085 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
7086 || (TYPE_TYPELESS_STORAGE (a->type)
7087 == TYPE_TYPELESS_STORAGE (b->type))));
7089 case RECORD_TYPE:
7090 case UNION_TYPE:
7091 case QUAL_UNION_TYPE:
7092 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7093 || (TYPE_FIELDS (a->type)
7094 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7095 && TYPE_FIELDS (b->type)
7096 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7097 && type_list_equal (TYPE_FIELDS (a->type),
7098 TYPE_FIELDS (b->type))));
7100 case FUNCTION_TYPE:
7101 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7102 || (TYPE_ARG_TYPES (a->type)
7103 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7104 && TYPE_ARG_TYPES (b->type)
7105 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7106 && type_list_equal (TYPE_ARG_TYPES (a->type),
7107 TYPE_ARG_TYPES (b->type))))
7108 break;
7109 return 0;
7111 default:
7112 return 0;
7115 if (lang_hooks.types.type_hash_eq != NULL)
7116 return lang_hooks.types.type_hash_eq (a->type, b->type);
7118 return 1;
7121 /* Given TYPE, and HASHCODE its hash code, return the canonical
7122 object for an identical type if one already exists.
7123 Otherwise, return TYPE, and record it as the canonical object.
7125 To use this function, first create a type of the sort you want.
7126 Then compute its hash code from the fields of the type that
7127 make it different from other similar types.
7128 Then call this function and use the value. */
7130 tree
7131 type_hash_canon (unsigned int hashcode, tree type)
7133 type_hash in;
7134 type_hash **loc;
7136 /* The hash table only contains main variants, so ensure that's what we're
7137 being passed. */
7138 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7140 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7141 must call that routine before comparing TYPE_ALIGNs. */
7142 layout_type (type);
7144 in.hash = hashcode;
7145 in.type = type;
7147 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7148 if (*loc)
7150 tree t1 = ((type_hash *) *loc)->type;
7151 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
7152 && t1 != type);
7153 if (TYPE_UID (type) + 1 == next_type_uid)
7154 --next_type_uid;
7155 /* Also free the min/max values and the cache for integer
7156 types. This can't be done in free_node, as LTO frees
7157 those on its own. */
7158 if (TREE_CODE (type) == INTEGER_TYPE)
7160 if (TYPE_MIN_VALUE (type)
7161 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
7163 /* Zero is always in TYPE_CACHED_VALUES. */
7164 if (! TYPE_UNSIGNED (type))
7165 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
7166 ggc_free (TYPE_MIN_VALUE (type));
7168 if (TYPE_MAX_VALUE (type)
7169 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
7171 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
7172 ggc_free (TYPE_MAX_VALUE (type));
7174 if (TYPE_CACHED_VALUES_P (type))
7175 ggc_free (TYPE_CACHED_VALUES (type));
7177 free_node (type);
7178 return t1;
7180 else
7182 struct type_hash *h;
7184 h = ggc_alloc<type_hash> ();
7185 h->hash = hashcode;
7186 h->type = type;
7187 *loc = h;
7189 return type;
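
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  It spells out
   the usage pattern described in the comment above type_hash_canon,
   mirroring what build_range_type_1 later in this file does: build a
   candidate main variant, hash its distinguishing fields, and let the
   table either hand back an existing identical type (freeing the
   candidate) or record the candidate as the canonical object.  */

static tree
example_canonicalize_integer_type (unsigned int precision)
{
  /* 1- Create a type of the sort we want.  */
  tree candidate = make_node (INTEGER_TYPE);
  TYPE_PRECISION (candidate) = precision;
  fixup_signed_type (candidate);

  /* 2- Compute a hash from the fields that distinguish it.  */
  hashval_t hash = type_hash_canon_hash (candidate);

  /* 3- Use whatever the table hands back; CANDIDATE may have been freed
     if an identical type hashed the same way already existed.  */
  return type_hash_canon (hash, candidate);
}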
7193 static void
7194 print_type_hash_statistics (void)
7196 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7197 (long) type_hash_table->size (),
7198 (long) type_hash_table->elements (),
7199 type_hash_table->collisions ());
7202 /* Given two lists of types
7203 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7204 return 1 if the lists contain the same types in the same order.
7205 Also, the TREE_PURPOSEs must match. */
7207 bool
7208 type_list_equal (const_tree l1, const_tree l2)
7210 const_tree t1, t2;
7212 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7213 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7214 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7215 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7216 && (TREE_TYPE (TREE_PURPOSE (t1))
7217 == TREE_TYPE (TREE_PURPOSE (t2))))))
7218 return false;
7220 return t1 == t2;
7223 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7224 given by TYPE. If the argument list accepts variable arguments,
7225 then this function counts only the ordinary arguments. */
7228 type_num_arguments (const_tree fntype)
7230 int i = 0;
7232 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7233 /* If the function does not take a variable number of arguments,
7234 the last element in the list will have type `void'. */
7235 if (VOID_TYPE_P (TREE_VALUE (t)))
7236 break;
7237 else
7238 ++i;
7240 return i;
7243 /* Return the type of the function TYPE's argument ARGNO if known.
7244 For vararg functions where ARGNO refers to one of the variadic
7245 arguments, return null. Otherwise, return void_type_node for
7246 out-of-bounds ARGNO. */
7248 tree
7249 type_argument_type (const_tree fntype, unsigned argno)
7251 /* Treat zero the same as an out-of-bounds argument number. */
7252 if (!argno)
7253 return void_type_node;
7255 function_args_iterator iter;
7257 tree argtype;
7258 unsigned i = 1;
7259 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7261 /* A vararg function's argument list ends in a null. Otherwise,
7262 an ordinary function's argument list ends with void. Return
7263 null if ARGNO refers to a vararg argument, void_type_node if
7264 it's out of bounds, and the formal argument type otherwise. */
7265 if (!argtype)
7266 break;
7268 if (i == argno || VOID_TYPE_P (argtype))
7269 return argtype;
7271 ++i;
7274 return NULL_TREE;
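
/* Editor's note: illustrative worked example, not part of the original
   file.  For a declaration such as "int f (int, char *, ...)", with
   FNTYPE its FUNCTION_TYPE, the two helpers above behave as follows:
     type_num_arguments (fntype)     -> 2   (the variadic tail is not counted)
     type_argument_type (fntype, 1)  -> the int type
     type_argument_type (fntype, 2)  -> the char * type
     type_argument_type (fntype, 3)  -> NULL_TREE (a variadic argument)
   and for a non-variadic "int g (void)":
     type_argument_type (TREE_TYPE (g_decl), 1) -> void_type_node
   since out-of-bounds ARGNO (including 0) yields the terminating void.  */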
7277 /* Nonzero if integer constants T1 and T2
7278 represent the same constant value. */
7281 tree_int_cst_equal (const_tree t1, const_tree t2)
7283 if (t1 == t2)
7284 return 1;
7286 if (t1 == 0 || t2 == 0)
7287 return 0;
7289 STRIP_ANY_LOCATION_WRAPPER (t1);
7290 STRIP_ANY_LOCATION_WRAPPER (t2);
7292 if (TREE_CODE (t1) == INTEGER_CST
7293 && TREE_CODE (t2) == INTEGER_CST
7294 && wi::to_widest (t1) == wi::to_widest (t2))
7295 return 1;
7297 return 0;
7300 /* Return true if T is an INTEGER_CST whose numerical value (extended
7301 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7303 bool
7304 tree_fits_shwi_p (const_tree t)
7306 return (t != NULL_TREE
7307 && TREE_CODE (t) == INTEGER_CST
7308 && wi::fits_shwi_p (wi::to_widest (t)));
7311 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7312 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7314 bool
7315 tree_fits_poly_int64_p (const_tree t)
7317 if (t == NULL_TREE)
7318 return false;
7319 if (POLY_INT_CST_P (t))
7321 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7322 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7323 return false;
7324 return true;
7326 return (TREE_CODE (t) == INTEGER_CST
7327 && wi::fits_shwi_p (wi::to_widest (t)));
7330 /* Return true if T is an INTEGER_CST whose numerical value (extended
7331 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7333 bool
7334 tree_fits_uhwi_p (const_tree t)
7336 return (t != NULL_TREE
7337 && TREE_CODE (t) == INTEGER_CST
7338 && wi::fits_uhwi_p (wi::to_widest (t)));
7341 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7342 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7344 bool
7345 tree_fits_poly_uint64_p (const_tree t)
7347 if (t == NULL_TREE)
7348 return false;
7349 if (POLY_INT_CST_P (t))
7351 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7352 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7353 return false;
7354 return true;
7356 return (TREE_CODE (t) == INTEGER_CST
7357 && wi::fits_uhwi_p (wi::to_widest (t)));
7360 /* T is an INTEGER_CST whose numerical value (extended according to
7361 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7362 HOST_WIDE_INT. */
7364 HOST_WIDE_INT
7365 tree_to_shwi (const_tree t)
7367 gcc_assert (tree_fits_shwi_p (t));
7368 return TREE_INT_CST_LOW (t);
7371 /* T is an INTEGER_CST whose numerical value (extended according to
7372 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7373 HOST_WIDE_INT. */
7375 unsigned HOST_WIDE_INT
7376 tree_to_uhwi (const_tree t)
7378 gcc_assert (tree_fits_uhwi_p (t));
7379 return TREE_INT_CST_LOW (t);
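
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  The
   tree_fits_*_p predicates are the required guards for the tree_to_*
   accessors above, which assert on failure.  The usual pattern for
   extracting a constant byte size looks like this.  */

static bool
example_get_size_in_bytes (tree type, unsigned HOST_WIDE_INT *size_out)
{
  tree size = TYPE_SIZE_UNIT (type);
  if (size == NULL_TREE || !tree_fits_uhwi_p (size))
    /* Incomplete or variable-sized type: no constant answer.  */
    return false;
  *size_out = tree_to_uhwi (size);
  return true;
}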
7382 /* Return the most significant (sign) bit of T. */
7385 tree_int_cst_sign_bit (const_tree t)
7387 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7389 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7392 /* Return an indication of the sign of the integer constant T.
7393 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7394 Note that -1 will never be returned if T's type is unsigned. */
7397 tree_int_cst_sgn (const_tree t)
7399 if (wi::to_wide (t) == 0)
7400 return 0;
7401 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7402 return 1;
7403 else if (wi::neg_p (wi::to_wide (t)))
7404 return -1;
7405 else
7406 return 1;
7409 /* Return the minimum number of bits needed to represent VALUE in a
7410 signed or unsigned type; SGN says which. */
7412 unsigned int
7413 tree_int_cst_min_precision (tree value, signop sgn)
7415 /* If the value is negative, compute its negative minus 1. The latter
7416 adjustment is because the absolute value of the largest negative value
7417 is one larger than the largest positive value. This is equivalent to
7418 a bit-wise negation, so use that operation instead. */
7420 if (tree_int_cst_sgn (value) < 0)
7421 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7423 /* Return the number of bits needed, taking into account the fact
7424 that we need one more bit for a signed than unsigned type.
7425 If value is 0 or -1, the minimum precision is 1 no matter
7426 whether SGN is SIGNED or UNSIGNED. */
7428 if (integer_zerop (value))
7429 return 1;
7430 else
7431 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
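
/* Editor's note: illustrative worked example, not part of the original
   file.  For VALUE == 5: tree_floor_log2 (5) == 2, so the function
   returns 3 for UNSIGNED (5 fits in 0..7) and 4 for SIGNED (5 needs the
   range -8..7).  For VALUE == -5: the BIT_NOT_EXPR above turns it into 4,
   tree_floor_log2 (4) == 2, giving 4 bits for SIGNED (-5 lies in -8..7
   but not in -4..3).  For 0 or -1 the answer is 1.  */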
7434 /* Return truthvalue of whether T1 is the same tree structure as T2.
7435 Return 1 if they are the same.
7436 Return 0 if they are understandably different.
7437 Return -1 if either contains tree structure not understood by
7438 this function. */
7441 simple_cst_equal (const_tree t1, const_tree t2)
7443 enum tree_code code1, code2;
7444 int cmp;
7445 int i;
7447 if (t1 == t2)
7448 return 1;
7449 if (t1 == 0 || t2 == 0)
7450 return 0;
7452 /* For location wrappers to be the same, they must be at the same
7453 source location (and wrap the same thing). */
7454 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7456 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7457 return 0;
7458 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7461 code1 = TREE_CODE (t1);
7462 code2 = TREE_CODE (t2);
7464 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7466 if (CONVERT_EXPR_CODE_P (code2)
7467 || code2 == NON_LVALUE_EXPR)
7468 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7469 else
7470 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7473 else if (CONVERT_EXPR_CODE_P (code2)
7474 || code2 == NON_LVALUE_EXPR)
7475 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7477 if (code1 != code2)
7478 return 0;
7480 switch (code1)
7482 case INTEGER_CST:
7483 return wi::to_widest (t1) == wi::to_widest (t2);
7485 case REAL_CST:
7486 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7488 case FIXED_CST:
7489 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7491 case STRING_CST:
7492 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7493 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7494 TREE_STRING_LENGTH (t1)));
7496 case CONSTRUCTOR:
7498 unsigned HOST_WIDE_INT idx;
7499 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7500 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7502 if (vec_safe_length (v1) != vec_safe_length (v2))
7503 return false;
7505 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7506 /* ??? Should we handle also fields here? */
7507 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7508 return false;
7509 return true;
7512 case SAVE_EXPR:
7513 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7515 case CALL_EXPR:
7516 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7517 if (cmp <= 0)
7518 return cmp;
7519 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7520 return 0;
7522 const_tree arg1, arg2;
7523 const_call_expr_arg_iterator iter1, iter2;
7524 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7525 arg2 = first_const_call_expr_arg (t2, &iter2);
7526 arg1 && arg2;
7527 arg1 = next_const_call_expr_arg (&iter1),
7528 arg2 = next_const_call_expr_arg (&iter2))
7530 cmp = simple_cst_equal (arg1, arg2);
7531 if (cmp <= 0)
7532 return cmp;
7534 return arg1 == arg2;
7537 case TARGET_EXPR:
7538 /* Special case: if either target is an unallocated VAR_DECL,
7539 it means that it's going to be unified with whatever the
7540 TARGET_EXPR is really supposed to initialize, so treat it
7541 as being equivalent to anything. */
7542 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7543 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7544 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7545 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7546 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7547 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7548 cmp = 1;
7549 else
7550 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7552 if (cmp <= 0)
7553 return cmp;
7555 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7557 case WITH_CLEANUP_EXPR:
7558 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7559 if (cmp <= 0)
7560 return cmp;
7562 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7564 case COMPONENT_REF:
7565 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7566 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7568 return 0;
7570 case VAR_DECL:
7571 case PARM_DECL:
7572 case CONST_DECL:
7573 case FUNCTION_DECL:
7574 return 0;
7576 default:
7577 if (POLY_INT_CST_P (t1))
7578 /* A false return means maybe_ne rather than known_ne. */
7579 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7580 TYPE_SIGN (TREE_TYPE (t1))),
7581 poly_widest_int::from (poly_int_cst_value (t2),
7582 TYPE_SIGN (TREE_TYPE (t2))));
7583 break;
7586 /* This general rule works for most tree codes. All exceptions should be
7587 handled above. If this is a language-specific tree code, we can't
7588 trust what might be in the operand, so say we don't know
7589 the situation. */
7590 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7591 return -1;
7593 switch (TREE_CODE_CLASS (code1))
7595 case tcc_unary:
7596 case tcc_binary:
7597 case tcc_comparison:
7598 case tcc_expression:
7599 case tcc_reference:
7600 case tcc_statement:
7601 cmp = 1;
7602 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7604 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7605 if (cmp <= 0)
7606 return cmp;
7609 return cmp;
7611 default:
7612 return -1;
7616 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7617 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7618 than U, respectively. */
7621 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7623 if (tree_int_cst_sgn (t) < 0)
7624 return -1;
7625 else if (!tree_fits_uhwi_p (t))
7626 return 1;
7627 else if (TREE_INT_CST_LOW (t) == u)
7628 return 0;
7629 else if (TREE_INT_CST_LOW (t) < u)
7630 return -1;
7631 else
7632 return 1;
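
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.
   compare_tree_int lets callers compare an arbitrary-precision
   INTEGER_CST against a host integer without overflow worries; values
   that do not even fit an unsigned HOST_WIDE_INT simply compare as
   greater.  */

static bool
example_value_at_most (tree cst, unsigned HOST_WIDE_INT limit)
{
  /* True if CST, an INTEGER_CST, is <= LIMIT.  */
  return compare_tree_int (cst, limit) <= 0;
}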
7635 /* Return true if SIZE represents a constant size that is in bounds of
7636 what the middle-end and the backend accepts (covering not more than
7637 half of the address-space).
7638 When PERR is non-null, set *PERR on failure to the description of
7639 why SIZE is not valid. */
7641 bool
7642 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7644 if (POLY_INT_CST_P (size))
7646 if (TREE_OVERFLOW (size))
7647 return false;
7648 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7649 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7650 return false;
7651 return true;
7654 cst_size_error error;
7655 if (!perr)
7656 perr = &error;
7658 if (TREE_CODE (size) != INTEGER_CST)
7660 *perr = cst_size_not_constant;
7661 return false;
7664 if (TREE_OVERFLOW_P (size))
7666 *perr = cst_size_overflow;
7667 return false;
7670 if (tree_int_cst_sgn (size) < 0)
7672 *perr = cst_size_negative;
7673 return false;
7675 if (!tree_fits_uhwi_p (size)
7676 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7677 < wi::to_widest (size) * 2))
7679 *perr = cst_size_too_big;
7680 return false;
7683 return true;
7686 /* Return the precision of the type, or for a complex or vector type the
7687 precision of the type of its elements. */
7689 unsigned int
7690 element_precision (const_tree type)
7692 if (!TYPE_P (type))
7693 type = TREE_TYPE (type);
7694 enum tree_code code = TREE_CODE (type);
7695 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7696 type = TREE_TYPE (type);
7698 return TYPE_PRECISION (type);
7701 /* Return true if CODE represents an associative tree code. Otherwise
7702 return false. */
7703 bool
7704 associative_tree_code (enum tree_code code)
7706 switch (code)
7708 case BIT_IOR_EXPR:
7709 case BIT_AND_EXPR:
7710 case BIT_XOR_EXPR:
7711 case PLUS_EXPR:
7712 case MULT_EXPR:
7713 case MIN_EXPR:
7714 case MAX_EXPR:
7715 return true;
7717 default:
7718 break;
7720 return false;
7723 /* Return true if CODE represents a commutative tree code. Otherwise
7724 return false. */
7725 bool
7726 commutative_tree_code (enum tree_code code)
7728 switch (code)
7730 case PLUS_EXPR:
7731 case MULT_EXPR:
7732 case MULT_HIGHPART_EXPR:
7733 case MIN_EXPR:
7734 case MAX_EXPR:
7735 case BIT_IOR_EXPR:
7736 case BIT_XOR_EXPR:
7737 case BIT_AND_EXPR:
7738 case NE_EXPR:
7739 case EQ_EXPR:
7740 case UNORDERED_EXPR:
7741 case ORDERED_EXPR:
7742 case UNEQ_EXPR:
7743 case LTGT_EXPR:
7744 case TRUTH_AND_EXPR:
7745 case TRUTH_XOR_EXPR:
7746 case TRUTH_OR_EXPR:
7747 case WIDEN_MULT_EXPR:
7748 case VEC_WIDEN_MULT_HI_EXPR:
7749 case VEC_WIDEN_MULT_LO_EXPR:
7750 case VEC_WIDEN_MULT_EVEN_EXPR:
7751 case VEC_WIDEN_MULT_ODD_EXPR:
7752 return true;
7754 default:
7755 break;
7757 return false;
7760 /* Return true if CODE represents a ternary tree code for which the
7761 first two operands are commutative. Otherwise return false. */
7762 bool
7763 commutative_ternary_tree_code (enum tree_code code)
7765 switch (code)
7767 case WIDEN_MULT_PLUS_EXPR:
7768 case WIDEN_MULT_MINUS_EXPR:
7769 case DOT_PROD_EXPR:
7770 return true;
7772 default:
7773 break;
7775 return false;
7778 /* Returns true if CODE can overflow. */
7780 bool
7781 operation_can_overflow (enum tree_code code)
7783 switch (code)
7785 case PLUS_EXPR:
7786 case MINUS_EXPR:
7787 case MULT_EXPR:
7788 case LSHIFT_EXPR:
7789 /* Can overflow in various ways. */
7790 return true;
7791 case TRUNC_DIV_EXPR:
7792 case EXACT_DIV_EXPR:
7793 case FLOOR_DIV_EXPR:
7794 case CEIL_DIV_EXPR:
7795 /* For INT_MIN / -1. */
7796 return true;
7797 case NEGATE_EXPR:
7798 case ABS_EXPR:
7799 /* For -INT_MIN. */
7800 return true;
7801 default:
7802 /* These operators cannot overflow. */
7803 return false;
7807 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7808 ftrapv doesn't generate trapping insns for CODE. */
7810 bool
7811 operation_no_trapping_overflow (tree type, enum tree_code code)
7813 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7815 /* We don't generate instructions that trap on overflow for complex or vector
7816 types. */
7817 if (!INTEGRAL_TYPE_P (type))
7818 return true;
7820 if (!TYPE_OVERFLOW_TRAPS (type))
7821 return true;
7823 switch (code)
7825 case PLUS_EXPR:
7826 case MINUS_EXPR:
7827 case MULT_EXPR:
7828 case NEGATE_EXPR:
7829 case ABS_EXPR:
7830 /* These operators can overflow, and -ftrapv generates trapping code for
7831 these. */
7832 return false;
7833 case TRUNC_DIV_EXPR:
7834 case EXACT_DIV_EXPR:
7835 case FLOOR_DIV_EXPR:
7836 case CEIL_DIV_EXPR:
7837 case LSHIFT_EXPR:
7838 /* These operators can overflow, but -ftrapv does not generate trapping
7839 code for these. */
7840 return true;
7841 default:
7842 /* These operators cannot overflow. */
7843 return true;
7847 /* Constructors for pointer, array and function types.
7848 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7849 constructed by language-dependent code, not here.) */
7851 /* Construct, lay out and return the type of pointers to TO_TYPE with
7852 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7853 reference all of memory. If such a type has already been
7854 constructed, reuse it. */
7856 tree
7857 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7858 bool can_alias_all)
7860 tree t;
7861 bool could_alias = can_alias_all;
7863 if (to_type == error_mark_node)
7864 return error_mark_node;
7866 /* If the pointed-to type has the may_alias attribute set, force
7867 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7868 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7869 can_alias_all = true;
7871 /* In some cases, languages will have things that aren't a POINTER_TYPE
7872 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7873 In that case, return that type without regard to the rest of our
7874 operands.
7876 ??? This is a kludge, but consistent with the way this function has
7877 always operated and there doesn't seem to be a good way to avoid this
7878 at the moment. */
7879 if (TYPE_POINTER_TO (to_type) != 0
7880 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7881 return TYPE_POINTER_TO (to_type);
7883 /* First, if we already have a type for pointers to TO_TYPE and it's
7884 the proper mode, use it. */
7885 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7886 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7887 return t;
7889 t = make_node (POINTER_TYPE);
7891 TREE_TYPE (t) = to_type;
7892 SET_TYPE_MODE (t, mode);
7893 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7894 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7895 TYPE_POINTER_TO (to_type) = t;
7897 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7898 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7899 SET_TYPE_STRUCTURAL_EQUALITY (t);
7900 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7901 TYPE_CANONICAL (t)
7902 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7903 mode, false);
7905 /* Lay out the type. This function has many callers that are concerned
7906 with expression-construction, and this simplifies them all. */
7907 layout_type (t);
7909 return t;
7912 /* By default build pointers in ptr_mode. */
7914 tree
7915 build_pointer_type (tree to_type)
7917 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7918 : TYPE_ADDR_SPACE (to_type);
7919 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7920 return build_pointer_type_for_mode (to_type, pointer_mode, false);
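
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  Pointer types
   are cached on the pointed-to type via TYPE_POINTER_TO, so repeated
   requests return the same node; the _for_mode variant is only needed
   when a non-default pointer mode (e.g. a named address space) is
   required.  */

static void
example_pointer_type_sharing (void)
{
  tree pint = build_pointer_type (integer_type_node);

  /* The result is recorded on integer_type_node and reused.  */
  gcc_assert (build_pointer_type (integer_type_node) == pint);
  gcc_assert (TREE_TYPE (pint) == integer_type_node);
}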
7923 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7925 tree
7926 build_reference_type_for_mode (tree to_type, machine_mode mode,
7927 bool can_alias_all)
7929 tree t;
7930 bool could_alias = can_alias_all;
7932 if (to_type == error_mark_node)
7933 return error_mark_node;
7935 /* If the pointed-to type has the may_alias attribute set, force
7936 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7937 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7938 can_alias_all = true;
7940 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7941 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7942 In that case, return that type without regard to the rest of our
7943 operands.
7945 ??? This is a kludge, but consistent with the way this function has
7946 always operated and there doesn't seem to be a good way to avoid this
7947 at the moment. */
7948 if (TYPE_REFERENCE_TO (to_type) != 0
7949 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7950 return TYPE_REFERENCE_TO (to_type);
7952 /* First, if we already have a type for pointers to TO_TYPE and it's
7953 the proper mode, use it. */
7954 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7955 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7956 return t;
7958 t = make_node (REFERENCE_TYPE);
7960 TREE_TYPE (t) = to_type;
7961 SET_TYPE_MODE (t, mode);
7962 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7963 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7964 TYPE_REFERENCE_TO (to_type) = t;
7966 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7967 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7968 SET_TYPE_STRUCTURAL_EQUALITY (t);
7969 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7970 TYPE_CANONICAL (t)
7971 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7972 mode, false);
7974 layout_type (t);
7976 return t;
7980 /* Build the node for the type of references-to-TO_TYPE by default
7981 in ptr_mode. */
7983 tree
7984 build_reference_type (tree to_type)
7986 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7987 : TYPE_ADDR_SPACE (to_type);
7988 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7989 return build_reference_type_for_mode (to_type, pointer_mode, false);
7992 #define MAX_INT_CACHED_PREC \
7993 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7994 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7996 /* Builds a signed or unsigned integer type of precision PRECISION.
7997 Used for C bitfields whose precision does not match that of
7998 built-in target types. */
7999 tree
8000 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8001 int unsignedp)
8003 tree itype, ret;
8005 if (unsignedp)
8006 unsignedp = MAX_INT_CACHED_PREC + 1;
8008 if (precision <= MAX_INT_CACHED_PREC)
8010 itype = nonstandard_integer_type_cache[precision + unsignedp];
8011 if (itype)
8012 return itype;
8015 itype = make_node (INTEGER_TYPE);
8016 TYPE_PRECISION (itype) = precision;
8018 if (unsignedp)
8019 fixup_unsigned_type (itype);
8020 else
8021 fixup_signed_type (itype);
8023 inchash::hash hstate;
8024 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8025 ret = type_hash_canon (hstate.end (), itype);
8026 if (precision <= MAX_INT_CACHED_PREC)
8027 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8029 return ret;
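
/* Editor's note: illustrative sketch only, not part of the original file;
   the example_* helper below is invented for illustration.  A 24-bit
   unsigned bit-field type, say, is built like this; results up to
   MAX_INT_CACHED_PREC bits are cached, and type_hash_canon above ensures
   that equal requests share one node either way.  */

static tree
example_build_u24 (void)
{
  tree u24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_assert (TYPE_PRECISION (u24) == 24 && TYPE_UNSIGNED (u24));
  return u24;
}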
8032 #define MAX_BOOL_CACHED_PREC \
8033 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8034 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8036 /* Builds a boolean type of precision PRECISION.
8037 Used for boolean vectors to choose proper vector element size. */
8038 tree
8039 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8041 tree type;
8043 if (precision <= MAX_BOOL_CACHED_PREC)
8045 type = nonstandard_boolean_type_cache[precision];
8046 if (type)
8047 return type;
8050 type = make_node (BOOLEAN_TYPE);
8051 TYPE_PRECISION (type) = precision;
8052 fixup_signed_type (type);
8054 if (precision <= MAX_BOOL_CACHED_PREC)
8055 nonstandard_boolean_type_cache[precision] = type;
8057 return type;
8060 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8061 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8062 is true, reuse such a type that has already been constructed. */
8064 static tree
8065 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8067 tree itype = make_node (INTEGER_TYPE);
8069 TREE_TYPE (itype) = type;
8071 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8072 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8074 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8075 SET_TYPE_MODE (itype, TYPE_MODE (type));
8076 TYPE_SIZE (itype) = TYPE_SIZE (type);
8077 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8078 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8079 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8080 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8082 if (!shared)
8083 return itype;
8085 if ((TYPE_MIN_VALUE (itype)
8086 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8087 || (TYPE_MAX_VALUE (itype)
8088 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8090 /* Since we cannot reliably merge this type, we need to compare it using
8091 structural equality checks. */
8092 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8093 return itype;
8096 hashval_t hash = type_hash_canon_hash (itype);
8097 itype = type_hash_canon (hash, itype);
8099 return itype;
8102 /* Wrapper around build_range_type_1 with SHARED set to true. */
8104 tree
8105 build_range_type (tree type, tree lowval, tree highval)
8107 return build_range_type_1 (type, lowval, highval, true);
8110 /* Wrapper around build_range_type_1 with SHARED set to false. */
8112 tree
8113 build_nonshared_range_type (tree type, tree lowval, tree highval)
8115 return build_range_type_1 (type, lowval, highval, false);
8118 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8119 MAXVAL should be the maximum value in the domain
8120 (one less than the length of the array).
8122 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8123 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8124 The limit exists because the result is a signed type and we don't handle
8125 sizes that use more than one HOST_WIDE_INT. */
8127 tree
8128 build_index_type (tree maxval)
8130 return build_range_type (sizetype, size_zero_node, maxval);
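/* Illustrative sketch, not part of the original sources: the domain of a
   ten-element array is the sizetype range [0, 9]:

     tree domain = build_index_type (size_int (9));

   This is simply build_range_type (sizetype, size_zero_node,
   size_int (9)), so it shares the mode, size and alignment of
   sizetype.  */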
8133 /* Return true if the debug information for TYPE, a subtype, should be emitted
8134 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8135 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8136 debug info and doesn't reflect the source code. */
8138 bool
8139 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8141 tree base_type = TREE_TYPE (type), low, high;
8143 /* Subrange types have a base type which is an integral type. */
8144 if (!INTEGRAL_TYPE_P (base_type))
8145 return false;
8147 /* Get the real bounds of the subtype. */
8148 if (lang_hooks.types.get_subrange_bounds)
8149 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8150 else
8152 low = TYPE_MIN_VALUE (type);
8153 high = TYPE_MAX_VALUE (type);
8156 /* If the type and its base type have the same representation and the same
8157 name, then the type is not a subrange but a copy of the base type. */
8158 if ((TREE_CODE (base_type) == INTEGER_TYPE
8159 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8160 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8161 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8162 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8163 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8164 return false;
8166 if (lowval)
8167 *lowval = low;
8168 if (highval)
8169 *highval = high;
8170 return true;
8173 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8174 and number of elements specified by the range of values of INDEX_TYPE.
8175 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8176 If SHARED is true, reuse such a type that has already been constructed.
8177 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
8179 static tree
8180 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8181 bool shared, bool set_canonical)
8183 tree t;
8185 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8187 error ("arrays of functions are not meaningful");
8188 elt_type = integer_type_node;
8191 t = make_node (ARRAY_TYPE);
8192 TREE_TYPE (t) = elt_type;
8193 TYPE_DOMAIN (t) = index_type;
8194 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8195 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8196 layout_type (t);
8198 if (shared)
8200 hashval_t hash = type_hash_canon_hash (t);
8201 t = type_hash_canon (hash, t);
8204 if (TYPE_CANONICAL (t) == t && set_canonical)
8206 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8207 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8208 || in_lto_p)
8209 SET_TYPE_STRUCTURAL_EQUALITY (t);
8210 else if (TYPE_CANONICAL (elt_type) != elt_type
8211 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8212 TYPE_CANONICAL (t)
8213 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8214 index_type
8215 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8216 typeless_storage, shared, set_canonical);
8219 return t;
8222 /* Wrapper around build_array_type_1 with SHARED set to true. */
8224 tree
8225 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8227 return
8228 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
8231 /* Wrapper around build_array_type_1 with SHARED set to false. */
8233 tree
8234 build_nonshared_array_type (tree elt_type, tree index_type)
8236 return build_array_type_1 (elt_type, index_type, false, false, true);
8239 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8240 sizetype. */
8242 tree
8243 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8245 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
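/* Illustrative sketch, not part of the original sources: a type such as
   int[10] can be built either way:

     tree a1 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));
     tree a2 = build_array_type_nelts (integer_type_node, 10);

   Both calls go through the shared variant of build_array_type_1, so a1
   and a2 should end up as the same node.  */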
8248 /* Recursively examines the element type of TYPE, stripping ARRAY_TYPEs
8249 until a non-array type is found, and returns that type. */
8251 tree
8252 strip_array_types (tree type)
8254 while (TREE_CODE (type) == ARRAY_TYPE)
8255 type = TREE_TYPE (type);
8257 return type;
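/* Illustrative sketch, not part of the original sources: given a
   two-dimensional array type such as int[3][4],

     tree t = build_array_type_nelts
                (build_array_type_nelts (integer_type_node, 4), 3);

   strip_array_types (t) peels both ARRAY_TYPE levels and returns
   integer_type_node, while a non-array type is returned unchanged.  */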
8260 /* Computes the canonical argument types from the argument type list
8261 ARGTYPES.
8263 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8264 on entry to this function, or if any of the ARGTYPES are
8265 structural.
8267 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8268 true on entry to this function, or if any of the ARGTYPES are
8269 non-canonical.
8271 Returns a canonical argument list, which may be ARGTYPES when the
8272 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8273 true) or would not differ from ARGTYPES. */
8275 static tree
8276 maybe_canonicalize_argtypes (tree argtypes,
8277 bool *any_structural_p,
8278 bool *any_noncanonical_p)
8280 tree arg;
8281 bool any_noncanonical_argtypes_p = false;
8283 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8285 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8286 /* Fail gracefully by stating that the type is structural. */
8287 *any_structural_p = true;
8288 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8289 *any_structural_p = true;
8290 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8291 || TREE_PURPOSE (arg))
8292 /* If the argument has a default argument, we consider it
8293 non-canonical even though the type itself is canonical.
8294 That way, different variants of function and method types
8295 with default arguments will all point to the variant with
8296 no defaults as their canonical type. */
8297 any_noncanonical_argtypes_p = true;
8300 if (*any_structural_p)
8301 return argtypes;
8303 if (any_noncanonical_argtypes_p)
8305 /* Build the canonical list of argument types. */
8306 tree canon_argtypes = NULL_TREE;
8307 bool is_void = false;
8309 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8311 if (arg == void_list_node)
8312 is_void = true;
8313 else
8314 canon_argtypes = tree_cons (NULL_TREE,
8315 TYPE_CANONICAL (TREE_VALUE (arg)),
8316 canon_argtypes);
8319 canon_argtypes = nreverse (canon_argtypes);
8320 if (is_void)
8321 canon_argtypes = chainon (canon_argtypes, void_list_node);
8323 /* There is a non-canonical type. */
8324 *any_noncanonical_p = true;
8325 return canon_argtypes;
8328 /* The canonical argument types are the same as ARGTYPES. */
8329 return argtypes;
8332 /* Construct, lay out and return
8333 the type of functions returning type VALUE_TYPE
8334 given arguments of types ARG_TYPES.
8335 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8336 are data type nodes for the arguments of the function.
8337 If such a type has already been constructed, reuse it. */
8339 tree
8340 build_function_type (tree value_type, tree arg_types)
8342 tree t;
8343 inchash::hash hstate;
8344 bool any_structural_p, any_noncanonical_p;
8345 tree canon_argtypes;
8347 gcc_assert (arg_types != error_mark_node);
8349 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8351 error ("function return type cannot be function");
8352 value_type = integer_type_node;
8355 /* Make a node of the sort we want. */
8356 t = make_node (FUNCTION_TYPE);
8357 TREE_TYPE (t) = value_type;
8358 TYPE_ARG_TYPES (t) = arg_types;
8360 /* If we already have such a type, use the old one. */
8361 hashval_t hash = type_hash_canon_hash (t);
8362 t = type_hash_canon (hash, t);
8364 /* Set up the canonical type. */
8365 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8366 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8367 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8368 &any_structural_p,
8369 &any_noncanonical_p);
8370 if (any_structural_p)
8371 SET_TYPE_STRUCTURAL_EQUALITY (t);
8372 else if (any_noncanonical_p)
8373 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8374 canon_argtypes);
8376 if (!COMPLETE_TYPE_P (t))
8377 layout_type (t);
8378 return t;
8381 /* Build a function type. The RETURN_TYPE is the type returned by the
8382 function. If VAARGS is set, no void_type_node is appended to the
8383 list. ARGP must always be terminated by a NULL_TREE. */
8385 static tree
8386 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8388 tree t, args, last;
8390 t = va_arg (argp, tree);
8391 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8392 args = tree_cons (NULL_TREE, t, args);
8394 if (vaargs)
8396 last = args;
8397 if (args != NULL_TREE)
8398 args = nreverse (args);
8399 gcc_assert (last != void_list_node);
8401 else if (args == NULL_TREE)
8402 args = void_list_node;
8403 else
8405 last = args;
8406 args = nreverse (args);
8407 TREE_CHAIN (last) = void_list_node;
8409 args = build_function_type (return_type, args);
8411 return args;
8414 /* Build a function type. The RETURN_TYPE is the type returned by the
8415 function. If additional arguments are provided, they are
8416 additional argument types. The list of argument types must always
8417 be terminated by NULL_TREE. */
8419 tree
8420 build_function_type_list (tree return_type, ...)
8422 tree args;
8423 va_list p;
8425 va_start (p, return_type);
8426 args = build_function_type_list_1 (false, return_type, p);
8427 va_end (p);
8428 return args;
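/* Illustrative sketch, not part of the original sources: the type of a
   function such as "int f (double, char *)" can be built with

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The NULL_TREE terminator is mandatory; internally the arguments are
   collected into a TREE_LIST ending in void_list_node and handed to
   build_function_type.  */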
8431 /* Build a variable argument function type. The RETURN_TYPE is the
8432 type returned by the function. If additional arguments are provided,
8433 they are additional argument types. The list of argument types must
8434 always be terminated by NULL_TREE. */
8436 tree
8437 build_varargs_function_type_list (tree return_type, ...)
8439 tree args;
8440 va_list p;
8442 va_start (p, return_type);
8443 args = build_function_type_list_1 (true, return_type, p);
8444 va_end (p);
8446 return args;
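/* Illustrative sketch, not part of the original sources: for a variadic
   function such as "int f (char *, ...)" one would write

     tree fntype
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   Here the resulting argument list is not terminated by void_list_node,
   which is what marks the FUNCTION_TYPE as taking variable arguments.  */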
8449 /* Build a function type. RETURN_TYPE is the type returned by the
8450 function; VAARGS indicates whether the function takes varargs. The
8451 function takes N named arguments, the types of which are provided in
8452 ARG_TYPES. */
8454 static tree
8455 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8456 tree *arg_types)
8458 int i;
8459 tree t = vaargs ? NULL_TREE : void_list_node;
8461 for (i = n - 1; i >= 0; i--)
8462 t = tree_cons (NULL_TREE, arg_types[i], t);
8464 return build_function_type (return_type, t);
8467 /* Build a function type. RETURN_TYPE is the type returned by the
8468 function. The function takes N named arguments, the types of which
8469 are provided in ARG_TYPES. */
8471 tree
8472 build_function_type_array (tree return_type, int n, tree *arg_types)
8474 return build_function_type_array_1 (false, return_type, n, arg_types);
8477 /* Build a variable argument function type. RETURN_TYPE is the type
8478 returned by the function. The function takes N named arguments, the
8479 types of which are provided in ARG_TYPES. */
8481 tree
8482 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8484 return build_function_type_array_1 (true, return_type, n, arg_types);
8487 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8488 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8489 for the method. An implicit additional parameter (of type
8490 pointer-to-BASETYPE) is added to the ARGTYPES. */
8492 tree
8493 build_method_type_directly (tree basetype,
8494 tree rettype,
8495 tree argtypes)
8497 tree t;
8498 tree ptype;
8499 bool any_structural_p, any_noncanonical_p;
8500 tree canon_argtypes;
8502 /* Make a node of the sort we want. */
8503 t = make_node (METHOD_TYPE);
8505 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8506 TREE_TYPE (t) = rettype;
8507 ptype = build_pointer_type (basetype);
8509 /* The actual arglist for this function includes a "hidden" argument
8510 which is "this". Put it into the list of argument types. */
8511 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8512 TYPE_ARG_TYPES (t) = argtypes;
8514 /* If we already have such a type, use the old one. */
8515 hashval_t hash = type_hash_canon_hash (t);
8516 t = type_hash_canon (hash, t);
8518 /* Set up the canonical type. */
8519 any_structural_p
8520 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8521 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8522 any_noncanonical_p
8523 = (TYPE_CANONICAL (basetype) != basetype
8524 || TYPE_CANONICAL (rettype) != rettype);
8525 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8526 &any_structural_p,
8527 &any_noncanonical_p);
8528 if (any_structural_p)
8529 SET_TYPE_STRUCTURAL_EQUALITY (t);
8530 else if (any_noncanonical_p)
8531 TYPE_CANONICAL (t)
8532 = build_method_type_directly (TYPE_CANONICAL (basetype),
8533 TYPE_CANONICAL (rettype),
8534 canon_argtypes);
8535 if (!COMPLETE_TYPE_P (t))
8536 layout_type (t);
8538 return t;
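/* Illustrative sketch, not part of the original sources: for a C++
   member function such as "int C::f (double)" the front end ends up
   doing the equivalent of

     tree argtypes = tree_cons (NULL_TREE, double_type_node,
                                void_list_node);
     tree mtype = build_method_type_directly (c_record_type,
                                              integer_type_node,
                                              argtypes);

   where c_record_type stands for a hypothetical RECORD_TYPE for C.  The
   first entry of TYPE_ARG_TYPES (mtype) is then the implicit "this"
   type, a pointer to c_record_type.  */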
8541 /* Construct, lay out and return the type of methods belonging to class
8542 BASETYPE and whose arguments and values are described by TYPE.
8543 If that type exists already, reuse it.
8544 TYPE must be a FUNCTION_TYPE node. */
8546 tree
8547 build_method_type (tree basetype, tree type)
8549 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8551 return build_method_type_directly (basetype,
8552 TREE_TYPE (type),
8553 TYPE_ARG_TYPES (type));
8556 /* Construct, lay out and return the type of offsets to a value
8557 of type TYPE, within an object of type BASETYPE.
8558 If a suitable offset type exists already, reuse it. */
8560 tree
8561 build_offset_type (tree basetype, tree type)
8563 tree t;
8565 /* Make a node of the sort we want. */
8566 t = make_node (OFFSET_TYPE);
8568 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8569 TREE_TYPE (t) = type;
8571 /* If we already have such a type, use the old one. */
8572 hashval_t hash = type_hash_canon_hash (t);
8573 t = type_hash_canon (hash, t);
8575 if (!COMPLETE_TYPE_P (t))
8576 layout_type (t);
8578 if (TYPE_CANONICAL (t) == t)
8580 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8581 || TYPE_STRUCTURAL_EQUALITY_P (type))
8582 SET_TYPE_STRUCTURAL_EQUALITY (t);
8583 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8584 || TYPE_CANONICAL (type) != type)
8585 TYPE_CANONICAL (t)
8586 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8587 TYPE_CANONICAL (type));
8590 return t;
8593 /* Create a complex type whose components are COMPONENT_TYPE.
8595 If NAMED is true, the type is given a TYPE_NAME. We do not always
8596 do so because this creates a DECL node and thus makes the DECL_UIDs
8597 dependent on the type canonicalization hashtable, which is GC-ed,
8598 so the DECL_UIDs would not be stable wrt garbage collection. */
8600 tree
8601 build_complex_type (tree component_type, bool named)
8603 gcc_assert (INTEGRAL_TYPE_P (component_type)
8604 || SCALAR_FLOAT_TYPE_P (component_type)
8605 || FIXED_POINT_TYPE_P (component_type));
8607 /* Make a node of the sort we want. */
8608 tree probe = make_node (COMPLEX_TYPE);
8610 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8612 /* If we already have such a type, use the old one. */
8613 hashval_t hash = type_hash_canon_hash (probe);
8614 tree t = type_hash_canon (hash, probe);
8616 if (t == probe)
8618 /* We created a new type. The hash insertion will have laid
8619 out the type. We need to check the canonicalization and
8620 maybe set the name. */
8621 gcc_checking_assert (COMPLETE_TYPE_P (t)
8622 && !TYPE_NAME (t)
8623 && TYPE_CANONICAL (t) == t);
8625 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8626 SET_TYPE_STRUCTURAL_EQUALITY (t);
8627 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8628 TYPE_CANONICAL (t)
8629 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8631 /* We need to create a name, since complex is a fundamental type. */
8632 if (named)
8634 const char *name = NULL;
8636 if (TREE_TYPE (t) == char_type_node)
8637 name = "complex char";
8638 else if (TREE_TYPE (t) == signed_char_type_node)
8639 name = "complex signed char";
8640 else if (TREE_TYPE (t) == unsigned_char_type_node)
8641 name = "complex unsigned char";
8642 else if (TREE_TYPE (t) == short_integer_type_node)
8643 name = "complex short int";
8644 else if (TREE_TYPE (t) == short_unsigned_type_node)
8645 name = "complex short unsigned int";
8646 else if (TREE_TYPE (t) == integer_type_node)
8647 name = "complex int";
8648 else if (TREE_TYPE (t) == unsigned_type_node)
8649 name = "complex unsigned int";
8650 else if (TREE_TYPE (t) == long_integer_type_node)
8651 name = "complex long int";
8652 else if (TREE_TYPE (t) == long_unsigned_type_node)
8653 name = "complex long unsigned int";
8654 else if (TREE_TYPE (t) == long_long_integer_type_node)
8655 name = "complex long long int";
8656 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8657 name = "complex long long unsigned int";
8659 if (name != NULL)
8660 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8661 get_identifier (name), t);
8665 return build_qualified_type (t, TYPE_QUALS (component_type));
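/* Illustrative sketch, not part of the original sources: the component
   type is always taken from its main variant, so

     tree ct = build_complex_type (double_type_node, false);

   yields a COMPLEX_TYPE whose TREE_TYPE is double_type_node; any
   qualifiers on the component type are reapplied to the result by the
   build_qualified_type call above.  */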
8668 /* If TYPE is a real or complex floating-point type and the target
8669 does not directly support arithmetic on TYPE then return the wider
8670 type to be used for arithmetic on TYPE. Otherwise, return
8671 NULL_TREE. */
8673 tree
8674 excess_precision_type (tree type)
8676 /* The target can give two different responses to the question of
8677 which excess precision mode it would like depending on whether we
8678 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8680 enum excess_precision_type requested_type
8681 = (flag_excess_precision == EXCESS_PRECISION_FAST
8682 ? EXCESS_PRECISION_TYPE_FAST
8683 : EXCESS_PRECISION_TYPE_STANDARD);
8685 enum flt_eval_method target_flt_eval_method
8686 = targetm.c.excess_precision (requested_type);
8688 /* The target should not ask for unpredictable float evaluation (though
8689 it might advertise that implicitly the evaluation is unpredictable,
8690 but we don't care about that here, it will have been reported
8691 elsewhere). If it does ask for unpredictable evaluation, we have
8692 nothing to do here. */
8693 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8695 /* Nothing to do. The target has asked for all types we know about
8696 to be computed with their native precision and range. */
8697 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8698 return NULL_TREE;
8700 /* The target will promote this type in a target-dependent way, so excess
8701 precision ought to leave it alone. */
8702 if (targetm.promoted_type (type) != NULL_TREE)
8703 return NULL_TREE;
8705 machine_mode float16_type_mode = (float16_type_node
8706 ? TYPE_MODE (float16_type_node)
8707 : VOIDmode);
8708 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8709 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8711 switch (TREE_CODE (type))
8713 case REAL_TYPE:
8715 machine_mode type_mode = TYPE_MODE (type);
8716 switch (target_flt_eval_method)
8718 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8719 if (type_mode == float16_type_mode)
8720 return float_type_node;
8721 break;
8722 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8723 if (type_mode == float16_type_mode
8724 || type_mode == float_type_mode)
8725 return double_type_node;
8726 break;
8727 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8728 if (type_mode == float16_type_mode
8729 || type_mode == float_type_mode
8730 || type_mode == double_type_mode)
8731 return long_double_type_node;
8732 break;
8733 default:
8734 gcc_unreachable ();
8736 break;
8738 case COMPLEX_TYPE:
8740 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8741 return NULL_TREE;
8742 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8743 switch (target_flt_eval_method)
8745 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8746 if (type_mode == float16_type_mode)
8747 return complex_float_type_node;
8748 break;
8749 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8750 if (type_mode == float16_type_mode
8751 || type_mode == float_type_mode)
8752 return complex_double_type_node;
8753 break;
8754 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8755 if (type_mode == float16_type_mode
8756 || type_mode == float_type_mode
8757 || type_mode == double_type_mode)
8758 return complex_long_double_type_node;
8759 break;
8760 default:
8761 gcc_unreachable ();
8763 break;
8765 default:
8766 break;
8769 return NULL_TREE;
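/* Illustrative sketch, not part of the original sources: on a target
   whose excess_precision hook answers FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
   for the requested mode (and assuming targetm.promoted_type does not
   already promote the type), float and _Float16 arithmetic is carried
   out in double, so excess_precision_type (float_type_node) returns
   double_type_node while excess_precision_type (long_double_type_node)
   returns NULL_TREE.  */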
8772 /* Return OP, stripped of any conversions to wider types as much as is safe.
8773 Converting the value back to OP's type makes a value equivalent to OP.
8775 If FOR_TYPE is nonzero, we return a value which, if converted to
8776 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8778 OP must have integer, real or enumeral type. Pointers are not allowed!
8780 There are some cases where the obvious value we could return
8781 would regenerate to OP if converted to OP's type,
8782 but would not extend like OP to wider types.
8783 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8784 For example, if OP is (unsigned short)(signed char)-1,
8785 we avoid returning (signed char)-1 if FOR_TYPE is int,
8786 even though extending that to an unsigned short would regenerate OP,
8787 since the result of extending (signed char)-1 to (int)
8788 is different from (int) OP. */
8790 tree
8791 get_unwidened (tree op, tree for_type)
8793 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8794 tree type = TREE_TYPE (op);
8795 unsigned final_prec
8796 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8797 int uns
8798 = (for_type != 0 && for_type != type
8799 && final_prec > TYPE_PRECISION (type)
8800 && TYPE_UNSIGNED (type));
8801 tree win = op;
8803 while (CONVERT_EXPR_P (op))
8805 int bitschange;
8807 /* TYPE_PRECISION on vector types has different meaning
8808 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8809 so avoid them here. */
8810 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8811 break;
8813 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8814 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8816 /* Truncations are many-one so cannot be removed, unless we are later
8817 going to truncate down even further. */
8818 if (bitschange < 0
8819 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8820 break;
8822 /* See what's inside this conversion. If we decide to strip it,
8823 we will set WIN. */
8824 op = TREE_OPERAND (op, 0);
8826 /* If we have not stripped any zero-extensions (uns is 0),
8827 we can strip any kind of extension.
8828 If we have previously stripped a zero-extension,
8829 only zero-extensions can safely be stripped.
8830 Any extension can be stripped if the bits it would produce
8831 are all going to be discarded later by truncating to FOR_TYPE. */
8833 if (bitschange > 0)
8835 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8836 win = op;
8837 /* TYPE_UNSIGNED says whether this is a zero-extension.
8838 Let's avoid computing it if it does not affect WIN
8839 and if UNS will not be needed again. */
8840 if ((uns
8841 || CONVERT_EXPR_P (op))
8842 && TYPE_UNSIGNED (TREE_TYPE (op)))
8844 uns = 1;
8845 win = op;
8850 /* If we finally reach a constant see if it fits in sth smaller and
8851 in that case convert it. */
8852 if (TREE_CODE (win) == INTEGER_CST)
8854 tree wtype = TREE_TYPE (win);
8855 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8856 if (for_type)
8857 prec = MAX (prec, final_prec);
8858 if (prec < TYPE_PRECISION (wtype))
8860 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8861 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8862 win = fold_convert (t, win);
8866 return win;
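/* Illustrative worked example, not part of the original sources: if U
   is an expression of type unsigned short and OP is the widening
   conversion (unsigned int) U, then get_unwidened (OP, NULL_TREE)
   strips the conversion and returns U: the conversion adds bits
   (bitschange > 0) and no zero-extension constraint has been recorded
   yet, so WIN is advanced to the inner operand.  */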
8869 /* Return OP or a simpler expression for a narrower value
8870 which can be sign-extended or zero-extended to give back OP.
8871 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8872 or 0 if the value should be sign-extended. */
8874 tree
8875 get_narrower (tree op, int *unsignedp_ptr)
8877 int uns = 0;
8878 int first = 1;
8879 tree win = op;
8880 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8882 if (TREE_CODE (op) == COMPOUND_EXPR)
8884 do
8885 op = TREE_OPERAND (op, 1);
8886 while (TREE_CODE (op) == COMPOUND_EXPR);
8887 tree ret = get_narrower (op, unsignedp_ptr);
8888 if (ret == op)
8889 return win;
8890 auto_vec <tree, 16> v;
8891 unsigned int i;
8892 for (tree op = win; TREE_CODE (op) == COMPOUND_EXPR;
8893 op = TREE_OPERAND (op, 1))
8894 v.safe_push (op);
8895 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8896 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8897 TREE_TYPE (win), TREE_OPERAND (op, 0),
8898 ret);
8899 return ret;
8901 while (TREE_CODE (op) == NOP_EXPR)
8903 int bitschange
8904 = (TYPE_PRECISION (TREE_TYPE (op))
8905 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8907 /* Truncations are many-one so cannot be removed. */
8908 if (bitschange < 0)
8909 break;
8911 /* See what's inside this conversion. If we decide to strip it,
8912 we will set WIN. */
8914 if (bitschange > 0)
8916 op = TREE_OPERAND (op, 0);
8917 /* An extension: the outermost one can be stripped,
8918 but remember whether it is zero or sign extension. */
8919 if (first)
8920 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8921 /* Otherwise, if a sign extension has been stripped,
8922 only sign extensions can now be stripped;
8923 if a zero extension has been stripped, only zero-extensions. */
8924 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8925 break;
8926 first = 0;
8928 else /* bitschange == 0 */
8930 /* A change in nominal type can always be stripped, but we must
8931 preserve the unsignedness. */
8932 if (first)
8933 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8934 first = 0;
8935 op = TREE_OPERAND (op, 0);
8936 /* Keep trying to narrow, but don't assign op to win if it
8937 would turn an integral type into something else. */
8938 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8939 continue;
8942 win = op;
8945 if (TREE_CODE (op) == COMPONENT_REF
8946 /* Since type_for_size always gives an integer type. */
8947 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8948 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8949 /* Ensure field is laid out already. */
8950 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8951 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8953 unsigned HOST_WIDE_INT innerprec
8954 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8955 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8956 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8957 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8959 /* We can get this structure field in a narrower type that fits it,
8960 but the resulting extension to its nominal type (a fullword type)
8961 must satisfy the same conditions as for other extensions.
8963 Do this only for fields that are aligned (not bit-fields),
8964 because when bit-field insns will be used there is no
8965 advantage in doing this. */
8967 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8968 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8969 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8970 && type != 0)
8972 if (first)
8973 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8974 win = fold_convert (type, op);
8978 *unsignedp_ptr = uns;
8979 return win;
8982 /* Return true if integer constant C has a value that is permissible
8983 for TYPE, an integral type. */
8985 bool
8986 int_fits_type_p (const_tree c, const_tree type)
8988 tree type_low_bound, type_high_bound;
8989 bool ok_for_low_bound, ok_for_high_bound;
8990 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8992 /* Non-standard boolean types can have arbitrary precision but various
8993 transformations assume that they can only take values 0 and +/-1. */
8994 if (TREE_CODE (type) == BOOLEAN_TYPE)
8995 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8997 retry:
8998 type_low_bound = TYPE_MIN_VALUE (type);
8999 type_high_bound = TYPE_MAX_VALUE (type);
9001 /* If at least one bound of the type is a constant integer, we can check
9002 ourselves and maybe make a decision. If no such decision is possible, but
9003 this type is a subtype, try checking against that. Otherwise, use
9004 fits_to_tree_p, which checks against the precision.
9006 Compute the status for each possibly constant bound, and return if we see
9007 one does not match. Use ok_for_xxx_bound for this purpose: it is set to
9008 true when the corresponding bound is a constant that C is known to satisfy,
9009 and to false when that bound could not be checked. */
9011 /* Check if c >= type_low_bound. */
9012 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9014 if (tree_int_cst_lt (c, type_low_bound))
9015 return false;
9016 ok_for_low_bound = true;
9018 else
9019 ok_for_low_bound = false;
9021 /* Check if c <= type_high_bound. */
9022 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9024 if (tree_int_cst_lt (type_high_bound, c))
9025 return false;
9026 ok_for_high_bound = true;
9028 else
9029 ok_for_high_bound = false;
9031 /* If the constant fits both bounds, the result is known. */
9032 if (ok_for_low_bound && ok_for_high_bound)
9033 return true;
9035 /* Perform some generic filtering which may allow making a decision
9036 even if the bounds are not constant. First, negative integers
9037 never fit in unsigned types. */
9038 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9039 return false;
9041 /* Second, narrower types always fit in wider ones. */
9042 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9043 return true;
9045 /* Third, unsigned integers with top bit set never fit signed types. */
9046 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9048 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9049 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9051 /* When a tree_cst is converted to a wide-int, the precision
9052 is taken from the type. However, if the precision of the
9053 mode underneath the type is smaller than that, it is
9054 possible that the value will not fit. The test below
9055 fails if any bit is set between the sign bit of the
9056 underlying mode and the top bit of the type. */
9057 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9058 return false;
9060 else if (wi::neg_p (wi::to_wide (c)))
9061 return false;
9064 /* If we haven't been able to decide at this point, there is nothing more we
9065 can check ourselves here. Look at the base type if we have one and it
9066 has the same precision. */
9067 if (TREE_CODE (type) == INTEGER_TYPE
9068 && TREE_TYPE (type) != 0
9069 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9071 type = TREE_TYPE (type);
9072 goto retry;
9075 /* Or to fits_to_tree_p, if nothing else. */
9076 return wi::fits_to_tree_p (wi::to_wide (c), type);
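/* Illustrative sketch, not part of the original sources: on a target
   where unsigned char is 8 bits wide,

     tree c1 = build_int_cst (integer_type_node, 200);
     tree c2 = build_int_cst (integer_type_node, 300);

   int_fits_type_p (c1, unsigned_char_type_node) is true, while
   int_fits_type_p (c2, unsigned_char_type_node) is false because 300
   exceeds the constant TYPE_MAX_VALUE of the 8-bit type.  */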
9079 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9080 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9081 represented (assuming two's-complement arithmetic) within the bit
9082 precision of the type are returned instead. */
9084 void
9085 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9087 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9088 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9089 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9090 else
9092 if (TYPE_UNSIGNED (type))
9093 mpz_set_ui (min, 0);
9094 else
9096 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9097 wi::to_mpz (mn, min, SIGNED);
9101 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9102 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9103 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9104 else
9106 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9107 wi::to_mpz (mn, max, TYPE_SIGN (type));
9111 /* Return true if VAR is an automatic variable. */
9113 bool
9114 auto_var_p (const_tree var)
9116 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9117 || TREE_CODE (var) == PARM_DECL)
9118 && ! TREE_STATIC (var))
9119 || TREE_CODE (var) == RESULT_DECL);
9122 /* Return true if VAR is an automatic variable defined in function FN. */
9124 bool
9125 auto_var_in_fn_p (const_tree var, const_tree fn)
9127 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9128 && (auto_var_p (var)
9129 || TREE_CODE (var) == LABEL_DECL));
9132 /* Subprogram of the following function. Called by walk_tree.
9134 Return *TP if it is an automatic variable or parameter of the
9135 function passed in as DATA. */
9137 static tree
9138 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9140 tree fn = (tree) data;
9142 if (TYPE_P (*tp))
9143 *walk_subtrees = 0;
9145 else if (DECL_P (*tp)
9146 && auto_var_in_fn_p (*tp, fn))
9147 return *tp;
9149 return NULL_TREE;
9152 /* Returns true if T is, contains, or refers to a type with variable
9153 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9154 arguments, but not the return type. If FN is nonzero, only return
9155 true if a modifier of the type or position of FN is a variable or
9156 parameter inside FN.
9158 This concept is more general than that of C99 'variably modified types':
9159 in C99, a struct type is never variably modified because a VLA may not
9160 appear as a structure member. However, in GNU C, code like:
9162 struct S { int i[f()]; };
9164 is valid, and other languages may define similar constructs. */
9166 bool
9167 variably_modified_type_p (tree type, tree fn)
9169 tree t;
9171 /* Test if T is either variable (if FN is zero) or an expression containing
9172 a variable in FN. If TYPE isn't gimplified, return true also if
9173 gimplify_one_sizepos would gimplify the expression into a local
9174 variable. */
9175 #define RETURN_TRUE_IF_VAR(T) \
9176 do { tree _t = (T); \
9177 if (_t != NULL_TREE \
9178 && _t != error_mark_node \
9179 && !CONSTANT_CLASS_P (_t) \
9180 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9181 && (!fn \
9182 || (!TYPE_SIZES_GIMPLIFIED (type) \
9183 && (TREE_CODE (_t) != VAR_DECL \
9184 && !CONTAINS_PLACEHOLDER_P (_t))) \
9185 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9186 return true; } while (0)
9188 if (type == error_mark_node)
9189 return false;
9191 /* If TYPE itself has variable size, it is variably modified. */
9192 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9193 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9195 switch (TREE_CODE (type))
9197 case POINTER_TYPE:
9198 case REFERENCE_TYPE:
9199 case VECTOR_TYPE:
9200 /* Ada can have pointer types referring to themselves indirectly. */
9201 if (TREE_VISITED (type))
9202 return false;
9203 TREE_VISITED (type) = true;
9204 if (variably_modified_type_p (TREE_TYPE (type), fn))
9206 TREE_VISITED (type) = false;
9207 return true;
9209 TREE_VISITED (type) = false;
9210 break;
9212 case FUNCTION_TYPE:
9213 case METHOD_TYPE:
9214 /* If TYPE is a function type, it is variably modified if the
9215 return type is variably modified. */
9216 if (variably_modified_type_p (TREE_TYPE (type), fn))
9217 return true;
9218 break;
9220 case INTEGER_TYPE:
9221 case REAL_TYPE:
9222 case FIXED_POINT_TYPE:
9223 case ENUMERAL_TYPE:
9224 case BOOLEAN_TYPE:
9225 /* Scalar types are variably modified if their end points
9226 aren't constant. */
9227 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9228 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9229 break;
9231 case RECORD_TYPE:
9232 case UNION_TYPE:
9233 case QUAL_UNION_TYPE:
9234 /* We can't see if any of the fields are variably-modified by the
9235 definition we normally use, since that would produce infinite
9236 recursion via pointers. */
9237 /* This is variably modified if some field's type is. */
9238 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9239 if (TREE_CODE (t) == FIELD_DECL)
9241 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9242 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9243 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9245 /* If the type is a qualified union, then the DECL_QUALIFIER
9246 of fields can also be an expression containing a variable. */
9247 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9248 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9250 /* If the field is a qualified union, then it's only a container
9251 for what's inside so we look into it. That's necessary in LTO
9252 mode because the sizes of the field tested above have been set
9253 to PLACEHOLDER_EXPRs by free_lang_data. */
9254 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
9255 && variably_modified_type_p (TREE_TYPE (t), fn))
9256 return true;
9258 break;
9260 case ARRAY_TYPE:
9261 /* Do not call ourselves to avoid infinite recursion. This is
9262 variably modified if the element type is. */
9263 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9264 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9265 break;
9267 default:
9268 break;
9271 /* The current language may have other cases to check, but in general,
9272 all other types are not variably modified. */
9273 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9275 #undef RETURN_TRUE_IF_VAR
9278 /* Given a DECL or TYPE, return the scope in which it was declared, or
9279 NULL_TREE if there is no containing scope. */
9281 tree
9282 get_containing_scope (const_tree t)
9284 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9287 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9289 const_tree
9290 get_ultimate_context (const_tree decl)
9292 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9294 if (TREE_CODE (decl) == BLOCK)
9295 decl = BLOCK_SUPERCONTEXT (decl);
9296 else
9297 decl = get_containing_scope (decl);
9299 return decl;
9302 /* Return the innermost context enclosing DECL that is
9303 a FUNCTION_DECL, or zero if none. */
9305 tree
9306 decl_function_context (const_tree decl)
9308 tree context;
9310 if (TREE_CODE (decl) == ERROR_MARK)
9311 return 0;
9313 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9314 where we look up the function at runtime. Such functions always take
9315 a first argument of type 'pointer to real context'.
9317 C++ should really be fixed to use DECL_CONTEXT for the real context,
9318 and use something else for the "virtual context". */
9319 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9320 context
9321 = TYPE_MAIN_VARIANT
9322 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9323 else
9324 context = DECL_CONTEXT (decl);
9326 while (context && TREE_CODE (context) != FUNCTION_DECL)
9328 if (TREE_CODE (context) == BLOCK)
9329 context = BLOCK_SUPERCONTEXT (context);
9330 else
9331 context = get_containing_scope (context);
9334 return context;
9337 /* Return the innermost context enclosing DECL that is
9338 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9339 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9341 tree
9342 decl_type_context (const_tree decl)
9344 tree context = DECL_CONTEXT (decl);
9346 while (context)
9347 switch (TREE_CODE (context))
9349 case NAMESPACE_DECL:
9350 case TRANSLATION_UNIT_DECL:
9351 return NULL_TREE;
9353 case RECORD_TYPE:
9354 case UNION_TYPE:
9355 case QUAL_UNION_TYPE:
9356 return context;
9358 case TYPE_DECL:
9359 case FUNCTION_DECL:
9360 context = DECL_CONTEXT (context);
9361 break;
9363 case BLOCK:
9364 context = BLOCK_SUPERCONTEXT (context);
9365 break;
9367 default:
9368 gcc_unreachable ();
9371 return NULL_TREE;
9374 /* CALL is a CALL_EXPR. Return the declaration for the function
9375 called, or NULL_TREE if the called function cannot be
9376 determined. */
9378 tree
9379 get_callee_fndecl (const_tree call)
9381 tree addr;
9383 if (call == error_mark_node)
9384 return error_mark_node;
9386 /* It's invalid to call this function with anything but a
9387 CALL_EXPR. */
9388 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9390 /* The first operand to the CALL is the address of the function
9391 called. */
9392 addr = CALL_EXPR_FN (call);
9394 /* If there is no function, return early. */
9395 if (addr == NULL_TREE)
9396 return NULL_TREE;
9398 STRIP_NOPS (addr);
9400 /* If this is a readonly function pointer, extract its initial value. */
9401 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9402 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9403 && DECL_INITIAL (addr))
9404 addr = DECL_INITIAL (addr);
9406 /* If the address is just `&f' for some function `f', then we know
9407 that `f' is being called. */
9408 if (TREE_CODE (addr) == ADDR_EXPR
9409 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9410 return TREE_OPERAND (addr, 0);
9412 /* We couldn't figure out what was being called. */
9413 return NULL_TREE;
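/* Illustrative sketch, not part of the original sources: for a call
   built directly against a FUNCTION_DECL, e.g.

     tree call = build_call_expr (some_fndecl, 1, integer_zero_node);

   CALL_EXPR_FN (call) is an ADDR_EXPR of some_fndecl, so
   get_callee_fndecl (call) returns some_fndecl (some_fndecl being a
   hypothetical FUNCTION_DECL here).  For an indirect call through a
   function pointer variable it usually returns NULL_TREE instead.  */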
9416 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9417 return the associated function code, otherwise return CFN_LAST. */
9419 combined_fn
9420 get_call_combined_fn (const_tree call)
9422 /* It's invalid to call this function with anything but a CALL_EXPR. */
9423 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9425 if (!CALL_EXPR_FN (call))
9426 return as_combined_fn (CALL_EXPR_IFN (call));
9428 tree fndecl = get_callee_fndecl (call);
9429 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9430 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9432 return CFN_LAST;
9435 /* Comparator of indices based on tree_node_counts. */
9437 static int
9438 tree_nodes_cmp (const void *p1, const void *p2)
9440 const unsigned *n1 = (const unsigned *)p1;
9441 const unsigned *n2 = (const unsigned *)p2;
9443 return tree_node_counts[*n1] - tree_node_counts[*n2];
9446 /* Comparator of indices based on tree_code_counts. */
9448 static int
9449 tree_codes_cmp (const void *p1, const void *p2)
9451 const unsigned *n1 = (const unsigned *)p1;
9452 const unsigned *n2 = (const unsigned *)p2;
9454 return tree_code_counts[*n1] - tree_code_counts[*n2];
9457 #define TREE_MEM_USAGE_SPACES 40
9459 /* Print debugging information about tree nodes generated during the compile,
9460 and any language-specific information. */
9462 void
9463 dump_tree_statistics (void)
9465 if (GATHER_STATISTICS)
9467 uint64_t total_nodes, total_bytes;
9468 fprintf (stderr, "\nKind Nodes Bytes\n");
9469 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9470 total_nodes = total_bytes = 0;
9473 auto_vec<unsigned> indices (all_kinds);
9474 for (unsigned i = 0; i < all_kinds; i++)
9475 indices.quick_push (i);
9476 indices.qsort (tree_nodes_cmp);
9478 for (unsigned i = 0; i < (int) all_kinds; i++)
9480 unsigned j = indices[i];
9481 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9482 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9483 SIZE_AMOUNT (tree_node_sizes[j]));
9484 total_nodes += tree_node_counts[j];
9485 total_bytes += tree_node_sizes[j];
9487 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9488 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9489 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9490 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9494 fprintf (stderr, "Code Nodes\n");
9495 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9497 auto_vec<unsigned> indices (MAX_TREE_CODES);
9498 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9499 indices.quick_push (i);
9500 indices.qsort (tree_codes_cmp);
9502 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9504 unsigned j = indices[i];
9505 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9506 get_tree_code_name ((enum tree_code) j),
9507 SIZE_AMOUNT (tree_code_counts[j]));
9509 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9510 fprintf (stderr, "\n");
9511 ssanames_print_statistics ();
9512 fprintf (stderr, "\n");
9513 phinodes_print_statistics ();
9514 fprintf (stderr, "\n");
9517 else
9518 fprintf (stderr, "(No per-node statistics)\n");
9520 print_type_hash_statistics ();
9521 print_debug_expr_statistics ();
9522 print_value_expr_statistics ();
9523 lang_hooks.print_statistics ();
9526 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9528 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9530 unsigned
9531 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9533 /* This relies on the raw feedback's top 4 bits being zero. */
9534 #define FEEDBACK(X) ((X) * 0x04c11db7)
9535 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9536 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9537 static const unsigned syndromes[16] =
9539 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9540 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9541 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9542 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9544 #undef FEEDBACK
9545 #undef SYNDROME
9547 value <<= (32 - bytes * 8);
9548 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9550 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9552 chksum = (chksum << 4) ^ feedback;
9555 return chksum;
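/* Illustrative note, not part of the original sources: this appears to
   be a table-driven, most-significant-bit-first CRC-32 using the
   polynomial 0x04c11db7, processed one nibble per iteration with no
   reflection and no final xor; crc32_byte, used by crc32_string below,
   is a single-byte wrapper around it.  */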
9558 /* Generate a crc32 of a string. */
9560 unsigned
9561 crc32_string (unsigned chksum, const char *string)
9563 do
9564 chksum = crc32_byte (chksum, *string);
9565 while (*string++);
9566 return chksum;
9569 /* P is a string that will be used in a symbol. Mask out any characters
9570 that are not valid in that context. */
9572 void
9573 clean_symbol_name (char *p)
9575 for (; *p; p++)
9576 if (! (ISALNUM (*p)
9577 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9578 || *p == '$'
9579 #endif
9580 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9581 || *p == '.'
9582 #endif
9584 *p = '_';
9587 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
9589 /* Create a unique anonymous identifier. The identifier is still a
9590 valid assembly label. */
9592 tree
9593 make_anon_name ()
9595 const char *fmt =
9596 #if !defined (NO_DOT_IN_LABEL)
9597 "."
9598 #elif !defined (NO_DOLLAR_IN_LABEL)
9599 "$"
9600 #else
9601 "_"
9602 #endif
9603 "_anon_%d";
9605 char buf[24];
9606 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9607 gcc_checking_assert (len < int (sizeof (buf)));
9609 tree id = get_identifier_with_length (buf, len);
9610 IDENTIFIER_ANON_P (id) = true;
9612 return id;
9615 /* Generate a name for a special-purpose function.
9616 The generated name may need to be unique across the whole link.
9617 Changes to this function may also require corresponding changes to
9618 xstrdup_mask_random.
9619 TYPE is some string to identify the purpose of this function to the
9620 linker or collect2; it must start with an uppercase letter,
9621 one of:
9622 I - for constructors
9623 D - for destructors
9624 N - for C++ anonymous namespaces
9625 F - for DWARF unwind frame information. */
9627 tree
9628 get_file_function_name (const char *type)
9630 char *buf;
9631 const char *p;
9632 char *q;
9634 /* If we already have a name we know to be unique, just use that. */
9635 if (first_global_object_name)
9636 p = q = ASTRDUP (first_global_object_name);
9637 /* If the target is handling the constructors/destructors, they
9638 will be local to this file and the name is only necessary for
9639 debugging purposes.
9640 We also assign sub_I and sub_D suffixes to constructors called from
9641 the global static constructors. These are always local. */
9642 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9643 || (strncmp (type, "sub_", 4) == 0
9644 && (type[4] == 'I' || type[4] == 'D')))
9646 const char *file = main_input_filename;
9647 if (! file)
9648 file = LOCATION_FILE (input_location);
9649 /* Just use the file's basename, because the full pathname
9650 might be quite long. */
9651 p = q = ASTRDUP (lbasename (file));
9653 else
9655 /* Otherwise, the name must be unique across the entire link.
9656 We don't have anything that we know to be unique to this translation
9657 unit, so use what we do have and throw in some randomness. */
9658 unsigned len;
9659 const char *name = weak_global_object_name;
9660 const char *file = main_input_filename;
9662 if (! name)
9663 name = "";
9664 if (! file)
9665 file = LOCATION_FILE (input_location);
9667 len = strlen (file);
9668 q = (char *) alloca (9 + 19 + len + 1);
9669 memcpy (q, file, len + 1);
9671 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9672 crc32_string (0, name), get_random_seed (false));
9674 p = q;
9677 clean_symbol_name (q);
9678 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9679 + strlen (type));
9681 /* Set up the name of the file-level functions we may need.
9682 Use a global object (which is already required to be unique over
9683 the program) rather than the file name (which imposes extra
9684 constraints). */
9685 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9687 return get_identifier (buf);
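/* Illustrative sketch, not part of the original sources: with
   FILE_FUNCTION_FORMAT being "_GLOBAL__%s_%s", a request such as

     tree id = get_file_function_name ("I");

   yields an identifier of the form "_GLOBAL__I_<name>", where <name> is
   derived from first_global_object_name, from the input file's
   basename, or from the CRC-plus-random-seed fallback, and is cleaned
   by clean_symbol_name.  */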
9690 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9692 /* Complain that the tree code of NODE does not match the expected 0
9693 terminated list of trailing codes. The trailing code list can be
9694 empty, for a more vague error message. FILE, LINE, and FUNCTION
9695 are of the caller. */
9697 void
9698 tree_check_failed (const_tree node, const char *file,
9699 int line, const char *function, ...)
9701 va_list args;
9702 const char *buffer;
9703 unsigned length = 0;
9704 enum tree_code code;
9706 va_start (args, function);
9707 while ((code = (enum tree_code) va_arg (args, int)))
9708 length += 4 + strlen (get_tree_code_name (code));
9709 va_end (args);
9710 if (length)
9712 char *tmp;
9713 va_start (args, function);
9714 length += strlen ("expected ");
9715 buffer = tmp = (char *) alloca (length);
9716 length = 0;
9717 while ((code = (enum tree_code) va_arg (args, int)))
9719 const char *prefix = length ? " or " : "expected ";
9721 strcpy (tmp + length, prefix);
9722 length += strlen (prefix);
9723 strcpy (tmp + length, get_tree_code_name (code));
9724 length += strlen (get_tree_code_name (code));
9726 va_end (args);
9728 else
9729 buffer = "unexpected node";
9731 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9732 buffer, get_tree_code_name (TREE_CODE (node)),
9733 function, trim_filename (file), line);
9736 /* Complain that the tree code of NODE matches one of the disallowed
9737 codes in the 0-terminated trailing list. FILE, LINE, and FUNCTION are of
9738 the caller. */
9740 void
9741 tree_not_check_failed (const_tree node, const char *file,
9742 int line, const char *function, ...)
9744 va_list args;
9745 char *buffer;
9746 unsigned length = 0;
9747 enum tree_code code;
9749 va_start (args, function);
9750 while ((code = (enum tree_code) va_arg (args, int)))
9751 length += 4 + strlen (get_tree_code_name (code));
9752 va_end (args);
9753 va_start (args, function);
9754 buffer = (char *) alloca (length);
9755 length = 0;
9756 while ((code = (enum tree_code) va_arg (args, int)))
9758 if (length)
9760 strcpy (buffer + length, " or ");
9761 length += 4;
9763 strcpy (buffer + length, get_tree_code_name (code));
9764 length += strlen (get_tree_code_name (code));
9766 va_end (args);
9768 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9769 buffer, get_tree_code_name (TREE_CODE (node)),
9770 function, trim_filename (file), line);
9773 /* Similar to tree_check_failed, except that we check for a class of tree
9774 code, given in CL. */
9776 void
9777 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9778 const char *file, int line, const char *function)
9780 internal_error
9781 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9782 TREE_CODE_CLASS_STRING (cl),
9783 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9784 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9787 /* Similar to tree_check_failed, except that instead of specifying a
9788 dozen codes, use the knowledge that they're all sequential. */
9790 void
9791 tree_range_check_failed (const_tree node, const char *file, int line,
9792 const char *function, enum tree_code c1,
9793 enum tree_code c2)
9795 char *buffer;
9796 unsigned length = 0;
9797 unsigned int c;
9799 for (c = c1; c <= c2; ++c)
9800 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9802 length += strlen ("expected ");
9803 buffer = (char *) alloca (length);
9804 length = 0;
9806 for (c = c1; c <= c2; ++c)
9808 const char *prefix = length ? " or " : "expected ";
9810 strcpy (buffer + length, prefix);
9811 length += strlen (prefix);
9812 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9813 length += strlen (get_tree_code_name ((enum tree_code) c));
9816 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9817 buffer, get_tree_code_name (TREE_CODE (node)),
9818 function, trim_filename (file), line);
9822 /* Similar to tree_check_failed, except that we check that a tree does
9823 not belong to the specified class, given in CL. */
9825 void
9826 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9827 const char *file, int line, const char *function)
9829 internal_error
9830 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9831 TREE_CODE_CLASS_STRING (cl),
9832 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9833 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9837 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9839 void
9840 omp_clause_check_failed (const_tree node, const char *file, int line,
9841 const char *function, enum omp_clause_code code)
9843 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9844 "in %s, at %s:%d",
9845 omp_clause_code_name[code],
9846 get_tree_code_name (TREE_CODE (node)),
9847 function, trim_filename (file), line);
9851 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9853 void
9854 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9855 const char *function, enum omp_clause_code c1,
9856 enum omp_clause_code c2)
9858 char *buffer;
9859 unsigned length = 0;
9860 unsigned int c;
9862 for (c = c1; c <= c2; ++c)
9863 length += 4 + strlen (omp_clause_code_name[c]);
9865 length += strlen ("expected ");
9866 buffer = (char *) alloca (length);
9867 length = 0;
9869 for (c = c1; c <= c2; ++c)
9871 const char *prefix = length ? " or " : "expected ";
9873 strcpy (buffer + length, prefix);
9874 length += strlen (prefix);
9875 strcpy (buffer + length, omp_clause_code_name[c]);
9876 length += strlen (omp_clause_code_name[c]);
9879 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9880 buffer, omp_clause_code_name[TREE_CODE (node)],
9881 function, trim_filename (file), line);
9885 #undef DEFTREESTRUCT
9886 #define DEFTREESTRUCT(VAL, NAME) NAME,
9888 static const char *ts_enum_names[] = {
9889 #include "treestruct.def"
9891 #undef DEFTREESTRUCT
9893 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9895 /* Similar to tree_class_check_failed, except that we check for
9896 whether CODE contains the tree structure identified by EN. */
9898 void
9899 tree_contains_struct_check_failed (const_tree node,
9900 const enum tree_node_structure_enum en,
9901 const char *file, int line,
9902 const char *function)
9904 internal_error
9905 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9906 TS_ENUM_NAME (en),
9907 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9911 /* Similar to above, except that the check is for the bounds of a
9912 TREE_INT_CST's (dynamically sized) array of HOST_WIDE_INT elements. */
9914 void
9915 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9916 const char *function)
9918 internal_error
9919 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9920 "at %s:%d",
9921 idx + 1, len, function, trim_filename (file), line);
9924 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9925 (dynamically sized) vector. */
9927 void
9928 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9929 const char *function)
9931 internal_error
9932 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9933 idx + 1, len, function, trim_filename (file), line);
9936 /* Similar to above, except that the check is for the bounds of the operand
9937 vector of an expression node EXP. */
9939 void
9940 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9941 int line, const char *function)
9943 enum tree_code code = TREE_CODE (exp);
9944 internal_error
9945 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9946 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9947 function, trim_filename (file), line);
9950 /* Similar to above, except that the check is for the number of
9951 operands of an OMP_CLAUSE node. */
9953 void
9954 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9955 int line, const char *function)
9957 internal_error
9958 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9959 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9960 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9961 trim_filename (file), line);
9963 #endif /* ENABLE_TREE_CHECKING */
9965 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9966 and mapped to the machine mode MODE. Initialize its fields and build
9967 the information necessary for debugging output. */
9969 static tree
9970 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9972 tree t;
9973 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9975 t = make_node (VECTOR_TYPE);
9976 TREE_TYPE (t) = mv_innertype;
9977 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9978 SET_TYPE_MODE (t, mode);
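/* Compute TYPE_CANONICAL for the new vector type: propagate structural
   equality from the element type (or force it in LTO); otherwise, if the
   element type does not serve as its own canonical type or a specific
   machine mode was requested, canonicalize via a VOIDmode vector built
   from the canonical element type. Boolean vectors are left as their
   own canonical type. */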
9980 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9981 SET_TYPE_STRUCTURAL_EQUALITY (t);
9982 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9983 || mode != VOIDmode)
9984 && !VECTOR_BOOLEAN_TYPE_P (t))
9985 TYPE_CANONICAL (t)
9986 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9988 layout_type (t);
9990 hashval_t hash = type_hash_canon_hash (t);
9991 t = type_hash_canon (hash, t);
9993 /* We have built a main variant, based on the main variant of the
9994 inner type. Use it to build the variant we return. */
9995 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9996 && TREE_TYPE (t) != innertype)
9997 return build_type_attribute_qual_variant (t,
9998 TYPE_ATTRIBUTES (innertype),
9999 TYPE_QUALS (innertype));
10001 return t;
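/* Return an integer type node with SIZE bits and the given signedness,
   reusing one of the standard C type nodes or an enabled __intN node
   when the size matches, and creating a new type otherwise. */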
10004 static tree
10005 make_or_reuse_type (unsigned size, int unsignedp)
10007 int i;
10009 if (size == INT_TYPE_SIZE)
10010 return unsignedp ? unsigned_type_node : integer_type_node;
10011 if (size == CHAR_TYPE_SIZE)
10012 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10013 if (size == SHORT_TYPE_SIZE)
10014 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10015 if (size == LONG_TYPE_SIZE)
10016 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10017 if (size == LONG_LONG_TYPE_SIZE)
10018 return (unsignedp ? long_long_unsigned_type_node
10019 : long_long_integer_type_node);
10021 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10022 if (size == int_n_data[i].bitsize
10023 && int_n_enabled_p[i])
10024 return (unsignedp ? int_n_trees[i].unsigned_type
10025 : int_n_trees[i].signed_type);
10027 if (unsignedp)
10028 return make_unsigned_type (size);
10029 else
10030 return make_signed_type (size);
10033 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10035 static tree
10036 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10038 if (satp)
10040 if (size == SHORT_FRACT_TYPE_SIZE)
10041 return unsignedp ? sat_unsigned_short_fract_type_node
10042 : sat_short_fract_type_node;
10043 if (size == FRACT_TYPE_SIZE)
10044 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10045 if (size == LONG_FRACT_TYPE_SIZE)
10046 return unsignedp ? sat_unsigned_long_fract_type_node
10047 : sat_long_fract_type_node;
10048 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10049 return unsignedp ? sat_unsigned_long_long_fract_type_node
10050 : sat_long_long_fract_type_node;
10052 else
10054 if (size == SHORT_FRACT_TYPE_SIZE)
10055 return unsignedp ? unsigned_short_fract_type_node
10056 : short_fract_type_node;
10057 if (size == FRACT_TYPE_SIZE)
10058 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10059 if (size == LONG_FRACT_TYPE_SIZE)
10060 return unsignedp ? unsigned_long_fract_type_node
10061 : long_fract_type_node;
10062 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10063 return unsignedp ? unsigned_long_long_fract_type_node
10064 : long_long_fract_type_node;
10067 return make_fract_type (size, unsignedp, satp);
10070 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10072 static tree
10073 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10075 if (satp)
10077 if (size == SHORT_ACCUM_TYPE_SIZE)
10078 return unsignedp ? sat_unsigned_short_accum_type_node
10079 : sat_short_accum_type_node;
10080 if (size == ACCUM_TYPE_SIZE)
10081 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10082 if (size == LONG_ACCUM_TYPE_SIZE)
10083 return unsignedp ? sat_unsigned_long_accum_type_node
10084 : sat_long_accum_type_node;
10085 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10086 return unsignedp ? sat_unsigned_long_long_accum_type_node
10087 : sat_long_long_accum_type_node;
10089 else
10091 if (size == SHORT_ACCUM_TYPE_SIZE)
10092 return unsignedp ? unsigned_short_accum_type_node
10093 : short_accum_type_node;
10094 if (size == ACCUM_TYPE_SIZE)
10095 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10096 if (size == LONG_ACCUM_TYPE_SIZE)
10097 return unsignedp ? unsigned_long_accum_type_node
10098 : long_accum_type_node;
10099 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10100 return unsignedp ? unsigned_long_long_accum_type_node
10101 : long_long_accum_type_node;
10104 return make_accum_type (size, unsignedp, satp);
10108 /* Create an atomic variant node for TYPE. This routine is called
10109 during initialization of data types to create the 5 basic atomic
10110 types. The generic build_variant_type function requires these to
10111 already be set up in order to function properly, so cannot be
10112 called from there. If ALIGN is non-zero, then ensure alignment is
10113 overridden to this value. */
10115 static tree
10116 build_atomic_base (tree type, unsigned int align)
10118 tree t;
10120 /* Make sure it's not already registered. */
10121 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10122 return t;
10124 t = build_variant_type_copy (type);
10125 set_type_quals (t, TYPE_QUAL_ATOMIC);
10127 if (align)
10128 SET_TYPE_ALIGN (t, align);
10130 return t;
10133 /* Information about the _FloatN and _FloatNx types. This must be in
10134 the same order as the corresponding TI_* enum values. */
10135 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10137 { 16, false },
10138 { 32, false },
10139 { 64, false },
10140 { 128, false },
10141 { 32, true },
10142 { 64, true },
10143 { 128, true },
10147 /* Create nodes for all integer types (and error_mark_node) using the sizes
10148 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10150 void
10151 build_common_tree_nodes (bool signed_char)
10153 int i;
10155 error_mark_node = make_node (ERROR_MARK);
10156 TREE_TYPE (error_mark_node) = error_mark_node;
10158 initialize_sizetypes ();
10160 /* Define both `signed char' and `unsigned char'. */
10161 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10162 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10163 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10164 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10166 /* Define `char', which is like either `signed char' or `unsigned char'
10167 but not the same as either. */
10168 char_type_node
10169 = (signed_char
10170 ? make_signed_type (CHAR_TYPE_SIZE)
10171 : make_unsigned_type (CHAR_TYPE_SIZE));
10172 TYPE_STRING_FLAG (char_type_node) = 1;
10174 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10175 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10176 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10177 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10178 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10179 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10180 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10181 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10183 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10185 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10186 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10188 if (int_n_enabled_p[i])
10190 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10191 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10195 /* Define a boolean type. This type only represents boolean values but
10196 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10197 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10198 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10199 TYPE_PRECISION (boolean_type_node) = 1;
10200 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10202 /* Define what type to use for size_t. */
10203 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10204 size_type_node = unsigned_type_node;
10205 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10206 size_type_node = long_unsigned_type_node;
10207 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10208 size_type_node = long_long_unsigned_type_node;
10209 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10210 size_type_node = short_unsigned_type_node;
10211 else
10213 int i;
10215 size_type_node = NULL_TREE;
10216 for (i = 0; i < NUM_INT_N_ENTS; i++)
10217 if (int_n_enabled_p[i])
10219 char name[50], altname[50];
10220 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10221 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10223 if (strcmp (name, SIZE_TYPE) == 0
10224 || strcmp (altname, SIZE_TYPE) == 0)
10226 size_type_node = int_n_trees[i].unsigned_type;
10229 if (size_type_node == NULL_TREE)
10230 gcc_unreachable ();
10233 /* Define what type to use for ptrdiff_t. */
10234 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10235 ptrdiff_type_node = integer_type_node;
10236 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10237 ptrdiff_type_node = long_integer_type_node;
10238 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10239 ptrdiff_type_node = long_long_integer_type_node;
10240 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10241 ptrdiff_type_node = short_integer_type_node;
10242 else
10244 ptrdiff_type_node = NULL_TREE;
10245 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10246 if (int_n_enabled_p[i])
10248 char name[50], altname[50];
10249 sprintf (name, "__int%d", int_n_data[i].bitsize);
10250 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10252 if (strcmp (name, PTRDIFF_TYPE) == 0
10253 || strcmp (altname, PTRDIFF_TYPE) == 0)
10254 ptrdiff_type_node = int_n_trees[i].signed_type;
10256 if (ptrdiff_type_node == NULL_TREE)
10257 gcc_unreachable ();
10260 /* Fill in the rest of the sized types. Reuse existing type nodes
10261 when possible. */
10262 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10263 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10264 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10265 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10266 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10268 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10269 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10270 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10271 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10272 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10274 /* Don't call build_qualified_type for atomics. That routine does
10275 special processing for atomics, and until they are initialized
10276 it's better not to make that call.
10278 Check to see if there is a target override for atomic types. */
10280 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10281 targetm.atomic_align_for_mode (QImode));
10282 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10283 targetm.atomic_align_for_mode (HImode));
10284 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10285 targetm.atomic_align_for_mode (SImode));
10286 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10287 targetm.atomic_align_for_mode (DImode));
10288 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10289 targetm.atomic_align_for_mode (TImode));
10291 access_public_node = get_identifier ("public");
10292 access_protected_node = get_identifier ("protected");
10293 access_private_node = get_identifier ("private");
10295 /* Define these next since types below may use them. */
10296 integer_zero_node = build_int_cst (integer_type_node, 0);
10297 integer_one_node = build_int_cst (integer_type_node, 1);
10298 integer_three_node = build_int_cst (integer_type_node, 3);
10299 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10301 size_zero_node = size_int (0);
10302 size_one_node = size_int (1);
10303 bitsize_zero_node = bitsize_int (0);
10304 bitsize_one_node = bitsize_int (1);
10305 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10307 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10308 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10310 void_type_node = make_node (VOID_TYPE);
10311 layout_type (void_type_node);
10313 /* We are not going to have real types in C with less than byte alignment,
10314 so we might as well not have any types that claim to have it. */
10315 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10316 TYPE_USER_ALIGN (void_type_node) = 0;
10318 void_node = make_node (VOID_CST);
10319 TREE_TYPE (void_node) = void_type_node;
10321 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10322 layout_type (TREE_TYPE (null_pointer_node));
10324 ptr_type_node = build_pointer_type (void_type_node);
10325 const_ptr_type_node
10326 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10327 for (unsigned i = 0;
10328 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10329 ++i)
10330 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10332 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10334 float_type_node = make_node (REAL_TYPE);
10335 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10336 layout_type (float_type_node);
10338 double_type_node = make_node (REAL_TYPE);
10339 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10340 layout_type (double_type_node);
10342 long_double_type_node = make_node (REAL_TYPE);
10343 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10344 layout_type (long_double_type_node);
10346 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10348 int n = floatn_nx_types[i].n;
10349 bool extended = floatn_nx_types[i].extended;
10350 scalar_float_mode mode;
10351 if (!targetm.floatn_mode (n, extended).exists (&mode))
10352 continue;
10353 int precision = GET_MODE_PRECISION (mode);
10354 /* Work around the rs6000 KFmode having precision 113 not
10355 128. */
10356 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10357 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10358 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
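/* For example, IEEE binary128 has p == 113 and emax - emin == 32765,
   so min_precision is 113 + 15 == 128; this bumps the 113-bit KFmode
   precision up to the nominal 128 bits. */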
10359 if (!extended)
10360 gcc_assert (min_precision == n);
10361 if (precision < min_precision)
10362 precision = min_precision;
10363 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10364 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10365 layout_type (FLOATN_NX_TYPE_NODE (i));
10366 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10369 float_ptr_type_node = build_pointer_type (float_type_node);
10370 double_ptr_type_node = build_pointer_type (double_type_node);
10371 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10372 integer_ptr_type_node = build_pointer_type (integer_type_node);
10374 /* Fixed size integer types. */
10375 uint16_type_node = make_or_reuse_type (16, 1);
10376 uint32_type_node = make_or_reuse_type (32, 1);
10377 uint64_type_node = make_or_reuse_type (64, 1);
10379 /* Decimal float types. */
10380 if (targetm.decimal_float_supported_p ())
10382 dfloat32_type_node = make_node (REAL_TYPE);
10383 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10384 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10385 layout_type (dfloat32_type_node);
10387 dfloat64_type_node = make_node (REAL_TYPE);
10388 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10389 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10390 layout_type (dfloat64_type_node);
10392 dfloat128_type_node = make_node (REAL_TYPE);
10393 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10394 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10395 layout_type (dfloat128_type_node);
10398 complex_integer_type_node = build_complex_type (integer_type_node, true);
10399 complex_float_type_node = build_complex_type (float_type_node, true);
10400 complex_double_type_node = build_complex_type (double_type_node, true);
10401 complex_long_double_type_node = build_complex_type (long_double_type_node,
10402 true);
10404 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10406 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10407 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10408 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10411 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10412 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10413 sat_ ## KIND ## _type_node = \
10414 make_sat_signed_ ## KIND ## _type (SIZE); \
10415 sat_unsigned_ ## KIND ## _type_node = \
10416 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10417 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10418 unsigned_ ## KIND ## _type_node = \
10419 make_unsigned_ ## KIND ## _type (SIZE);
10421 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10422 sat_ ## WIDTH ## KIND ## _type_node = \
10423 make_sat_signed_ ## KIND ## _type (SIZE); \
10424 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10425 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10426 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10427 unsigned_ ## WIDTH ## KIND ## _type_node = \
10428 make_unsigned_ ## KIND ## _type (SIZE);
10430 /* Make fixed-point type nodes based on four different widths. */
10431 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10432 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10433 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10434 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10435 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10437 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10438 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10439 NAME ## _type_node = \
10440 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10441 u ## NAME ## _type_node = \
10442 make_or_reuse_unsigned_ ## KIND ## _type \
10443 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10444 sat_ ## NAME ## _type_node = \
10445 make_or_reuse_sat_signed_ ## KIND ## _type \
10446 (GET_MODE_BITSIZE (MODE ## mode)); \
10447 sat_u ## NAME ## _type_node = \
10448 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10449 (GET_MODE_BITSIZE (U ## MODE ## mode));
10451 /* Fixed-point type and mode nodes. */
10452 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10453 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10454 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10455 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10456 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10457 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10458 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10459 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10460 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10461 MAKE_FIXED_MODE_NODE (accum, da, DA)
10462 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10465 tree t = targetm.build_builtin_va_list ();
10467 /* Many back-ends define record types without setting TYPE_NAME.
10468 If we copied the record type here, we'd keep the original
10469 record type without a name. This breaks name mangling. So,
10470 don't copy record types and let c_common_nodes_and_builtins()
10471 declare the type to be __builtin_va_list. */
10472 if (TREE_CODE (t) != RECORD_TYPE)
10473 t = build_variant_type_copy (t);
10475 va_list_type_node = t;
10478 /* SCEV analyzer global shared trees. */
10479 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10480 TREE_TYPE (chrec_dont_know) = void_type_node;
10481 chrec_known = make_node (SCEV_KNOWN);
10482 TREE_TYPE (chrec_known) = void_type_node;
10485 /* Modify DECL for the given flags.
10486 The TM_PURE attribute is set only on types, so the function will modify
10487 DECL's type when ECF_TM_PURE is used. */
10489 void
10490 set_call_expr_flags (tree decl, int flags)
10492 if (flags & ECF_NOTHROW)
10493 TREE_NOTHROW (decl) = 1;
10494 if (flags & ECF_CONST)
10495 TREE_READONLY (decl) = 1;
10496 if (flags & ECF_PURE)
10497 DECL_PURE_P (decl) = 1;
10498 if (flags & ECF_LOOPING_CONST_OR_PURE)
10499 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10500 if (flags & ECF_NOVOPS)
10501 DECL_IS_NOVOPS (decl) = 1;
10502 if (flags & ECF_NORETURN)
10503 TREE_THIS_VOLATILE (decl) = 1;
10504 if (flags & ECF_MALLOC)
10505 DECL_IS_MALLOC (decl) = 1;
10506 if (flags & ECF_RETURNS_TWICE)
10507 DECL_IS_RETURNS_TWICE (decl) = 1;
10508 if (flags & ECF_LEAF)
10509 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10510 NULL, DECL_ATTRIBUTES (decl));
10511 if (flags & ECF_COLD)
10512 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10513 NULL, DECL_ATTRIBUTES (decl));
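/* ECF_RET1 functions return their first argument (memcpy-style);
   record that through the "fn spec" attribute string "1". */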
10514 if (flags & ECF_RET1)
10515 DECL_ATTRIBUTES (decl)
10516 = tree_cons (get_identifier ("fn spec"),
10517 build_tree_list (NULL_TREE, build_string (1, "1")),
10518 DECL_ATTRIBUTES (decl));
10519 if ((flags & ECF_TM_PURE) && flag_tm)
10520 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10521 /* Looping const or pure is implied by noreturn.
10522 There is currently no way to declare looping const or looping pure alone. */
10523 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10524 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10528 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10530 static void
10531 local_define_builtin (const char *name, tree type, enum built_in_function code,
10532 const char *library_name, int ecf_flags)
10534 tree decl;
10536 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10537 library_name, NULL_TREE);
10538 set_call_expr_flags (decl, ecf_flags);
10540 set_builtin_decl (code, decl, true);
10543 /* Call this function after instantiating all builtins that the language
10544 front end cares about. This will build the rest of the builtins
10545 and internal functions that are relied upon by the tree optimizers and
10546 the middle-end. */
10548 void
10549 build_common_builtin_nodes (void)
10551 tree tmp, ftype;
10552 int ecf_flags;
10554 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10555 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10557 ftype = build_function_type (void_type_node, void_list_node);
10558 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10559 local_define_builtin ("__builtin_unreachable", ftype,
10560 BUILT_IN_UNREACHABLE,
10561 "__builtin_unreachable",
10562 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10563 | ECF_CONST | ECF_COLD);
10564 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10565 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10566 "abort",
10567 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10570 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10571 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10573 ftype = build_function_type_list (ptr_type_node,
10574 ptr_type_node, const_ptr_type_node,
10575 size_type_node, NULL_TREE);
10577 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10578 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10579 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10580 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10581 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10582 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10585 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10587 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10588 const_ptr_type_node, size_type_node,
10589 NULL_TREE);
10590 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10591 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10594 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10596 ftype = build_function_type_list (ptr_type_node,
10597 ptr_type_node, integer_type_node,
10598 size_type_node, NULL_TREE);
10599 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10600 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10603 /* If we're checking the stack, `alloca' can throw. */
10604 const int alloca_flags
10605 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10607 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10609 ftype = build_function_type_list (ptr_type_node,
10610 size_type_node, NULL_TREE);
10611 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10612 "alloca", alloca_flags);
10615 ftype = build_function_type_list (ptr_type_node, size_type_node,
10616 size_type_node, NULL_TREE);
10617 local_define_builtin ("__builtin_alloca_with_align", ftype,
10618 BUILT_IN_ALLOCA_WITH_ALIGN,
10619 "__builtin_alloca_with_align",
10620 alloca_flags);
10622 ftype = build_function_type_list (ptr_type_node, size_type_node,
10623 size_type_node, size_type_node, NULL_TREE);
10624 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10625 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10626 "__builtin_alloca_with_align_and_max",
10627 alloca_flags);
10629 ftype = build_function_type_list (void_type_node,
10630 ptr_type_node, ptr_type_node,
10631 ptr_type_node, NULL_TREE);
10632 local_define_builtin ("__builtin_init_trampoline", ftype,
10633 BUILT_IN_INIT_TRAMPOLINE,
10634 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10635 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10636 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10637 "__builtin_init_heap_trampoline",
10638 ECF_NOTHROW | ECF_LEAF);
10639 local_define_builtin ("__builtin_init_descriptor", ftype,
10640 BUILT_IN_INIT_DESCRIPTOR,
10641 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10643 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10644 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10645 BUILT_IN_ADJUST_TRAMPOLINE,
10646 "__builtin_adjust_trampoline",
10647 ECF_CONST | ECF_NOTHROW);
10648 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10649 BUILT_IN_ADJUST_DESCRIPTOR,
10650 "__builtin_adjust_descriptor",
10651 ECF_CONST | ECF_NOTHROW);
10653 ftype = build_function_type_list (void_type_node,
10654 ptr_type_node, ptr_type_node, NULL_TREE);
10655 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10656 BUILT_IN_NONLOCAL_GOTO,
10657 "__builtin_nonlocal_goto",
10658 ECF_NORETURN | ECF_NOTHROW);
10660 ftype = build_function_type_list (void_type_node,
10661 ptr_type_node, ptr_type_node, NULL_TREE);
10662 local_define_builtin ("__builtin_setjmp_setup", ftype,
10663 BUILT_IN_SETJMP_SETUP,
10664 "__builtin_setjmp_setup", ECF_NOTHROW);
10666 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10667 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10668 BUILT_IN_SETJMP_RECEIVER,
10669 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10671 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10672 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10673 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10675 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10676 local_define_builtin ("__builtin_stack_restore", ftype,
10677 BUILT_IN_STACK_RESTORE,
10678 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10680 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10681 const_ptr_type_node, size_type_node,
10682 NULL_TREE);
10683 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10684 "__builtin_memcmp_eq",
10685 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10687 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10688 "__builtin_strncmp_eq",
10689 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10691 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10692 "__builtin_strcmp_eq",
10693 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10695 /* If there's a possibility that we might use the ARM EABI, build the
10696 alternate __cxa_end_cleanup node used to resume from C++. */
10697 if (targetm.arm_eabi_unwinder)
10699 ftype = build_function_type_list (void_type_node, NULL_TREE);
10700 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10701 BUILT_IN_CXA_END_CLEANUP,
10702 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10705 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10706 local_define_builtin ("__builtin_unwind_resume", ftype,
10707 BUILT_IN_UNWIND_RESUME,
10708 ((targetm_common.except_unwind_info (&global_options)
10709 == UI_SJLJ)
10710 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10711 ECF_NORETURN);
10713 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10715 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10716 NULL_TREE);
10717 local_define_builtin ("__builtin_return_address", ftype,
10718 BUILT_IN_RETURN_ADDRESS,
10719 "__builtin_return_address",
10720 ECF_NOTHROW);
10723 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10724 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10726 ftype = build_function_type_list (void_type_node, ptr_type_node,
10727 ptr_type_node, NULL_TREE);
10728 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10729 local_define_builtin ("__cyg_profile_func_enter", ftype,
10730 BUILT_IN_PROFILE_FUNC_ENTER,
10731 "__cyg_profile_func_enter", 0);
10732 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10733 local_define_builtin ("__cyg_profile_func_exit", ftype,
10734 BUILT_IN_PROFILE_FUNC_EXIT,
10735 "__cyg_profile_func_exit", 0);
10738 /* The exception object and filter values from the runtime. The argument
10739 must be zero before exception lowering, i.e. from the front end. After
10740 exception lowering, it will be the region number for the exception
10741 landing pad. These functions are PURE instead of CONST to prevent
10742 them from being hoisted past the exception edge that will initialize
10743 their values in the landing pad. */
10744 ftype = build_function_type_list (ptr_type_node,
10745 integer_type_node, NULL_TREE);
10746 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10747 /* Only use TM_PURE if we have TM language support. */
10748 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10749 ecf_flags |= ECF_TM_PURE;
10750 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10751 "__builtin_eh_pointer", ecf_flags);
10753 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10754 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10755 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10756 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10758 ftype = build_function_type_list (void_type_node,
10759 integer_type_node, integer_type_node,
10760 NULL_TREE);
10761 local_define_builtin ("__builtin_eh_copy_values", ftype,
10762 BUILT_IN_EH_COPY_VALUES,
10763 "__builtin_eh_copy_values", ECF_NOTHROW);
10765 /* Complex multiplication and division. These are handled as builtins
10766 rather than optabs because emit_library_call_value doesn't support
10767 complex. Further, we can do slightly better with folding these
10768 beasties if the real and imaginary parts of the arguments are separate. */
10770 int mode;
10772 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10774 char mode_name_buf[4], *q;
10775 const char *p;
10776 enum built_in_function mcode, dcode;
10777 tree type, inner_type;
10778 const char *prefix = "__";
10780 if (targetm.libfunc_gnu_prefix)
10781 prefix = "__gnu_";
10783 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10784 if (type == NULL)
10785 continue;
10786 inner_type = TREE_TYPE (type);
10788 ftype = build_function_type_list (type, inner_type, inner_type,
10789 inner_type, inner_type, NULL_TREE);
10791 mcode = ((enum built_in_function)
10792 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10793 dcode = ((enum built_in_function)
10794 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10796 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10797 *q = TOLOWER (*p);
10798 *q = '\0';
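/* E.g. for SCmode this produces "sc", giving the libgcc names
   "__mulsc3" and "__divsc3" (or "__gnu_mulsc3"/"__gnu_divsc3" with the
   GNU prefix). */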
10800 /* For -ftrapping-math these should throw from a former
10801 -fnon-call-exceptions stmt. */
10802 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10803 NULL);
10804 local_define_builtin (built_in_names[mcode], ftype, mcode,
10805 built_in_names[mcode],
10806 ECF_CONST | ECF_LEAF);
10808 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10809 NULL);
10810 local_define_builtin (built_in_names[dcode], ftype, dcode,
10811 built_in_names[dcode],
10812 ECF_CONST | ECF_LEAF);
10816 init_internal_fns ();
10819 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10820 better way.
10822 If we requested a pointer to a vector, build up the pointers that
10823 we stripped off while looking for the inner type. Similarly for
10824 return values from functions.
10826 The argument TYPE is the top of the chain, and BOTTOM is the
10827 new type which we will point to. */
10829 tree
10830 reconstruct_complex_type (tree type, tree bottom)
10832 tree inner, outer;
10834 if (TREE_CODE (type) == POINTER_TYPE)
10836 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10837 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10838 TYPE_REF_CAN_ALIAS_ALL (type));
10840 else if (TREE_CODE (type) == REFERENCE_TYPE)
10842 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10843 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10844 TYPE_REF_CAN_ALIAS_ALL (type));
10846 else if (TREE_CODE (type) == ARRAY_TYPE)
10848 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10849 outer = build_array_type (inner, TYPE_DOMAIN (type));
10851 else if (TREE_CODE (type) == FUNCTION_TYPE)
10853 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10854 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10856 else if (TREE_CODE (type) == METHOD_TYPE)
10858 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10859 /* The build_method_type_directly() routine prepends 'this' to the argument
10860 list, so we must compensate by getting rid of it. */
10861 outer
10862 = build_method_type_directly
10863 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10864 inner,
10865 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10867 else if (TREE_CODE (type) == OFFSET_TYPE)
10869 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10870 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10872 else
10873 return bottom;
10875 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10876 TYPE_QUALS (type));
10879 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10880 the inner type. */
10881 tree
10882 build_vector_type_for_mode (tree innertype, machine_mode mode)
10884 poly_int64 nunits;
10885 unsigned int bitsize;
10887 switch (GET_MODE_CLASS (mode))
10889 case MODE_VECTOR_BOOL:
10890 case MODE_VECTOR_INT:
10891 case MODE_VECTOR_FLOAT:
10892 case MODE_VECTOR_FRACT:
10893 case MODE_VECTOR_UFRACT:
10894 case MODE_VECTOR_ACCUM:
10895 case MODE_VECTOR_UACCUM:
10896 nunits = GET_MODE_NUNITS (mode);
10897 break;
10899 case MODE_INT:
10900 /* Check that there are no leftover bits. */
10901 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10902 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10903 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10904 break;
10906 default:
10907 gcc_unreachable ();
10910 return make_vector_type (innertype, nunits, mode);
10913 /* Similarly, but takes the inner type and number of units, which must be
10914 a power of two. */
10916 tree
10917 build_vector_type (tree innertype, poly_int64 nunits)
10919 return make_vector_type (innertype, nunits, VOIDmode);
10922 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10924 tree
10925 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10927 gcc_assert (mask_mode != BLKmode);
10929 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10930 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10931 tree bool_type = build_nonstandard_boolean_type (esize);
10933 return make_vector_type (bool_type, nunits, mask_mode);
10936 /* Build a vector type that holds one boolean result for each element of
10937 vector type VECTYPE. The public interface for this operation is
10938 truth_type_for. */
10940 static tree
10941 build_truth_vector_type_for (tree vectype)
10943 machine_mode vector_mode = TYPE_MODE (vectype);
10944 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10946 machine_mode mask_mode;
10947 if (VECTOR_MODE_P (vector_mode)
10948 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10949 return build_truth_vector_type_for_mode (nunits, mask_mode);
10951 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10952 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10953 tree bool_type = build_nonstandard_boolean_type (esize);
10955 return make_vector_type (bool_type, nunits, BLKmode);
10958 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10959 set. */
10961 tree
10962 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10964 tree t = make_vector_type (innertype, nunits, VOIDmode);
10965 tree cand;
10966 /* We always build the non-opaque variant before the opaque one,
10967 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10968 cand = TYPE_NEXT_VARIANT (t);
10969 if (cand
10970 && TYPE_VECTOR_OPAQUE (cand)
10971 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10972 return cand;
10973 /* Otherwise build a variant type and make sure to queue it after
10974 the non-opaque type. */
10975 cand = build_distinct_type_copy (t);
10976 TYPE_VECTOR_OPAQUE (cand) = true;
10977 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10978 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10979 TYPE_NEXT_VARIANT (t) = cand;
10980 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10981 return cand;
10984 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10986 wide_int
10987 vector_cst_int_elt (const_tree t, unsigned int i)
10989 /* First handle elements that are directly encoded. */
10990 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10991 if (i < encoded_nelts)
10992 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
10994 /* Identify the pattern that contains element I and work out the index of
10995 the last encoded element for that pattern. */
10996 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10997 unsigned int pattern = i % npatterns;
10998 unsigned int count = i / npatterns;
10999 unsigned int final_i = encoded_nelts - npatterns + pattern;
11001 /* If there are no steps, the final encoded value is the right one. */
11002 if (!VECTOR_CST_STEPPED_P (t))
11003 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11005 /* Otherwise work out the value from the last two encoded elements. */
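/* For example, with a single stepped pattern encoded as { 1, 3, 5 },
   element I has count == I, diff == 2 and value 5 + (I - 2) * 2, so
   the series continues 7, 9, 11, ... */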
11006 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11007 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11008 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
11009 return wi::to_wide (v2) + (count - 2) * diff;
11012 /* Return the value of element I of VECTOR_CST T. */
11014 tree
11015 vector_cst_elt (const_tree t, unsigned int i)
11017 /* First handle elements that are directly encoded. */
11018 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11019 if (i < encoded_nelts)
11020 return VECTOR_CST_ENCODED_ELT (t, i);
11022 /* If there are no steps, the final encoded value is the right one. */
11023 if (!VECTOR_CST_STEPPED_P (t))
11025 /* Identify the pattern that contains element I and work out the index of
11026 the last encoded element for that pattern. */
11027 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11028 unsigned int pattern = i % npatterns;
11029 unsigned int final_i = encoded_nelts - npatterns + pattern;
11030 return VECTOR_CST_ENCODED_ELT (t, final_i);
11033 /* Otherwise work out the value from the last two encoded elements. */
11034 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11035 vector_cst_int_elt (t, i));
11038 /* Given an initializer INIT, return TRUE if INIT is zero or some
11039 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
11040 null, set *NONZERO if and only if INIT is known not to be all
11041 zeros. The combination of a false return value and *NONZERO set to
11042 false implies that INIT may but need not be all zeros. Other
11043 combinations indicate definitive answers. */
11045 bool
11046 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
11048 bool dummy;
11049 if (!nonzero)
11050 nonzero = &dummy;
11052 /* Conservatively clear NONZERO and set it only if INIT is definitely
11053 not all zero. */
11054 *nonzero = false;
11056 STRIP_NOPS (init);
11058 unsigned HOST_WIDE_INT off = 0;
11060 switch (TREE_CODE (init))
11062 case INTEGER_CST:
11063 if (integer_zerop (init))
11064 return true;
11066 *nonzero = true;
11067 return false;
11069 case REAL_CST:
11070 /* ??? Note that this is not correct for C4X float formats. There,
11071 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11072 negative exponent. */
11073 if (real_zerop (init)
11074 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
11075 return true;
11077 *nonzero = true;
11078 return false;
11080 case FIXED_CST:
11081 if (fixed_zerop (init))
11082 return true;
11084 *nonzero = true;
11085 return false;
11087 case COMPLEX_CST:
11088 if (integer_zerop (init)
11089 || (real_zerop (init)
11090 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11091 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11092 return true;
11094 *nonzero = true;
11095 return false;
11097 case VECTOR_CST:
11098 if (VECTOR_CST_NPATTERNS (init) == 1
11099 && VECTOR_CST_DUPLICATE_P (init)
11100 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11101 return true;
11103 *nonzero = true;
11104 return false;
11106 case CONSTRUCTOR:
11108 if (TREE_CLOBBER_P (init))
11109 return false;
11111 unsigned HOST_WIDE_INT idx;
11112 tree elt;
11114 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11115 if (!initializer_zerop (elt, nonzero))
11116 return false;
11118 return true;
11121 case MEM_REF:
11123 tree arg = TREE_OPERAND (init, 0);
11124 if (TREE_CODE (arg) != ADDR_EXPR)
11125 return false;
11126 tree offset = TREE_OPERAND (init, 1);
11127 if (TREE_CODE (offset) != INTEGER_CST
11128 || !tree_fits_uhwi_p (offset))
11129 return false;
11130 off = tree_to_uhwi (offset);
11131 if (INT_MAX < off)
11132 return false;
11133 arg = TREE_OPERAND (arg, 0);
11134 if (TREE_CODE (arg) != STRING_CST)
11135 return false;
11136 init = arg;
11138 /* Fall through. */
11140 case STRING_CST:
11142 gcc_assert (off <= INT_MAX);
11144 int i = off;
11145 int n = TREE_STRING_LENGTH (init);
11146 if (n <= i)
11147 return false;
11149 /* We need to loop through all elements to handle cases like
11150 "\0" and "\0foobar". */
11151 for (i = 0; i < n; ++i)
11152 if (TREE_STRING_POINTER (init)[i] != '\0')
11154 *nonzero = true;
11155 return false;
11158 return true;
11161 default:
11162 return false;
11166 /* Return true if EXPR is an initializer expression in which every element
11167 is a constant that is numerically equal to 0 or 1. The elements do not
11168 need to be equal to each other. */
11170 bool
11171 initializer_each_zero_or_onep (const_tree expr)
11173 STRIP_ANY_LOCATION_WRAPPER (expr);
11175 switch (TREE_CODE (expr))
11177 case INTEGER_CST:
11178 return integer_zerop (expr) || integer_onep (expr);
11180 case REAL_CST:
11181 return real_zerop (expr) || real_onep (expr);
11183 case VECTOR_CST:
11185 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11186 if (VECTOR_CST_STEPPED_P (expr)
11187 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11188 return false;
11190 for (unsigned int i = 0; i < nelts; ++i)
11192 tree elt = vector_cst_elt (expr, i);
11193 if (!initializer_each_zero_or_onep (elt))
11194 return false;
11197 return true;
11200 default:
11201 return false;
11205 /* Check whether vector VEC consists entirely of equal elements and
11206 that the number of elements corresponds to the type of VEC.
11207 The function returns the first element of the vector
11208 or NULL_TREE if the vector is not uniform. */
11209 tree
11210 uniform_vector_p (const_tree vec)
11212 tree first, t;
11213 unsigned HOST_WIDE_INT i, nelts;
11215 if (vec == NULL_TREE)
11216 return NULL_TREE;
11218 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11220 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11221 return TREE_OPERAND (vec, 0);
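/* A VECTOR_CST is uniform exactly when its encoding is a single
   duplicated pattern; the shared element is then encoded element 0. */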
11223 else if (TREE_CODE (vec) == VECTOR_CST)
11225 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11226 return VECTOR_CST_ENCODED_ELT (vec, 0);
11227 return NULL_TREE;
11230 else if (TREE_CODE (vec) == CONSTRUCTOR
11231 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11233 first = error_mark_node;
11235 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11237 if (i == 0)
11239 first = t;
11240 continue;
11242 if (!operand_equal_p (first, t, 0))
11243 return NULL_TREE;
11245 if (i != nelts)
11246 return NULL_TREE;
11248 return first;
11251 return NULL_TREE;
11254 /* If the argument is an INTEGER_CST, return it. If the argument is a vector
11255 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11256 return NULL_TREE.
11257 Look through location wrappers. */
11259 tree
11260 uniform_integer_cst_p (tree t)
11262 STRIP_ANY_LOCATION_WRAPPER (t);
11264 if (TREE_CODE (t) == INTEGER_CST)
11265 return t;
11267 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11269 t = uniform_vector_p (t);
11270 if (t && TREE_CODE (t) == INTEGER_CST)
11271 return t;
11274 return NULL_TREE;
11277 /* If VECTOR_CST T has a single nonzero element, return the index of that
11278 element, otherwise return -1. */
11280 int
11281 single_nonzero_element (const_tree t)
11283 unsigned HOST_WIDE_INT nelts;
11284 unsigned int repeat_nelts;
11285 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11286 repeat_nelts = nelts;
11287 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11289 nelts = vector_cst_encoded_nelts (t);
11290 repeat_nelts = VECTOR_CST_NPATTERNS (t);
11292 else
11293 return -1;
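/* Scan the (possibly just encoded) elements. A second nonzero element,
   or a nonzero element in the repeated part of the encoding, means
   there is no single nonzero element. */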
11295 int res = -1;
11296 for (unsigned int i = 0; i < nelts; ++i)
11298 tree elt = vector_cst_elt (t, i);
11299 if (!integer_zerop (elt) && !real_zerop (elt))
11301 if (res >= 0 || i >= repeat_nelts)
11302 return -1;
11303 res = i;
11306 return res;
11309 /* Build an empty statement at location LOC. */
11311 tree
11312 build_empty_stmt (location_t loc)
11314 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11315 SET_EXPR_LOCATION (t, loc);
11316 return t;
11320 /* Build an OpenMP clause with code CODE. LOC is the location of the
11321 clause. */
11323 tree
11324 build_omp_clause (location_t loc, enum omp_clause_code code)
11326 tree t;
11327 int size, length;
11329 length = omp_clause_num_ops[code];
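/* struct tree_omp_clause already provides storage for one operand, so
   only LENGTH - 1 extra tree slots are needed. */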
11330 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11332 record_node_allocation_statistics (OMP_CLAUSE, size);
11334 t = (tree) ggc_internal_alloc (size);
11335 memset (t, 0, size);
11336 TREE_SET_CODE (t, OMP_CLAUSE);
11337 OMP_CLAUSE_SET_CODE (t, code);
11338 OMP_CLAUSE_LOCATION (t) = loc;
11340 return t;
11343 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11344 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11345 Except for the CODE and operand count field, other storage for the
11346 object is initialized to zeros. */
11348 tree
11349 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11351 tree t;
11352 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11354 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11355 gcc_assert (len >= 1);
11357 record_node_allocation_statistics (code, length);
11359 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11361 TREE_SET_CODE (t, code);
11363 /* Can't use TREE_OPERAND to store the length because if checking is
11364 enabled, it will try to check the length before we store it. :-P */
11365 t->exp.operands[0] = build_int_cst (sizetype, len);
11367 return t;
11370 /* Helper function for build_call_* functions; build a CALL_EXPR with
11371 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11372 the argument slots. */
11374 static tree
11375 build_call_1 (tree return_type, tree fn, int nargs)
11377 tree t;
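/* A CALL_EXPR has three fixed operands before the arguments: the
   operand count, CALL_EXPR_FN and the static chain. */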
11379 t = build_vl_exp (CALL_EXPR, nargs + 3);
11380 TREE_TYPE (t) = return_type;
11381 CALL_EXPR_FN (t) = fn;
11382 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11384 return t;
11387 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11388 FN and a null static chain slot. NARGS is the number of call arguments
11389 which are specified as "..." arguments. */
11391 tree
11392 build_call_nary (tree return_type, tree fn, int nargs, ...)
11394 tree ret;
11395 va_list args;
11396 va_start (args, nargs);
11397 ret = build_call_valist (return_type, fn, nargs, args);
11398 va_end (args);
11399 return ret;
11402 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11403 FN and a null static chain slot. NARGS is the number of call arguments
11404 which are specified as a va_list ARGS. */
11406 tree
11407 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11409 tree t;
11410 int i;
11412 t = build_call_1 (return_type, fn, nargs);
11413 for (i = 0; i < nargs; i++)
11414 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11415 process_call_operands (t);
11416 return t;
11419 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11420 FN and a null static chain slot. NARGS is the number of call arguments
11421 which are specified as a tree array ARGS. */
11423 tree
11424 build_call_array_loc (location_t loc, tree return_type, tree fn,
11425 int nargs, const tree *args)
11427 tree t;
11428 int i;
11430 t = build_call_1 (return_type, fn, nargs);
11431 for (i = 0; i < nargs; i++)
11432 CALL_EXPR_ARG (t, i) = args[i];
11433 process_call_operands (t);
11434 SET_EXPR_LOCATION (t, loc);
11435 return t;
11438 /* Like build_call_array, but takes a vec. */
11440 tree
11441 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11443 tree ret, t;
11444 unsigned int ix;
11446 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11447 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11448 CALL_EXPR_ARG (ret, ix) = t;
11449 process_call_operands (ret);
11450 return ret;
11453 /* Conveniently construct a function call expression. FNDECL names the
11454 function to be called and N arguments are passed in the array
11455 ARGARRAY. */
11457 tree
11458 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11460 tree fntype = TREE_TYPE (fndecl);
11461 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11463 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11466 /* Conveniently construct a function call expression. FNDECL names the
11467 function to be called and the arguments are passed in the vector
11468 VEC. */
11470 tree
11471 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11473 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11474 vec_safe_address (vec));
11478 /* Conveniently construct a function call expression. FNDECL names the
11479 function to be called, N is the number of arguments, and the "..."
11480 parameters are the argument expressions. */
11482 tree
11483 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11485 va_list ap;
11486 tree *argarray = XALLOCAVEC (tree, n);
11487 int i;
11489 va_start (ap, n);
11490 for (i = 0; i < n; i++)
11491 argarray[i] = va_arg (ap, tree);
11492 va_end (ap);
11493 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11496 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11497 varargs macros aren't supported by all bootstrap compilers. */
11499 tree
11500 build_call_expr (tree fndecl, int n, ...)
11502 va_list ap;
11503 tree *argarray = XALLOCAVEC (tree, n);
11504 int i;
11506 va_start (ap, n);
11507 for (i = 0; i < n; i++)
11508 argarray[i] = va_arg (ap, tree);
11509 va_end (ap);
11510 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11513 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11514 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11515 It will get gimplified later into an ordinary internal function. */
11517 tree
11518 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11519 tree type, int n, const tree *args)
11521 tree t = build_call_1 (type, NULL_TREE, n);
11522 for (int i = 0; i < n; ++i)
11523 CALL_EXPR_ARG (t, i) = args[i];
11524 SET_EXPR_LOCATION (t, loc);
11525 CALL_EXPR_IFN (t) = ifn;
11526 process_call_operands (t);
11527 return t;
11530 /* Build an internal call expression. This is just like CALL_EXPR, except
11531 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
11532 internal function. */
11534 tree
11535 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11536 tree type, int n, ...)
11538 va_list ap;
11539 tree *argarray = XALLOCAVEC (tree, n);
11540 int i;
11542 va_start (ap, n);
11543 for (i = 0; i < n; i++)
11544 argarray[i] = va_arg (ap, tree);
11545 va_end (ap);
11546 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11549 /* Return a function call to FN, if the target is guaranteed to support it,
11550 or null otherwise.
11552 N is the number of arguments, passed in the "...", and TYPE is the
11553 type of the return value. */
11555 tree
11556 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11557 int n, ...)
11559 va_list ap;
11560 tree *argarray = XALLOCAVEC (tree, n);
11561 int i;
11563 va_start (ap, n);
11564 for (i = 0; i < n; i++)
11565 argarray[i] = va_arg (ap, tree);
11566 va_end (ap);
11567 if (internal_fn_p (fn))
11569 internal_fn ifn = as_internal_fn (fn);
11570 if (direct_internal_fn_p (ifn))
11572 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11573 if (!direct_internal_fn_supported_p (ifn, types,
11574 OPTIMIZE_FOR_BOTH))
11575 return NULL_TREE;
11577 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11579 else
11581 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11582 if (!fndecl)
11583 return NULL_TREE;
11584 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11588 /* Return a function call to the appropriate builtin alloca variant.
11590 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11591 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11592 bound for SIZE in case it is not a fixed value. */
11594 tree
11595 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11597 if (max_size >= 0)
11599 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11600 return
11601 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11603 else if (align > 0)
11605 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11606 return build_call_expr (t, 2, size, size_int (align));
11608 else
11610 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11611 return build_call_expr (t, 1, size);
11615 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11616 if SIZE == -1) and return a tree node representing a char* pointer to
11617 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). The STRING_CST value
11618 is the LEN bytes at STR (the representation of the string, which may
11619 be wide). */
11621 tree
11622 build_string_literal (int len, const char *str,
11623 tree eltype /* = char_type_node */,
11624 unsigned HOST_WIDE_INT size /* = -1 */)
11626 tree t = build_string (len, str);
11627 /* Set the maximum valid index based on the string length or SIZE. */
11628 unsigned HOST_WIDE_INT maxidx
11629 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11631 tree index = build_index_type (size_int (maxidx));
11632 eltype = build_type_variant (eltype, 1, 0);
11633 tree type = build_array_type (eltype, index);
11634 TREE_TYPE (t) = type;
11635 TREE_CONSTANT (t) = 1;
11636 TREE_READONLY (t) = 1;
11637 TREE_STATIC (t) = 1;
11639 type = build_pointer_type (eltype);
11640 t = build1 (ADDR_EXPR, type,
11641 build4 (ARRAY_REF, eltype,
11642 t, integer_zero_node, NULL_TREE, NULL_TREE));
11643 return t;
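/* Editor's note: an illustrative sketch, not part of the original source.
   A NUL-terminated narrow string constant usable as a char * argument can
   be built with the defaults documented above:

     const char *msg = "%d\n";
     tree fmt = build_string_literal (strlen (msg) + 1, msg);

   the result is the ADDR_EXPR described in the comment; msg and fmt are
   hypothetical.  */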
11648 /* Return true if T (assumed to be a DECL) must be assigned a memory
11649 location. */
11651 bool
11652 needs_to_live_in_memory (const_tree t)
11654 return (TREE_ADDRESSABLE (t)
11655 || is_global_var (t)
11656 || (TREE_CODE (t) == RESULT_DECL
11657 && !DECL_BY_REFERENCE (t)
11658 && aggregate_value_p (t, current_function_decl)));
11661 /* Return the value of the constant X, sign-extended. */
11663 HOST_WIDE_INT
11664 int_cst_value (const_tree x)
11666 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11667 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11669 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11670 gcc_assert (cst_and_fits_in_hwi (x));
11672 if (bits < HOST_BITS_PER_WIDE_INT)
11674 bool negative = ((val >> (bits - 1)) & 1) != 0;
11675 if (negative)
11676 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11677 else
11678 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11681 return val;
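/* Editor's worked example, not part of the original source: for a constant
   of an 8-bit signed type whose low HOST_WIDE_INT word is 0xfe, BITS is 8
   and the sign bit is set, so the code above ORs in HOST_WIDE_INT_M1U << 8
   and returns -2; with the sign bit clear (say 0x7e) the mask leaves 126.  */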
11684 /* If TYPE is an integral or pointer type, return an integer type with
11685 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11686 if TYPE is already an integer type of signedness UNSIGNEDP.
11687 If TYPE is a floating-point type, return an integer type with the same
11688 bitsize and with the signedness given by UNSIGNEDP; this is useful
11689 when doing bit-level operations on a floating-point value. */
11691 tree
11692 signed_or_unsigned_type_for (int unsignedp, tree type)
11694 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11695 return type;
11697 if (TREE_CODE (type) == VECTOR_TYPE)
11699 tree inner = TREE_TYPE (type);
11700 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11701 if (!inner2)
11702 return NULL_TREE;
11703 if (inner == inner2)
11704 return type;
11705 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11708 if (TREE_CODE (type) == COMPLEX_TYPE)
11710 tree inner = TREE_TYPE (type);
11711 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11712 if (!inner2)
11713 return NULL_TREE;
11714 if (inner == inner2)
11715 return type;
11716 return build_complex_type (inner2);
11719 unsigned int bits;
11720 if (INTEGRAL_TYPE_P (type)
11721 || POINTER_TYPE_P (type)
11722 || TREE_CODE (type) == OFFSET_TYPE)
11723 bits = TYPE_PRECISION (type);
11724 else if (TREE_CODE (type) == REAL_TYPE)
11725 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11726 else
11727 return NULL_TREE;
11729 return build_nonstandard_integer_type (bits, unsignedp);
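/* Editor's note: illustrative results, not part of the original source,
   using the signed_type_for / unsigned_type_for wrappers defined just
   below:

     unsigned_type_for (integer_type_node)
       -> unsigned int
     signed_type_for (ptr_type_node)
       -> a signed integer type of pointer precision
     unsigned_type_for (double_type_node)
       -> a 64-bit unsigned integer type on targets with a 64-bit double  */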
11732 /* If TYPE is an integral or pointer type, return an integer type with
11733 the same precision which is unsigned, or itself if TYPE is already an
11734 unsigned integer type. If TYPE is a floating-point type, return an
11735 unsigned integer type with the same bitsize as TYPE. */
11737 tree
11738 unsigned_type_for (tree type)
11740 return signed_or_unsigned_type_for (1, type);
11743 /* If TYPE is an integral or pointer type, return an integer type with
11744 the same precision which is signed, or itself if TYPE is already a
11745 signed integer type. If TYPE is a floating-point type, return a
11746 signed integer type with the same bitsize as TYPE. */
11748 tree
11749 signed_type_for (tree type)
11751 return signed_or_unsigned_type_for (0, type);
11754 /* If TYPE is a vector type, return a signed integer vector type with the
11755 same width and number of subparts. Otherwise return boolean_type_node. */
11757 tree
11758 truth_type_for (tree type)
11760 if (TREE_CODE (type) == VECTOR_TYPE)
11762 if (VECTOR_BOOLEAN_TYPE_P (type))
11763 return type;
11764 return build_truth_vector_type_for (type);
11766 else
11767 return boolean_type_node;
11770 /* Returns the largest value obtainable by casting something in INNER type to
11771 OUTER type. */
11773 tree
11774 upper_bound_in_type (tree outer, tree inner)
11776 unsigned int det = 0;
11777 unsigned oprec = TYPE_PRECISION (outer);
11778 unsigned iprec = TYPE_PRECISION (inner);
11779 unsigned prec;
11781 /* Compute a unique number for every combination. */
11782 det |= (oprec > iprec) ? 4 : 0;
11783 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11784 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11786 /* Determine the exponent to use. */
11787 switch (det)
11789 case 0:
11790 case 1:
11791 /* oprec <= iprec, outer: signed, inner: don't care. */
11792 prec = oprec - 1;
11793 break;
11794 case 2:
11795 case 3:
11796 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11797 prec = oprec;
11798 break;
11799 case 4:
11800 /* oprec > iprec, outer: signed, inner: signed. */
11801 prec = iprec - 1;
11802 break;
11803 case 5:
11804 /* oprec > iprec, outer: signed, inner: unsigned. */
11805 prec = iprec;
11806 break;
11807 case 6:
11808 /* oprec > iprec, outer: unsigned, inner: signed. */
11809 prec = oprec;
11810 break;
11811 case 7:
11812 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11813 prec = iprec;
11814 break;
11815 default:
11816 gcc_unreachable ();
11819 return wide_int_to_tree (outer,
11820 wi::mask (prec, false, TYPE_PRECISION (outer)));
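/* Editor's worked example, not part of the original source: casting from a
   signed 8-bit INNER to a signed 16-bit OUTER gives det = 4 (oprec > iprec,
   both signed), so prec = iprec - 1 = 7 and the result is the 16-bit
   constant 0x7f, i.e. 127, the largest value reachable through the cast.  */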
11823 /* Returns the smallest value obtainable by casting something in INNER type to
11824 OUTER type. */
11826 tree
11827 lower_bound_in_type (tree outer, tree inner)
11829 unsigned oprec = TYPE_PRECISION (outer);
11830 unsigned iprec = TYPE_PRECISION (inner);
11832 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11833 and obtain 0. */
11834 if (TYPE_UNSIGNED (outer)
11835 /* If we are widening something of an unsigned type, OUTER type
11836 contains all values of INNER type. In particular, both INNER
11837 and OUTER types have zero in common. */
11838 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11839 return build_int_cst (outer, 0);
11840 else
11842 /* If we are widening a signed type to another signed type, we
11843 want to obtain -2^^(iprec-1). If we are keeping the
11844 precision or narrowing to a signed type, we want to obtain
11845 -2^(oprec-1). */
11846 unsigned prec = oprec > iprec ? iprec : oprec;
11847 return wide_int_to_tree (outer,
11848 wi::mask (prec - 1, true,
11849 TYPE_PRECISION (outer)));
11853 /* Return nonzero if two operands that are suitable for PHI nodes are
11854 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11855 SSA_NAME or invariant. Note that this is strictly an optimization.
11856 That is, callers of this function can directly call operand_equal_p
11857 and get the same result, only slower. */
11859 int
11860 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11862 if (arg0 == arg1)
11863 return 1;
11864 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11865 return 0;
11866 return operand_equal_p (arg0, arg1, 0);
11869 /* Returns the number of zeros at the end of the binary representation of X. */
11871 tree
11872 num_ending_zeros (const_tree x)
11874 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
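/* Editor's worked example, not part of the original source: for X equal to
   40 (binary 101000) wi::ctz yields 3, so the function returns the
   INTEGER_CST 3 in X's type.  */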
11878 #define WALK_SUBTREE(NODE) \
11879 do \
11881 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11882 if (result) \
11883 return result; \
11885 while (0)
11887 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11888 be walked whenever a type is seen in the tree. The rest of the operands and
11889 the return value are as for walk_tree. */
11891 static tree
11892 walk_type_fields (tree type, walk_tree_fn func, void *data,
11893 hash_set<tree> *pset, walk_tree_lh lh)
11895 tree result = NULL_TREE;
11897 switch (TREE_CODE (type))
11899 case POINTER_TYPE:
11900 case REFERENCE_TYPE:
11901 case VECTOR_TYPE:
11902 /* We have to worry about mutually recursive pointers. These can't
11903 be written in C. They can in Ada. It's pathological, but
11904 there's an ACATS test (c38102a) that checks it. Deal with this
11905 by checking if we're pointing to another pointer, that one
11906 points to another pointer, that one does too, and we have no htab.
11907 If so, get a hash table. We check three levels deep to avoid
11908 the cost of the hash table if we don't need one. */
11909 if (POINTER_TYPE_P (TREE_TYPE (type))
11910 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11911 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11912 && !pset)
11914 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11915 func, data);
11916 if (result)
11917 return result;
11919 break;
11922 /* fall through */
11924 case COMPLEX_TYPE:
11925 WALK_SUBTREE (TREE_TYPE (type));
11926 break;
11928 case METHOD_TYPE:
11929 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11931 /* Fall through. */
11933 case FUNCTION_TYPE:
11934 WALK_SUBTREE (TREE_TYPE (type));
11936 tree arg;
11938 /* We never want to walk into default arguments. */
11939 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11940 WALK_SUBTREE (TREE_VALUE (arg));
11942 break;
11944 case ARRAY_TYPE:
11945 /* Don't follow this node's type if it is a pointer, for fear that
11946 we'll have infinite recursion. If we have a PSET, then we
11947 need not fear. */
11948 if (pset
11949 || (!POINTER_TYPE_P (TREE_TYPE (type))
11950 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11951 WALK_SUBTREE (TREE_TYPE (type));
11952 WALK_SUBTREE (TYPE_DOMAIN (type));
11953 break;
11955 case OFFSET_TYPE:
11956 WALK_SUBTREE (TREE_TYPE (type));
11957 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11958 break;
11960 default:
11961 break;
11964 return NULL_TREE;
11967 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11968 called with the DATA and the address of each sub-tree. If FUNC returns a
11969 non-NULL value, the traversal is stopped, and the value returned by FUNC
11970 is returned. If PSET is non-NULL it is used to record the nodes visited,
11971 and to avoid visiting a node more than once. */
11973 tree
11974 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11975 hash_set<tree> *pset, walk_tree_lh lh)
11977 enum tree_code code;
11978 int walk_subtrees;
11979 tree result;
11981 #define WALK_SUBTREE_TAIL(NODE) \
11982 do \
11984 tp = & (NODE); \
11985 goto tail_recurse; \
11987 while (0)
11989 tail_recurse:
11990 /* Skip empty subtrees. */
11991 if (!*tp)
11992 return NULL_TREE;
11994 /* Don't walk the same tree twice, if the user has requested
11995 that we avoid doing so. */
11996 if (pset && pset->add (*tp))
11997 return NULL_TREE;
11999 /* Call the function. */
12000 walk_subtrees = 1;
12001 result = (*func) (tp, &walk_subtrees, data);
12003 /* If we found something, return it. */
12004 if (result)
12005 return result;
12007 code = TREE_CODE (*tp);
12009 /* Even if we didn't, FUNC may have decided that there was nothing
12010 interesting below this point in the tree. */
12011 if (!walk_subtrees)
12013 /* But we still need to check our siblings. */
12014 if (code == TREE_LIST)
12015 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12016 else if (code == OMP_CLAUSE)
12017 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12018 else
12019 return NULL_TREE;
12022 if (lh)
12024 result = (*lh) (tp, &walk_subtrees, func, data, pset);
12025 if (result || !walk_subtrees)
12026 return result;
12029 switch (code)
12031 case ERROR_MARK:
12032 case IDENTIFIER_NODE:
12033 case INTEGER_CST:
12034 case REAL_CST:
12035 case FIXED_CST:
12036 case VECTOR_CST:
12037 case STRING_CST:
12038 case BLOCK:
12039 case PLACEHOLDER_EXPR:
12040 case SSA_NAME:
12041 case FIELD_DECL:
12042 case RESULT_DECL:
12043 /* None of these have subtrees other than those already walked
12044 above. */
12045 break;
12047 case TREE_LIST:
12048 WALK_SUBTREE (TREE_VALUE (*tp));
12049 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12050 break;
12052 case TREE_VEC:
12054 int len = TREE_VEC_LENGTH (*tp);
12056 if (len == 0)
12057 break;
12059 /* Walk all elements but the first. */
12060 while (--len)
12061 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12063 /* Now walk the first one as a tail call. */
12064 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12067 case COMPLEX_CST:
12068 WALK_SUBTREE (TREE_REALPART (*tp));
12069 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12071 case CONSTRUCTOR:
12073 unsigned HOST_WIDE_INT idx;
12074 constructor_elt *ce;
12076 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12077 idx++)
12078 WALK_SUBTREE (ce->value);
12080 break;
12082 case SAVE_EXPR:
12083 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12085 case BIND_EXPR:
12087 tree decl;
12088 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12090 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
12091 into declarations that are just mentioned, rather than
12092 declared; they don't really belong to this part of the tree.
12093 And, we can see cycles: the initializer for a declaration
12094 can refer to the declaration itself. */
12095 WALK_SUBTREE (DECL_INITIAL (decl));
12096 WALK_SUBTREE (DECL_SIZE (decl));
12097 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12099 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12102 case STATEMENT_LIST:
12104 tree_stmt_iterator i;
12105 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12106 WALK_SUBTREE (*tsi_stmt_ptr (i));
12108 break;
12110 case OMP_CLAUSE:
12111 switch (OMP_CLAUSE_CODE (*tp))
12113 case OMP_CLAUSE_GANG:
12114 case OMP_CLAUSE__GRIDDIM_:
12115 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12116 /* FALLTHRU */
12118 case OMP_CLAUSE_ASYNC:
12119 case OMP_CLAUSE_WAIT:
12120 case OMP_CLAUSE_WORKER:
12121 case OMP_CLAUSE_VECTOR:
12122 case OMP_CLAUSE_NUM_GANGS:
12123 case OMP_CLAUSE_NUM_WORKERS:
12124 case OMP_CLAUSE_VECTOR_LENGTH:
12125 case OMP_CLAUSE_PRIVATE:
12126 case OMP_CLAUSE_SHARED:
12127 case OMP_CLAUSE_FIRSTPRIVATE:
12128 case OMP_CLAUSE_COPYIN:
12129 case OMP_CLAUSE_COPYPRIVATE:
12130 case OMP_CLAUSE_FINAL:
12131 case OMP_CLAUSE_IF:
12132 case OMP_CLAUSE_NUM_THREADS:
12133 case OMP_CLAUSE_SCHEDULE:
12134 case OMP_CLAUSE_UNIFORM:
12135 case OMP_CLAUSE_DEPEND:
12136 case OMP_CLAUSE_NONTEMPORAL:
12137 case OMP_CLAUSE_NUM_TEAMS:
12138 case OMP_CLAUSE_THREAD_LIMIT:
12139 case OMP_CLAUSE_DEVICE:
12140 case OMP_CLAUSE_DIST_SCHEDULE:
12141 case OMP_CLAUSE_SAFELEN:
12142 case OMP_CLAUSE_SIMDLEN:
12143 case OMP_CLAUSE_ORDERED:
12144 case OMP_CLAUSE_PRIORITY:
12145 case OMP_CLAUSE_GRAINSIZE:
12146 case OMP_CLAUSE_NUM_TASKS:
12147 case OMP_CLAUSE_HINT:
12148 case OMP_CLAUSE_TO_DECLARE:
12149 case OMP_CLAUSE_LINK:
12150 case OMP_CLAUSE_USE_DEVICE_PTR:
12151 case OMP_CLAUSE_USE_DEVICE_ADDR:
12152 case OMP_CLAUSE_IS_DEVICE_PTR:
12153 case OMP_CLAUSE_INCLUSIVE:
12154 case OMP_CLAUSE_EXCLUSIVE:
12155 case OMP_CLAUSE__LOOPTEMP_:
12156 case OMP_CLAUSE__REDUCTEMP_:
12157 case OMP_CLAUSE__CONDTEMP_:
12158 case OMP_CLAUSE__SCANTEMP_:
12159 case OMP_CLAUSE__SIMDUID_:
12160 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12161 /* FALLTHRU */
12163 case OMP_CLAUSE_INDEPENDENT:
12164 case OMP_CLAUSE_NOWAIT:
12165 case OMP_CLAUSE_DEFAULT:
12166 case OMP_CLAUSE_UNTIED:
12167 case OMP_CLAUSE_MERGEABLE:
12168 case OMP_CLAUSE_PROC_BIND:
12169 case OMP_CLAUSE_DEVICE_TYPE:
12170 case OMP_CLAUSE_INBRANCH:
12171 case OMP_CLAUSE_NOTINBRANCH:
12172 case OMP_CLAUSE_FOR:
12173 case OMP_CLAUSE_PARALLEL:
12174 case OMP_CLAUSE_SECTIONS:
12175 case OMP_CLAUSE_TASKGROUP:
12176 case OMP_CLAUSE_NOGROUP:
12177 case OMP_CLAUSE_THREADS:
12178 case OMP_CLAUSE_SIMD:
12179 case OMP_CLAUSE_DEFAULTMAP:
12180 case OMP_CLAUSE_ORDER:
12181 case OMP_CLAUSE_BIND:
12182 case OMP_CLAUSE_AUTO:
12183 case OMP_CLAUSE_SEQ:
12184 case OMP_CLAUSE_TILE:
12185 case OMP_CLAUSE__SIMT_:
12186 case OMP_CLAUSE_IF_PRESENT:
12187 case OMP_CLAUSE_FINALIZE:
12188 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12190 case OMP_CLAUSE_LASTPRIVATE:
12191 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12192 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12193 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12195 case OMP_CLAUSE_COLLAPSE:
12197 int i;
12198 for (i = 0; i < 3; i++)
12199 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12200 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12203 case OMP_CLAUSE_LINEAR:
12204 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12205 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12206 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12207 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12209 case OMP_CLAUSE_ALIGNED:
12210 case OMP_CLAUSE_FROM:
12211 case OMP_CLAUSE_TO:
12212 case OMP_CLAUSE_MAP:
12213 case OMP_CLAUSE__CACHE_:
12214 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12215 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12216 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12218 case OMP_CLAUSE_REDUCTION:
12219 case OMP_CLAUSE_TASK_REDUCTION:
12220 case OMP_CLAUSE_IN_REDUCTION:
12222 int i;
12223 for (i = 0; i < 5; i++)
12224 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12225 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12228 default:
12229 gcc_unreachable ();
12231 break;
12233 case TARGET_EXPR:
12235 int i, len;
12237 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12238 But, we only want to walk once. */
12239 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12240 for (i = 0; i < len; ++i)
12241 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12242 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12245 case DECL_EXPR:
12246 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12247 defining. We only want to walk into these fields of a type in this
12248 case and not in the general case of a mere reference to the type.
12250 The criterion is as follows: if the field can be an expression, it
12251 must be walked only here. This should be in keeping with the fields
12252 that are directly gimplified in gimplify_type_sizes in order for the
12253 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12254 variable-sized types.
12256 Note that DECLs get walked as part of processing the BIND_EXPR. */
12257 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12259 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12260 if (TREE_CODE (*type_p) == ERROR_MARK)
12261 return NULL_TREE;
12263 /* Call the function for the type. See if it returns anything or
12264 doesn't want us to continue. If we are to continue, walk both
12265 the normal fields and those for the declaration case. */
12266 result = (*func) (type_p, &walk_subtrees, data);
12267 if (result || !walk_subtrees)
12268 return result;
12270 /* But do not walk a pointed-to type since it may itself need to
12271 be walked in the declaration case if it isn't anonymous. */
12272 if (!POINTER_TYPE_P (*type_p))
12274 result = walk_type_fields (*type_p, func, data, pset, lh);
12275 if (result)
12276 return result;
12279 /* If this is a record type, also walk the fields. */
12280 if (RECORD_OR_UNION_TYPE_P (*type_p))
12282 tree field;
12284 for (field = TYPE_FIELDS (*type_p); field;
12285 field = DECL_CHAIN (field))
12287 /* We'd like to look at the type of the field, but we can
12288 easily get infinite recursion. So assume it's pointed
12289 to elsewhere in the tree. Also, ignore things that
12290 aren't fields. */
12291 if (TREE_CODE (field) != FIELD_DECL)
12292 continue;
12294 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12295 WALK_SUBTREE (DECL_SIZE (field));
12296 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12297 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12298 WALK_SUBTREE (DECL_QUALIFIER (field));
12302 /* Same for scalar types. */
12303 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12304 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12305 || TREE_CODE (*type_p) == INTEGER_TYPE
12306 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12307 || TREE_CODE (*type_p) == REAL_TYPE)
12309 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12310 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12313 WALK_SUBTREE (TYPE_SIZE (*type_p));
12314 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12316 /* FALLTHRU */
12318 default:
12319 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12321 int i, len;
12323 /* Walk over all the sub-trees of this operand. */
12324 len = TREE_OPERAND_LENGTH (*tp);
12326 /* Go through the subtrees. We need to do this in forward order so
12327 that the scope of a FOR_EXPR is handled properly. */
12328 if (len)
12330 for (i = 0; i < len - 1; ++i)
12331 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12332 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12335 /* If this is a type, walk the needed fields in the type. */
12336 else if (TYPE_P (*tp))
12337 return walk_type_fields (*tp, func, data, pset, lh);
12338 break;
12341 /* We didn't find what we were looking for. */
12342 return NULL_TREE;
12344 #undef WALK_SUBTREE_TAIL
12346 #undef WALK_SUBTREE
12348 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12350 tree
12351 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12352 walk_tree_lh lh)
12354 tree result;
12356 hash_set<tree> pset;
12357 result = walk_tree_1 (tp, func, data, &pset, lh);
12358 return result;
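/* Editor's note: an illustrative sketch of the walk_tree interface, not
   part of the original source.  A minimal walk_tree_fn callback that counts
   the CALL_EXPRs reachable from EXPR might look like this (the callback
   name, EXPR and the counter are hypothetical):

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(unsigned *) data;
       return NULL_TREE;
     }

     unsigned n = 0;
     walk_tree_without_duplicates (&expr, count_calls_r, &n);

   returning a non-NULL tree from the callback would stop the walk and
   propagate that value back to the caller.  */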
12362 tree
12363 tree_block (tree t)
12365 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12367 if (IS_EXPR_CODE_CLASS (c))
12368 return LOCATION_BLOCK (t->exp.locus);
12369 gcc_unreachable ();
12370 return NULL;
12373 void
12374 tree_set_block (tree t, tree b)
12376 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12378 if (IS_EXPR_CODE_CLASS (c))
12380 t->exp.locus = set_block (t->exp.locus, b);
12382 else
12383 gcc_unreachable ();
12386 /* Create a nameless artificial label and put it in the current
12387 function context. The label has a location of LOC. Returns the
12388 newly created label. */
12390 tree
12391 create_artificial_label (location_t loc)
12393 tree lab = build_decl (loc,
12394 LABEL_DECL, NULL_TREE, void_type_node);
12396 DECL_ARTIFICIAL (lab) = 1;
12397 DECL_IGNORED_P (lab) = 1;
12398 DECL_CONTEXT (lab) = current_function_decl;
12399 return lab;
12402 /* Given a tree, try to return a useful variable name that we can use
12403 to prefix a temporary that is being assigned the value of the tree.
12404 I.e., given <temp> = &A, return A. */
12406 const char *
12407 get_name (tree t)
12409 tree stripped_decl;
12411 stripped_decl = t;
12412 STRIP_NOPS (stripped_decl);
12413 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12414 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12415 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12417 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12418 if (!name)
12419 return NULL;
12420 return IDENTIFIER_POINTER (name);
12422 else
12424 switch (TREE_CODE (stripped_decl))
12426 case ADDR_EXPR:
12427 return get_name (TREE_OPERAND (stripped_decl, 0));
12428 default:
12429 return NULL;
12434 /* Return true if FNTYPE has a variable argument list. */
12436 bool
12437 stdarg_p (const_tree fntype)
12439 function_args_iterator args_iter;
12440 tree n = NULL_TREE, t;
12442 if (!fntype)
12443 return false;
12445 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12447 n = t;
12450 return n != NULL_TREE && n != void_type_node;
12453 /* Return true if FNTYPE has a prototype. */
12455 bool
12456 prototype_p (const_tree fntype)
12458 tree t;
12460 gcc_assert (fntype != NULL_TREE);
12462 t = TYPE_ARG_TYPES (fntype);
12463 return (t != NULL_TREE);
12466 /* If BLOCK is inlined from an __attribute__((__artificial__))
12467 routine, return a pointer to the location from where it has been
12468 called. */
12469 location_t *
12470 block_nonartificial_location (tree block)
12472 location_t *ret = NULL;
12474 while (block && TREE_CODE (block) == BLOCK
12475 && BLOCK_ABSTRACT_ORIGIN (block))
12477 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12478 if (TREE_CODE (ao) == FUNCTION_DECL)
12480 /* If AO is an artificial inline, point RET to the
12481 call site locus at which it has been inlined and continue
12482 the loop, in case AO's caller is also an artificial
12483 inline. */
12484 if (DECL_DECLARED_INLINE_P (ao)
12485 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12486 ret = &BLOCK_SOURCE_LOCATION (block);
12487 else
12488 break;
12490 else if (TREE_CODE (ao) != BLOCK)
12491 break;
12493 block = BLOCK_SUPERCONTEXT (block);
12495 return ret;
12499 /* If EXP is inlined from an __attribute__((__artificial__))
12500 function, return the location of the original call expression. */
12502 location_t
12503 tree_nonartificial_location (tree exp)
12505 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12507 if (loc)
12508 return *loc;
12509 else
12510 return EXPR_LOCATION (exp);
12514 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12515 nodes. */
12517 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12519 hashval_t
12520 cl_option_hasher::hash (tree x)
12522 const_tree const t = x;
12523 const char *p;
12524 size_t i;
12525 size_t len = 0;
12526 hashval_t hash = 0;
12528 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12530 p = (const char *)TREE_OPTIMIZATION (t);
12531 len = sizeof (struct cl_optimization);
12534 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12535 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12537 else
12538 gcc_unreachable ();
12540 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12541 something else. */
12542 for (i = 0; i < len; i++)
12543 if (p[i])
12544 hash = (hash << 4) ^ ((i << 2) | p[i]);
12546 return hash;
12549 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12550 TARGET_OPTION tree node) is the same as that given by *Y, which is a node
12551 of the same kind. */
12553 bool
12554 cl_option_hasher::equal (tree x, tree y)
12556 const_tree const xt = x;
12557 const_tree const yt = y;
12559 if (TREE_CODE (xt) != TREE_CODE (yt))
12560 return 0;
12562 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12563 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12564 TREE_OPTIMIZATION (yt));
12565 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12566 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12567 TREE_TARGET_OPTION (yt));
12568 else
12569 gcc_unreachable ();
12572 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12574 tree
12575 build_optimization_node (struct gcc_options *opts)
12577 tree t;
12579 /* Use the cache of optimization nodes. */
12581 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12582 opts);
12584 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12585 t = *slot;
12586 if (!t)
12588 /* Insert this one into the hash table. */
12589 t = cl_optimization_node;
12590 *slot = t;
12592 /* Make a new node for next time round. */
12593 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12596 return t;
12599 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12601 tree
12602 build_target_option_node (struct gcc_options *opts)
12604 tree t;
12606 /* Use the cache of optimization nodes. */
12608 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12609 opts);
12611 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12612 t = *slot;
12613 if (!t)
12615 /* Insert this one into the hash table. */
12616 t = cl_target_option_node;
12617 *slot = t;
12619 /* Make a new node for next time round. */
12620 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12623 return t;
12626 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12627 so that they aren't saved during PCH writing. */
12629 void
12630 prepare_target_option_nodes_for_pch (void)
12632 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12633 for (; iter != cl_option_hash_table->end (); ++iter)
12634 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12635 TREE_TARGET_GLOBALS (*iter) = NULL;
12638 /* Determine the "ultimate origin" of a block. */
12640 tree
12641 block_ultimate_origin (const_tree block)
12643 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12645 if (origin == NULL_TREE)
12646 return NULL_TREE;
12647 else
12649 gcc_checking_assert ((DECL_P (origin)
12650 && DECL_ORIGIN (origin) == origin)
12651 || BLOCK_ORIGIN (origin) == origin);
12652 return origin;
12656 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12657 no instruction. */
12659 bool
12660 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12662 /* Do not strip casts into or out of differing address spaces. */
12663 if (POINTER_TYPE_P (outer_type)
12664 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12666 if (!POINTER_TYPE_P (inner_type)
12667 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12668 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12669 return false;
12671 else if (POINTER_TYPE_P (inner_type)
12672 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12674 /* We already know that outer_type is not a pointer with
12675 a non-generic address space. */
12676 return false;
12679 /* Use precision rather then machine mode when we can, which gives
12680 the correct answer even for submode (bit-field) types. */
12681 if ((INTEGRAL_TYPE_P (outer_type)
12682 || POINTER_TYPE_P (outer_type)
12683 || TREE_CODE (outer_type) == OFFSET_TYPE)
12684 && (INTEGRAL_TYPE_P (inner_type)
12685 || POINTER_TYPE_P (inner_type)
12686 || TREE_CODE (inner_type) == OFFSET_TYPE))
12687 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12689 /* Otherwise fall back on comparing machine modes (e.g. for
12690 aggregate types, floats). */
12691 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
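/* Editor's note: illustrative results, not part of the original source:

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)
       -> true, the precisions match and only the sign differs
     tree_nop_conversion_p (long_long_integer_type_node, integer_type_node)
       -> false on targets where long long is wider than int

   matching the precision-based test above.  */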
12694 /* Return true iff conversion in EXP generates no instruction. Mark
12695 it inline so that we fully inline into the stripping functions even
12696 though we have two uses of this function. */
12698 static inline bool
12699 tree_nop_conversion (const_tree exp)
12701 tree outer_type, inner_type;
12703 if (location_wrapper_p (exp))
12704 return true;
12705 if (!CONVERT_EXPR_P (exp)
12706 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12707 return false;
12709 outer_type = TREE_TYPE (exp);
12710 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12711 if (!inner_type || inner_type == error_mark_node)
12712 return false;
12714 return tree_nop_conversion_p (outer_type, inner_type);
12717 /* Return true iff conversion in EXP generates no instruction. Don't
12718 consider conversions changing the signedness. */
12720 static bool
12721 tree_sign_nop_conversion (const_tree exp)
12723 tree outer_type, inner_type;
12725 if (!tree_nop_conversion (exp))
12726 return false;
12728 outer_type = TREE_TYPE (exp);
12729 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12731 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12732 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12735 /* Strip conversions from EXP according to tree_nop_conversion and
12736 return the resulting expression. */
12738 tree
12739 tree_strip_nop_conversions (tree exp)
12741 while (tree_nop_conversion (exp))
12742 exp = TREE_OPERAND (exp, 0);
12743 return exp;
12746 /* Strip conversions from EXP according to tree_sign_nop_conversion
12747 and return the resulting expression. */
12749 tree
12750 tree_strip_sign_nop_conversions (tree exp)
12752 while (tree_sign_nop_conversion (exp))
12753 exp = TREE_OPERAND (exp, 0);
12754 return exp;
12757 /* Avoid any floating point extensions from EXP. */
12758 tree
12759 strip_float_extensions (tree exp)
12761 tree sub, expt, subt;
12763 /* For a floating-point constant, look up the narrowest type that can hold
12764 it properly and handle it like (type)(narrowest_type)constant.
12765 This way we can optimize for instance a=a*2.0 where "a" is float
12766 but 2.0 is a double constant. */
12767 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12769 REAL_VALUE_TYPE orig;
12770 tree type = NULL;
12772 orig = TREE_REAL_CST (exp);
12773 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12774 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12775 type = float_type_node;
12776 else if (TYPE_PRECISION (TREE_TYPE (exp))
12777 > TYPE_PRECISION (double_type_node)
12778 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12779 type = double_type_node;
12780 if (type)
12781 return build_real_truncate (type, orig);
12784 if (!CONVERT_EXPR_P (exp))
12785 return exp;
12787 sub = TREE_OPERAND (exp, 0);
12788 subt = TREE_TYPE (sub);
12789 expt = TREE_TYPE (exp);
12791 if (!FLOAT_TYPE_P (subt))
12792 return exp;
12794 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12795 return exp;
12797 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12798 return exp;
12800 return strip_float_extensions (sub);
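/* Editor's note: illustrative behaviour for the a = a * 2.0 case mentioned
   above, not part of the original source:

     strip_float_extensions ((double) f)  -> f      where f has type float
     strip_float_extensions (2.0)         -> 2.0f   the double REAL_CST is
                                                    exactly representable

   so the caller can then perform the multiplication in float.  */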
12803 /* Strip out all handled components that produce invariant
12804 offsets. */
12806 const_tree
12807 strip_invariant_refs (const_tree op)
12809 while (handled_component_p (op))
12811 switch (TREE_CODE (op))
12813 case ARRAY_REF:
12814 case ARRAY_RANGE_REF:
12815 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12816 || TREE_OPERAND (op, 2) != NULL_TREE
12817 || TREE_OPERAND (op, 3) != NULL_TREE)
12818 return NULL;
12819 break;
12821 case COMPONENT_REF:
12822 if (TREE_OPERAND (op, 2) != NULL_TREE)
12823 return NULL;
12824 break;
12826 default:;
12828 op = TREE_OPERAND (op, 0);
12831 return op;
12834 static GTY(()) tree gcc_eh_personality_decl;
12836 /* Return the GCC personality function decl. */
12838 tree
12839 lhd_gcc_personality (void)
12841 if (!gcc_eh_personality_decl)
12842 gcc_eh_personality_decl = build_personality_function ("gcc");
12843 return gcc_eh_personality_decl;
12846 /* TARGET is the call target of a GIMPLE call statement
12847 (obtained by gimple_call_fn). Return true if it is an
12848 OBJ_TYPE_REF representing a virtual call to a C++ method.
12849 (As opposed to OBJ_TYPE_REF representing objc calls
12850 through a cast where middle-end devirtualization machinery
12851 can't apply.) */
12853 bool
12854 virtual_method_call_p (const_tree target)
12856 if (TREE_CODE (target) != OBJ_TYPE_REF)
12857 return false;
12858 tree t = TREE_TYPE (target);
12859 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12860 t = TREE_TYPE (t);
12861 if (TREE_CODE (t) == FUNCTION_TYPE)
12862 return false;
12863 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12864 /* If we do not have BINFO associated, it means that type was built
12865 without devirtualization enabled. Do not consider this a virtual
12866 call. */
12867 if (!TYPE_BINFO (obj_type_ref_class (target)))
12868 return false;
12869 return true;
12872 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12874 static tree
12875 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12877 unsigned int i;
12878 tree base_binfo, b;
12880 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12881 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12882 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12883 return base_binfo;
12884 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12885 return b;
12886 return NULL;
12889 /* Try to find a base info of BINFO that would have its field decl at offset
12890 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12891 found, return it; otherwise return NULL_TREE. */
12893 tree
12894 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12896 tree type = BINFO_TYPE (binfo);
12898 while (true)
12900 HOST_WIDE_INT pos, size;
12901 tree fld;
12902 int i;
12904 if (types_same_for_odr (type, expected_type))
12905 return binfo;
12906 if (maybe_lt (offset, 0))
12907 return NULL_TREE;
12909 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12911 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12912 continue;
12914 pos = int_bit_position (fld);
12915 size = tree_to_uhwi (DECL_SIZE (fld));
12916 if (known_in_range_p (offset, pos, size))
12917 break;
12919 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12920 return NULL_TREE;
12922 /* Offset 0 indicates the primary base, whose vtable contents are
12923 represented in the binfo for the derived class. */
12924 else if (maybe_ne (offset, 0))
12926 tree found_binfo = NULL, base_binfo;
12927 /* Offsets in BINFO are in bytes relative to the whole structure
12928 while POS is in bits relative to the containing field. */
12929 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12930 / BITS_PER_UNIT);
12932 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12933 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12934 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12936 found_binfo = base_binfo;
12937 break;
12939 if (found_binfo)
12940 binfo = found_binfo;
12941 else
12942 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12943 binfo_offset);
12946 type = TREE_TYPE (fld);
12947 offset -= pos;
12951 /* Returns true if X is a typedef decl. */
12953 bool
12954 is_typedef_decl (const_tree x)
12956 return (x && TREE_CODE (x) == TYPE_DECL
12957 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12960 /* Returns true iff TYPE is a type variant created for a typedef. */
12962 bool
12963 typedef_variant_p (const_tree type)
12965 return is_typedef_decl (TYPE_NAME (type));
12968 /* PR 84195: Replace control characters in "unescaped" with their
12969 escaped equivalents. Allow newlines if -fmessage-length has
12970 been set to a non-zero value. This is done here, rather than
12971 where the attribute is recorded, as the message length can
12972 change between these two locations. */
12974 void
12975 escaped_string::escape (const char *unescaped)
12977 char *escaped;
12978 size_t i, new_i, len;
12980 if (m_owned)
12981 free (m_str);
12983 m_str = const_cast<char *> (unescaped);
12984 m_owned = false;
12986 if (unescaped == NULL || *unescaped == 0)
12987 return;
12989 len = strlen (unescaped);
12990 escaped = NULL;
12991 new_i = 0;
12993 for (i = 0; i < len; i++)
12995 char c = unescaped[i];
12997 if (!ISCNTRL (c))
12999 if (escaped)
13000 escaped[new_i++] = c;
13001 continue;
13004 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
13006 if (escaped == NULL)
13008 /* We only allocate space for a new string if we
13009 actually encounter a control character that
13010 needs replacing. */
13011 escaped = (char *) xmalloc (len * 2 + 1);
13012 strncpy (escaped, unescaped, i);
13013 new_i = i;
13016 escaped[new_i++] = '\\';
13018 switch (c)
13020 case '\a': escaped[new_i++] = 'a'; break;
13021 case '\b': escaped[new_i++] = 'b'; break;
13022 case '\f': escaped[new_i++] = 'f'; break;
13023 case '\n': escaped[new_i++] = 'n'; break;
13024 case '\r': escaped[new_i++] = 'r'; break;
13025 case '\t': escaped[new_i++] = 't'; break;
13026 case '\v': escaped[new_i++] = 'v'; break;
13027 default: escaped[new_i++] = '?'; break;
13030 else if (escaped)
13031 escaped[new_i++] = c;
13034 if (escaped)
13036 escaped[new_i] = 0;
13037 m_str = escaped;
13038 m_owned = true;
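/* Editor's note: illustrative behaviour, not part of the original source.
   With line wrapping disabled, escaping an attribute message containing an
   embedded newline, such as "first<newline>second", yields
   "first\nsecond" with a literal backslash, and a control character with
   no dedicated escape (e.g. 0x01) is replaced by "\?"; purely printable
   input is returned unchanged without allocating a copy.  */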
13042 /* Warn about a use of an identifier which was marked deprecated. Returns
13043 whether a warning was given. */
13045 bool
13046 warn_deprecated_use (tree node, tree attr)
13048 escaped_string msg;
13050 if (node == 0 || !warn_deprecated_decl)
13051 return false;
13053 if (!attr)
13055 if (DECL_P (node))
13056 attr = DECL_ATTRIBUTES (node);
13057 else if (TYPE_P (node))
13059 tree decl = TYPE_STUB_DECL (node);
13060 if (decl)
13061 attr = lookup_attribute ("deprecated",
13062 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
13066 if (attr)
13067 attr = lookup_attribute ("deprecated", attr);
13069 if (attr)
13070 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
13072 bool w = false;
13073 if (DECL_P (node))
13075 auto_diagnostic_group d;
13076 if (msg)
13077 w = warning (OPT_Wdeprecated_declarations,
13078 "%qD is deprecated: %s", node, (const char *) msg);
13079 else
13080 w = warning (OPT_Wdeprecated_declarations,
13081 "%qD is deprecated", node);
13082 if (w)
13083 inform (DECL_SOURCE_LOCATION (node), "declared here");
13085 else if (TYPE_P (node))
13087 tree what = NULL_TREE;
13088 tree decl = TYPE_STUB_DECL (node);
13090 if (TYPE_NAME (node))
13092 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
13093 what = TYPE_NAME (node);
13094 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
13095 && DECL_NAME (TYPE_NAME (node)))
13096 what = DECL_NAME (TYPE_NAME (node));
13099 auto_diagnostic_group d;
13100 if (what)
13102 if (msg)
13103 w = warning (OPT_Wdeprecated_declarations,
13104 "%qE is deprecated: %s", what, (const char *) msg);
13105 else
13106 w = warning (OPT_Wdeprecated_declarations,
13107 "%qE is deprecated", what);
13109 else
13111 if (msg)
13112 w = warning (OPT_Wdeprecated_declarations,
13113 "type is deprecated: %s", (const char *) msg);
13114 else
13115 w = warning (OPT_Wdeprecated_declarations,
13116 "type is deprecated");
13119 if (w && decl)
13120 inform (DECL_SOURCE_LOCATION (decl), "declared here");
13123 return w;
13126 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13127 somewhere in it. */
13129 bool
13130 contains_bitfld_component_ref_p (const_tree ref)
13132 while (handled_component_p (ref))
13134 if (TREE_CODE (ref) == COMPONENT_REF
13135 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13136 return true;
13137 ref = TREE_OPERAND (ref, 0);
13140 return false;
13143 /* Try to determine whether a TRY_CATCH expression can fall through.
13144 This is a subroutine of block_may_fallthru. */
13146 static bool
13147 try_catch_may_fallthru (const_tree stmt)
13149 tree_stmt_iterator i;
13151 /* If the TRY block can fall through, the whole TRY_CATCH can
13152 fall through. */
13153 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13154 return true;
13156 i = tsi_start (TREE_OPERAND (stmt, 1));
13157 switch (TREE_CODE (tsi_stmt (i)))
13159 case CATCH_EXPR:
13160 /* We expect to see a sequence of CATCH_EXPR trees, each with a
13161 catch expression and a body. The whole TRY_CATCH may fall
13162 through iff any of the catch bodies falls through. */
13163 for (; !tsi_end_p (i); tsi_next (&i))
13165 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13166 return true;
13168 return false;
13170 case EH_FILTER_EXPR:
13171 /* The exception filter expression only matters if there is an
13172 exception. If the exception does not match EH_FILTER_TYPES,
13173 we will execute EH_FILTER_FAILURE, and we will fall through
13174 if that falls through. If the exception does match
13175 EH_FILTER_TYPES, the stack unwinder will continue up the
13176 stack, so we will not fall through. We don't know whether we
13177 will throw an exception which matches EH_FILTER_TYPES or not,
13178 so we just ignore EH_FILTER_TYPES and assume that we might
13179 throw an exception which doesn't match. */
13180 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13182 default:
13183 /* This case represents statements to be executed when an
13184 exception occurs. Those statements are implicitly followed
13185 by a RESX statement to resume execution after the exception.
13186 So in this case the TRY_CATCH never falls through. */
13187 return false;
13191 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13192 need not be 100% accurate; simply be conservative and return true if we
13193 don't know. This is used only to avoid stupidly generating extra code.
13194 If we're wrong, we'll just delete the extra code later. */
13196 bool
13197 block_may_fallthru (const_tree block)
13199 /* This CONST_CAST is okay because expr_last returns its argument
13200 unmodified and we assign it to a const_tree. */
13201 const_tree stmt = expr_last (CONST_CAST_TREE (block));
13203 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13205 case GOTO_EXPR:
13206 case RETURN_EXPR:
13207 /* Easy cases. If the last statement of the block implies
13208 control transfer, then we can't fall through. */
13209 return false;
13211 case SWITCH_EXPR:
13212 /* If there is a default: label or case labels cover all possible
13213 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13214 to some case label in all cases and all we care is whether the
13215 SWITCH_BODY falls through. */
13216 if (SWITCH_ALL_CASES_P (stmt))
13217 return block_may_fallthru (SWITCH_BODY (stmt));
13218 return true;
13220 case COND_EXPR:
13221 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13222 return true;
13223 return block_may_fallthru (COND_EXPR_ELSE (stmt));
13225 case BIND_EXPR:
13226 return block_may_fallthru (BIND_EXPR_BODY (stmt));
13228 case TRY_CATCH_EXPR:
13229 return try_catch_may_fallthru (stmt);
13231 case TRY_FINALLY_EXPR:
13232 /* The finally clause is always executed after the try clause,
13233 so if it does not fall through, then the try-finally will not
13234 fall through. Otherwise, if the try clause does not fall
13235 through, then when the finally clause falls through it will
13236 resume execution wherever the try clause was going. So the
13237 whole try-finally will only fall through if both the try
13238 clause and the finally clause fall through. */
13239 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13240 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13242 case EH_ELSE_EXPR:
13243 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13245 case MODIFY_EXPR:
13246 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13247 stmt = TREE_OPERAND (stmt, 1);
13248 else
13249 return true;
13250 /* FALLTHRU */
13252 case CALL_EXPR:
13253 /* Functions that do not return do not fall through. */
13254 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13256 case CLEANUP_POINT_EXPR:
13257 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13259 case TARGET_EXPR:
13260 return block_may_fallthru (TREE_OPERAND (stmt, 1));
13262 case ERROR_MARK:
13263 return true;
13265 default:
13266 return lang_hooks.block_may_fallthru (stmt);
13270 /* True if we are using EH to handle cleanups. */
13271 static bool using_eh_for_cleanups_flag = false;
13273 /* This routine is called from front ends to indicate eh should be used for
13274 cleanups. */
13275 void
13276 using_eh_for_cleanups (void)
13278 using_eh_for_cleanups_flag = true;
13281 /* Query whether EH is used for cleanups. */
13282 bool
13283 using_eh_for_cleanups_p (void)
13285 return using_eh_for_cleanups_flag;
13288 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
13289 const char *
13290 get_tree_code_name (enum tree_code code)
13292 const char *invalid = "<invalid tree code>";
13294 if (code >= MAX_TREE_CODES)
13296 if (code == 0xa5a5)
13297 return "ggc_freed";
13298 return invalid;
13301 return tree_code_name[code];
13304 /* Drops the TREE_OVERFLOW flag from T. */
13306 tree
13307 drop_tree_overflow (tree t)
13309 gcc_checking_assert (TREE_OVERFLOW (t));
13311 /* For tree codes with a sharing machinery re-build the result. */
13312 if (poly_int_tree_p (t))
13313 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13315 /* For VECTOR_CST, remove the overflow bits from the encoded elements
13316 and canonicalize the result. */
13317 if (TREE_CODE (t) == VECTOR_CST)
13319 tree_vector_builder builder;
13320 builder.new_unary_operation (TREE_TYPE (t), t, true);
13321 unsigned int count = builder.encoded_nelts ();
13322 for (unsigned int i = 0; i < count; ++i)
13324 tree elt = VECTOR_CST_ELT (t, i);
13325 if (TREE_OVERFLOW (elt))
13326 elt = drop_tree_overflow (elt);
13327 builder.quick_push (elt);
13329 return builder.build ();
13332 /* Otherwise, as all tcc_constants are possibly shared, copy the node
13333 and drop the flag. */
13334 t = copy_node (t);
13335 TREE_OVERFLOW (t) = 0;
13337 /* For constants that contain nested constants, drop the flag
13338 from those as well. */
13339 if (TREE_CODE (t) == COMPLEX_CST)
13341 if (TREE_OVERFLOW (TREE_REALPART (t)))
13342 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13343 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13344 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13347 return t;
13350 /* Given a memory reference expression T, return its base address.
13351 The base address of a memory reference expression is the main
13352 object being referenced. For instance, the base address for
13353 'array[i].fld[j]' is 'array'. You can think of this as stripping
13354 away the offset part from a memory address.
13356 This function calls handled_component_p to strip away all the inner
13357 parts of the memory reference until it reaches the base object. */
13359 tree
13360 get_base_address (tree t)
13362 while (handled_component_p (t))
13363 t = TREE_OPERAND (t, 0);
13365 if ((TREE_CODE (t) == MEM_REF
13366 || TREE_CODE (t) == TARGET_MEM_REF)
13367 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13368 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13370 /* ??? Either the alias oracle or all callers need to properly deal
13371 with WITH_SIZE_EXPRs before we can look through those. */
13372 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13373 return NULL_TREE;
13375 return t;
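/* Editor's note: illustrative results, not part of the original source,
   with ARRAY and A hypothetical declarations:

     get_base_address (array[i].fld[j])        -> array
     get_base_address (MEM_REF (&a, offset))   -> a
     get_base_address (WITH_SIZE_EXPR <...>)   -> NULL_TREE

   matching the description above.  */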
13378 /* Return a tree of sizetype representing the size, in bytes, of the element
13379 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13381 tree
13382 array_ref_element_size (tree exp)
13384 tree aligned_size = TREE_OPERAND (exp, 3);
13385 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13386 location_t loc = EXPR_LOCATION (exp);
13388 /* If a size was specified in the ARRAY_REF, it's the size measured
13389 in alignment units of the element type. So multiply by that value. */
13390 if (aligned_size)
13392 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13393 sizetype from another type of the same width and signedness. */
13394 if (TREE_TYPE (aligned_size) != sizetype)
13395 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13396 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13397 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13400 /* Otherwise, take the size from that of the element type. Substitute
13401 any PLACEHOLDER_EXPR that we have. */
13402 else
13403 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13406 /* Return a tree representing the lower bound of the array mentioned in
13407 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13409 tree
13410 array_ref_low_bound (tree exp)
13412 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13414 /* If a lower bound is specified in EXP, use it. */
13415 if (TREE_OPERAND (exp, 2))
13416 return TREE_OPERAND (exp, 2);
13418 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13419 substituting for a PLACEHOLDER_EXPR as needed. */
13420 if (domain_type && TYPE_MIN_VALUE (domain_type))
13421 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13423 /* Otherwise, return a zero of the appropriate type. */
13424 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
13425 return (idxtype == error_mark_node
13426 ? integer_zero_node : build_int_cst (idxtype, 0));
13429 /* Return a tree representing the upper bound of the array mentioned in
13430 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13432 tree
13433 array_ref_up_bound (tree exp)
13435 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13437 /* If there is a domain type and it has an upper bound, use it, substituting
13438 for a PLACEHOLDER_EXPR as needed. */
13439 if (domain_type && TYPE_MAX_VALUE (domain_type))
13440 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13442 /* Otherwise fail. */
13443 return NULL_TREE;
13446 /* Returns true if REF is an array reference, component reference,
13447 or memory reference to an array at the end of a structure.
13448 If this is the case, the array may be allocated larger
13449 than its upper bound implies. */
13451 bool
13452 array_at_struct_end_p (tree ref)
13454 tree atype;
13456 if (TREE_CODE (ref) == ARRAY_REF
13457 || TREE_CODE (ref) == ARRAY_RANGE_REF)
13459 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13460 ref = TREE_OPERAND (ref, 0);
13462 else if (TREE_CODE (ref) == COMPONENT_REF
13463 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13464 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13465 else if (TREE_CODE (ref) == MEM_REF)
13467 tree arg = TREE_OPERAND (ref, 0);
13468 if (TREE_CODE (arg) == ADDR_EXPR)
13469 arg = TREE_OPERAND (arg, 0);
13470 tree argtype = TREE_TYPE (arg);
13471 if (TREE_CODE (argtype) == RECORD_TYPE)
13473 if (tree fld = last_field (argtype))
13475 atype = TREE_TYPE (fld);
13476 if (TREE_CODE (atype) != ARRAY_TYPE)
13477 return false;
13478 if (VAR_P (arg) && DECL_SIZE (fld))
13479 return false;
13481 else
13482 return false;
13484 else
13485 return false;
13487 else
13488 return false;
13490 if (TREE_CODE (ref) == STRING_CST)
13491 return false;
13493 tree ref_to_array = ref;
13494 while (handled_component_p (ref))
13496 /* If the reference chain contains a component reference to a
13497 non-union type and there follows another field the reference
13498 is not at the end of a structure. */
13499 if (TREE_CODE (ref) == COMPONENT_REF)
13501 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13503 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13504 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13505 nextf = DECL_CHAIN (nextf);
13506 if (nextf)
13507 return false;
13510 /* If we have a multi-dimensional array, we do not consider
13511 a non-innermost dimension as a flexible array if the whole
13512 multi-dimensional array is at struct end.
13513 The same holds for an array of aggregates with a trailing array
13514 member. */
13515 else if (TREE_CODE (ref) == ARRAY_REF)
13516 return false;
13517 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13519 /* If we view the underlying object as something else, then what we
13520 gathered up to now is what we have to rely on. */
13521 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13522 break;
13523 else
13524 gcc_unreachable ();
13526 ref = TREE_OPERAND (ref, 0);
13529 /* The array now is at struct end. Treat flexible arrays as
13530 always subject to being extended, even into just padding constrained by
13531 an underlying decl. */
13532 if (! TYPE_SIZE (atype)
13533 || ! TYPE_DOMAIN (atype)
13534 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13535 return true;
13537 if (TREE_CODE (ref) == MEM_REF
13538 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13539 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13541 /* If the reference is based on a declared entity, the size of the array
13542 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
13543 if (DECL_P (ref)
13544 && !(flag_unconstrained_commons
13545 && VAR_P (ref) && DECL_COMMON (ref))
13546 && DECL_SIZE_UNIT (ref)
13547 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13549 /* Check whether the array domain covers all of the available
13550 padding. */
13551 poly_int64 offset;
13552 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13553 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13554 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13555 return true;
13556 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13557 return true;
13559 /* If at least one extra element fits it is a flexarray. */
13560 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13561 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13562 + 2)
13563 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13564 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13565 return true;
13567 return false;
13570 return true;
13573 /* Return a tree representing the offset, in bytes, of the field referenced
13574 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13576 tree
13577 component_ref_field_offset (tree exp)
13579 tree aligned_offset = TREE_OPERAND (exp, 2);
13580 tree field = TREE_OPERAND (exp, 1);
13581 location_t loc = EXPR_LOCATION (exp);
13583 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13584 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13585 value. */
13586 if (aligned_offset)
13588 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13589 sizetype from another type of the same width and signedness. */
13590 if (TREE_TYPE (aligned_offset) != sizetype)
13591 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13592 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13593 size_int (DECL_OFFSET_ALIGN (field)
13594 / BITS_PER_UNIT));
13597 /* Otherwise, take the offset from that of the field. Substitute
13598 any PLACEHOLDER_EXPR that we have. */
13599 else
13600 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
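/* Editor's note (hypothetical example, assuming a typical LP64
   layout):

     struct S { char c; double d; };

   For a COMPONENT_REF of the field D, component_ref_field_offset
   returns the byte offset 8; for bit-fields, any remaining sub-byte
   part of the position is kept in DECL_FIELD_BIT_OFFSET and is not
   included in this value.  */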
13603 /* Given the initializer INIT, return the initializer for the field
13604 DECL if it exists, otherwise null. Used to obtain the initializer
13605 for a flexible array member and determine its size. */
13607 static tree
13608 get_initializer_for (tree init, tree decl)
13610 STRIP_NOPS (init);
13612 tree fld, fld_init;
13613 unsigned HOST_WIDE_INT i;
13614 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13616 if (decl == fld)
13617 return fld_init;
13619 if (TREE_CODE (fld) == CONSTRUCTOR)
13621 fld_init = get_initializer_for (fld_init, decl);
13622 if (fld_init)
13623 return fld_init;
13627 return NULL_TREE;
13630 /* Determines the size of the member referenced by the COMPONENT_REF
13631 REF, using its initializer expression if necessary in order to
13632 determine the size of an initialized flexible array member.
13633 If non-null, *INTERIOR_ZERO_LENGTH is set when REF refers to
13634 an interior zero-length array.
13635 Returns the size as sizetype (which might be zero for an object
13636 with an uninitialized flexible array member) or null if the size
13637 cannot be determined. */
13639 tree
13640 component_ref_size (tree ref, bool *interior_zero_length /* = NULL */)
13642 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13644 bool int_0_len = false;
13645 if (!interior_zero_length)
13646 interior_zero_length = &int_0_len;
13648 tree member = TREE_OPERAND (ref, 1);
13650 tree memsize = DECL_SIZE_UNIT (member);
13651 if (memsize)
13653 tree memtype = TREE_TYPE (member);
13654 if (TREE_CODE (memtype) != ARRAY_TYPE)
13655 return memsize;
13657 bool trailing = array_at_struct_end_p (ref);
13658 bool zero_length = integer_zerop (memsize);
13659 if (!trailing && (!interior_zero_length || !zero_length))
13660 /* MEMBER is either an interior array or is an array with
13661 more than one element. */
13662 return memsize;
13664 *interior_zero_length = zero_length && !trailing;
13665 if (*interior_zero_length)
13666 memsize = NULL_TREE;
13668 if (!zero_length)
13669 if (tree dom = TYPE_DOMAIN (memtype))
13670 if (tree min = TYPE_MIN_VALUE (dom))
13671 if (tree max = TYPE_MAX_VALUE (dom))
13672 if (TREE_CODE (min) == INTEGER_CST
13673 && TREE_CODE (max) == INTEGER_CST)
13675 offset_int minidx = wi::to_offset (min);
13676 offset_int maxidx = wi::to_offset (max);
13677 if (maxidx - minidx > 0)
13678 /* MEMBER is an array with more than 1 element. */
13679 return memsize;
13683 /* MEMBER is either a bona fide flexible array member, or a zero-length
13684 array member, or an array of length one treated as such. */
13686 /* If the reference is to a declared object and the member a true
13687 flexible array, try to determine its size from its initializer. */
13688 poly_int64 baseoff = 0;
13689 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13690 if (!base || !VAR_P (base))
13692 if (!*interior_zero_length)
13693 return NULL_TREE;
13695 if (TREE_CODE (TREE_OPERAND (ref, 0)) != COMPONENT_REF)
13696 return NULL_TREE;
13698 base = TREE_OPERAND (ref, 0);
13699 while (TREE_CODE (base) == COMPONENT_REF)
13700 base = TREE_OPERAND (base, 0);
13701 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13704 /* BASE is the declared object of which MEMBER is either a member
13705 or that is cast to REFTYPE (e.g., a char buffer used to store
13706 a REFTYPE object). */
13707 tree reftype = TREE_TYPE (TREE_OPERAND (ref, 0));
13708 tree basetype = TREE_TYPE (base);
13710 /* Determine the base type of the referenced object. If it's
13711 the same as REFTYPE and MEMBER has a known size, return it. */
13712 tree bt = basetype;
13713 if (!*interior_zero_length)
13714 while (TREE_CODE (bt) == ARRAY_TYPE)
13715 bt = TREE_TYPE (bt);
13716 bool typematch = useless_type_conversion_p (reftype, bt);
13717 if (memsize && typematch)
13718 return memsize;
13720 memsize = NULL_TREE;
13722 if (typematch)
13723 /* MEMBER is a true flexible array member. Compute its size from
13724 the initializer of the BASE object if it has one. */
13725 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13726 if (init != error_mark_node)
13728 init = get_initializer_for (init, member);
13729 if (init)
13731 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13732 if (tree refsize = TYPE_SIZE_UNIT (reftype))
13734 /* Use the larger of the initializer size and the tail
13735 padding in the enclosing struct. */
13736 poly_int64 rsz = tree_to_poly_int64 (refsize);
13737 rsz -= baseoff;
13738 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13739 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13742 baseoff = 0;
13746 if (!memsize)
13748 if (typematch)
13750 if (DECL_P (base)
13751 && DECL_EXTERNAL (base)
13752 && bt == basetype
13753 && !*interior_zero_length)
13754 /* The size of a flexible array member of an extern struct
13755 with no initializer cannot be determined (it's defined
13756 in another translation unit and can have an initializer
13757 with an arbitrary number of elements). */
13758 return NULL_TREE;
13760 /* Use the size of the base struct or, for interior zero-length
13761 arrays, the size of the enclosing type. */
13762 memsize = TYPE_SIZE_UNIT (bt);
13764 else if (DECL_P (base))
13765 /* Use the size of the BASE object (possibly an array of some
13766 other type such as char used to store the struct). */
13767 memsize = DECL_SIZE_UNIT (base);
13768 else
13769 return NULL_TREE;
13772 /* If the flexible array member has a known size use the greater
13773 of it and the tail padding in the enclosing struct.
13774 Otherwise, when the size of the flexible array member is unknown
13775 and the referenced object is not a struct, use the size of its
13776 type when known. This detects sizes of array buffers when cast
13777 to struct types with flexible array members. */
13778 if (memsize)
13780 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13781 if (known_lt (baseoff, memsz64))
13783 memsz64 -= baseoff;
13784 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13786 return size_zero_node;
13789 /* Return "don't know" for an external non-array object since its
13790 flexible array member can be initialized to have any number of
13791 elements. Otherwise, return zero because the flexible array
13792 member has no elements. */
13793 return (DECL_P (base)
13794 && DECL_EXTERNAL (base)
13795 && (!typematch
13796 || TREE_CODE (basetype) != ARRAY_TYPE)
13797 ? NULL_TREE : size_zero_node);
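/* Editor's illustration (a hedged sketch in GNU C; the declarations
   are made up):

     struct S { int n; char d[]; };
     static struct S s = { 3, "ab" };   // flexible member initialized
     extern struct S e;                 // defined in another TU

   With the usual layout, component_ref_size for an access to s.d
   would yield 3 (the initializer size, here larger than the tail
   padding of struct S), while for e.d it would yield NULL_TREE since
   the defining translation unit may provide an initializer with any
   number of elements.  */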
13800 /* Return the machine mode of T. For vectors, returns the mode of the
13801 inner type. The main use case is to feed the result to HONOR_NANS,
13802 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13804 machine_mode
13805 element_mode (const_tree t)
13807 if (!TYPE_P (t))
13808 t = TREE_TYPE (t);
13809 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13810 t = TREE_TYPE (t);
13811 return TYPE_MODE (t);
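/* Editor's note: a typical (hypothetical) use, as the comment above
   suggests, is

     if (HONOR_NANS (element_mode (expr)))
       ...

   which behaves uniformly for scalar, COMPLEX_TYPE and VECTOR_TYPE
   operands.  */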
13814 /* Vector types need to re-check the target flags each time we report
13815 the machine mode. We need to do this because attribute target can
13816 change the result of vector_mode_supported_p and have_regs_of_mode
13817 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13818 change on a per-function basis. */
13819 /* ??? Possibly a better solution is to run through all the types
13820 referenced by a function and re-compute the TYPE_MODE once, rather
13821 than make the TYPE_MODE macro call a function. */
13823 machine_mode
13824 vector_type_mode (const_tree t)
13826 machine_mode mode;
13828 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13830 mode = t->type_common.mode;
13831 if (VECTOR_MODE_P (mode)
13832 && (!targetm.vector_mode_supported_p (mode)
13833 || !have_regs_of_mode[mode]))
13835 scalar_int_mode innermode;
13837 /* For integers, try mapping it to a same-sized scalar mode. */
13838 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13840 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13841 * GET_MODE_BITSIZE (innermode));
13842 scalar_int_mode mode;
13843 if (int_mode_for_size (size, 0).exists (&mode)
13844 && have_regs_of_mode[mode])
13845 return mode;
13848 return BLKmode;
13851 return mode;
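/* Editor's illustration (a hedged sketch; assumes an x86 target and
   the GNU C vector extension):

     typedef int v8si __attribute__ ((vector_size (32)));

     __attribute__ ((target ("avx2")))
     v8si f (v8si a, v8si b) { return a + b; }   // 256-bit regs usable

     v8si g (v8si a, v8si b) { return a + b; }   // default ISA

   Within f the mode reported for v8si can be V8SImode, while within g
   the same VECTOR_TYPE falls back to an integer mode of the same size
   or to BLKmode, which is why the mode must be recomputed per
   function as described above.  */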
13854 /* Verify that basic properties of T match TV and thus T can be a variant of
13855 TV. TV should be the more specified variant (i.e. the main variant). */
13857 static bool
13858 verify_type_variant (const_tree t, tree tv)
13860 /* Type variant can differ by:
13862 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13863 ENCODE_QUAL_ADDR_SPACE.
13864 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13865 in this case some values may not be set in the variant types
13866 (see TYPE_COMPLETE_P checks).
13867 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13868 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13869 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13870 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13871 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13872 this is necessary to make it possible to merge types from different TUs
13873 - arrays, pointers and references may have TREE_TYPE that is a variant
13874 of TREE_TYPE of their main variants.
13875 - aggregates may have a new TYPE_FIELDS list that lists variants of
13876 the main variant TYPE_FIELDS.
13877 - vector types may differ by TYPE_VECTOR_OPAQUE
13880 /* Convenience macro for matching individual fields. */
13881 #define verify_variant_match(flag) \
13882 do { \
13883 if (flag (tv) != flag (t)) \
13885 error ("type variant differs by %s", #flag); \
13886 debug_tree (tv); \
13887 return false; \
13889 } while (false)
13891 /* tree_base checks. */
13893 verify_variant_match (TREE_CODE);
13894 /* FIXME: Ada builds non-artificial variants of artificial types. */
13895 if (TYPE_ARTIFICIAL (tv) && 0)
13896 verify_variant_match (TYPE_ARTIFICIAL);
13897 if (POINTER_TYPE_P (tv))
13898 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13899 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13900 verify_variant_match (TYPE_UNSIGNED);
13901 verify_variant_match (TYPE_PACKED);
13902 if (TREE_CODE (t) == REFERENCE_TYPE)
13903 verify_variant_match (TYPE_REF_IS_RVALUE);
13904 if (AGGREGATE_TYPE_P (t))
13905 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13906 else
13907 verify_variant_match (TYPE_SATURATING);
13908 /* FIXME: This check triggers during the libstdc++ build. */
13909 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13910 verify_variant_match (TYPE_FINAL_P);
13912 /* tree_type_common checks. */
13914 if (COMPLETE_TYPE_P (t))
13916 verify_variant_match (TYPE_MODE);
13917 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13918 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13919 verify_variant_match (TYPE_SIZE);
13920 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13921 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13922 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13924 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13925 TYPE_SIZE_UNIT (tv), 0));
13926 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13927 debug_tree (tv);
13928 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13929 debug_tree (TYPE_SIZE_UNIT (tv));
13930 error ("type%'s %<TYPE_SIZE_UNIT%>");
13931 debug_tree (TYPE_SIZE_UNIT (t));
13932 return false;
13934 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13936 verify_variant_match (TYPE_PRECISION);
13937 if (RECORD_OR_UNION_TYPE_P (t))
13938 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13939 else if (TREE_CODE (t) == ARRAY_TYPE)
13940 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13941 /* During LTO we merge variant lists from different translation units
13942 that may differ by TYPE_CONTEXT, which in turn may point
13943 to TRANSLATION_UNIT_DECL.
13944 Ada also builds variants of types with different TYPE_CONTEXT. */
13945 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13946 verify_variant_match (TYPE_CONTEXT);
13947 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13948 verify_variant_match (TYPE_STRING_FLAG);
13949 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13950 verify_variant_match (TYPE_CXX_ODR_P);
13951 if (TYPE_ALIAS_SET_KNOWN_P (t))
13953 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13954 debug_tree (tv);
13955 return false;
13958 /* tree_type_non_common checks. */
13960 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13961 and dangle the pointer from time to time. */
13962 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13963 && (in_lto_p || !TYPE_VFIELD (tv)
13964 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13966 error ("type variant has different %<TYPE_VFIELD%>");
13967 debug_tree (tv);
13968 return false;
13970 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13971 || TREE_CODE (t) == INTEGER_TYPE
13972 || TREE_CODE (t) == BOOLEAN_TYPE
13973 || TREE_CODE (t) == REAL_TYPE
13974 || TREE_CODE (t) == FIXED_POINT_TYPE)
13976 verify_variant_match (TYPE_MAX_VALUE);
13977 verify_variant_match (TYPE_MIN_VALUE);
13979 if (TREE_CODE (t) == METHOD_TYPE)
13980 verify_variant_match (TYPE_METHOD_BASETYPE);
13981 if (TREE_CODE (t) == OFFSET_TYPE)
13982 verify_variant_match (TYPE_OFFSET_BASETYPE);
13983 if (TREE_CODE (t) == ARRAY_TYPE)
13984 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13985 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13986 or even type's main variant. This is needed to make bootstrap pass
13987 and the bug seems new in GCC 5.
13988 C++ FE should be updated to make this consistent and we should check
13989 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13990 is a match with main variant.
13992 Also disable the check for Java for now because of a parser hack that
13993 first builds a dummy BINFO and then sometimes replaces it with the real
13994 BINFO in some of the copies. */
13995 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13996 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13997 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13998 Since there is no cheap way to tell a C++ type from a Java one without
13999 LTO, do checking at LTO time only. */
14000 && (in_lto_p && odr_type_p (t)))
14002 error ("type variant has different %<TYPE_BINFO%>");
14003 debug_tree (tv);
14004 error ("type variant%'s %<TYPE_BINFO%>");
14005 debug_tree (TYPE_BINFO (tv));
14006 error ("type%'s %<TYPE_BINFO%>");
14007 debug_tree (TYPE_BINFO (t));
14008 return false;
14011 /* Check various uses of TYPE_VALUES_RAW. */
14012 if (TREE_CODE (t) == ENUMERAL_TYPE
14013 && TYPE_VALUES (t))
14014 verify_variant_match (TYPE_VALUES);
14015 else if (TREE_CODE (t) == ARRAY_TYPE)
14016 verify_variant_match (TYPE_DOMAIN);
14017 /* Permit incomplete variants of a complete type. While FEs may complete
14018 all variants, this does not happen for C++ templates in all cases. */
14019 else if (RECORD_OR_UNION_TYPE_P (t)
14020 && COMPLETE_TYPE_P (t)
14021 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
14023 tree f1, f2;
14025 /* Fortran builds qualified variants as new records with items of
14026 qualified type. Verify that they look the same. */
14027 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
14028 f1 && f2;
14029 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14030 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
14031 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
14032 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
14033 /* FIXME: gfc_nonrestricted_type builds all types as variants
14034 with the exception of pointer types. It deeply copies the type,
14035 which means that we may end up with a variant type
14036 referring to a non-variant pointer. We may change it to
14037 produce types as variants, too, like
14038 objc_get_protocol_qualified_type does. */
14039 && !POINTER_TYPE_P (TREE_TYPE (f1)))
14040 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
14041 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
14042 break;
14043 if (f1 || f2)
14045 error ("type variant has different %<TYPE_FIELDS%>");
14046 debug_tree (tv);
14047 error ("first mismatch is field");
14048 debug_tree (f1);
14049 error ("and field");
14050 debug_tree (f2);
14051 return false;
14054 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
14055 verify_variant_match (TYPE_ARG_TYPES);
14056 /* For C++ the qualified variant of an array type is really an array type
14057 of the qualified TREE_TYPE.
14058 Objective-C builds variants of pointer types where the pointed-to type is a
14059 variant, too, in objc_get_protocol_qualified_type. */
14060 if (TREE_TYPE (t) != TREE_TYPE (tv)
14061 && ((TREE_CODE (t) != ARRAY_TYPE
14062 && !POINTER_TYPE_P (t))
14063 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
14064 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
14066 error ("type variant has different %<TREE_TYPE%>");
14067 debug_tree (tv);
14068 error ("type variant%'s %<TREE_TYPE%>");
14069 debug_tree (TREE_TYPE (tv));
14070 error ("type%'s %<TREE_TYPE%>");
14071 debug_tree (TREE_TYPE (t));
14072 return false;
14074 if (type_with_alias_set_p (t)
14075 && !gimple_canonical_types_compatible_p (t, tv, false))
14077 error ("type is not compatible with its variant");
14078 debug_tree (tv);
14079 error ("type variant%'s %<TREE_TYPE%>");
14080 debug_tree (TREE_TYPE (tv));
14081 error ("type%'s %<TREE_TYPE%>");
14082 debug_tree (TREE_TYPE (t));
14083 return false;
14085 return true;
14086 #undef verify_variant_match
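/* Editor's note (made-up C, a hedged sketch): a declaration like

     typedef const volatile int cvint;

   produces variants of "int" that share its TYPE_MAIN_VARIANT and
   must agree with it in code, mode, size and precision, differing
   only in the properties enumerated at the top of
   verify_type_variant.  */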
14090 /* The TYPE_CANONICAL merging machinery. It should closely resemble
14091 the middle-end types_compatible_p function. It needs to avoid
14092 claiming types are different for types that should be treated
14093 the same with respect to TBAA. Canonical types are also used
14094 for IL consistency checks via the useless_type_conversion_p
14095 predicate which does not handle all type kinds itself but falls
14096 back to pointer-comparison of TYPE_CANONICAL for aggregates
14097 for example. */
14099 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14100 type calculation because we need to allow inter-operability between signed
14101 and unsigned variants. */
14103 bool
14104 type_with_interoperable_signedness (const_tree type)
14106 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
14107 signed char and unsigned char. Similarly the Fortran FE builds
14108 C_SIZE_T as a signed type, while C defines it as unsigned. */
14110 return tree_code_for_canonical_type_merging (TREE_CODE (type))
14111 == INTEGER_TYPE
14112 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14113 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14116 /* Return true iff T1 and T2 are structurally identical as far as
14117 TBAA is concerned.
14118 This function is used both by lto.c canonical type merging and by the
14119 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
14120 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
14121 only for LTO because only in these cases TYPE_CANONICAL equivalence
14122 corresponds to the one defined by gimple_canonical_types_compatible_p. */
14124 bool
14125 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14126 bool trust_type_canonical)
14128 /* Type variants should be the same as the main variant. When not doing sanity
14129 checking to verify this fact, go to main variants and save some work. */
14130 if (trust_type_canonical)
14132 t1 = TYPE_MAIN_VARIANT (t1);
14133 t2 = TYPE_MAIN_VARIANT (t2);
14136 /* Check first for the obvious case of pointer identity. */
14137 if (t1 == t2)
14138 return true;
14140 /* Check that we have two types to compare. */
14141 if (t1 == NULL_TREE || t2 == NULL_TREE)
14142 return false;
14144 /* We consider complete types always compatible with incomplete types.
14145 This does not make sense for canonical type calculation and thus we
14146 need to ensure that we are never called on it.
14148 FIXME: For more correctness the function probably should have three modes
14149 1) mode assuming that types are complete, matching their structure
14150 2) mode allowing incomplete types but producing equivalence classes
14151 and thus ignoring all info from complete types
14152 3) mode allowing incomplete types to match complete but checking
14153 compatibility between complete types.
14155 1 and 2 can be used for canonical type calculation. 3 is the real
14156 definition of type compatibility that can be used e.g. for warnings during
14157 declaration merging. */
14159 gcc_assert (!trust_type_canonical
14160 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14162 /* If the types have been previously registered and found equal
14163 they still are. */
14165 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14166 && trust_type_canonical)
14168 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
14169 they are always NULL, but they are set to non-NULL for types
14170 constructed by build_pointer_type and variants. In this case the
14171 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
14172 all pointers are considered equal). Be sure not to return false
14173 negatives. */
14174 gcc_checking_assert (canonical_type_used_p (t1)
14175 && canonical_type_used_p (t2));
14176 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14179 /* For types where we do ODR based TBAA the canonical type is always
14180 set correctly, so we know that types are different if their
14181 canonical types do not match. */
14182 if (trust_type_canonical
14183 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14184 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14185 return false;
14187 /* Can't be the same type if the types don't have the same code. */
14188 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14189 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14190 return false;
14192 /* Qualifiers do not matter for canonical type comparison purposes. */
14194 /* Void types and nullptr types are always the same. */
14195 if (TREE_CODE (t1) == VOID_TYPE
14196 || TREE_CODE (t1) == NULLPTR_TYPE)
14197 return true;
14199 /* Can't be the same type if they have different mode. */
14200 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14201 return false;
14203 /* Non-aggregate types can be handled cheaply. */
14204 if (INTEGRAL_TYPE_P (t1)
14205 || SCALAR_FLOAT_TYPE_P (t1)
14206 || FIXED_POINT_TYPE_P (t1)
14207 || TREE_CODE (t1) == VECTOR_TYPE
14208 || TREE_CODE (t1) == COMPLEX_TYPE
14209 || TREE_CODE (t1) == OFFSET_TYPE
14210 || POINTER_TYPE_P (t1))
14212 /* Can't be the same type if they have different precision. */
14213 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14214 return false;
14216 /* In some cases the signed and unsigned types are required to be
14217 inter-operable. */
14218 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14219 && !type_with_interoperable_signedness (t1))
14220 return false;
14222 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14223 interoperable with "signed char". Unless all frontends are revisited
14224 to agree on these types, we must ignore the flag completely. */
14226 /* The Fortran standard defines the C_PTR type to be compatible with every
14227 C pointer. For this reason we need to glob all pointers into one.
14228 Still, pointers in different address spaces are not compatible. */
14229 if (POINTER_TYPE_P (t1))
14231 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14232 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14233 return false;
14236 /* Tail-recurse to components. */
14237 if (TREE_CODE (t1) == VECTOR_TYPE
14238 || TREE_CODE (t1) == COMPLEX_TYPE)
14239 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14240 TREE_TYPE (t2),
14241 trust_type_canonical);
14243 return true;
14246 /* Do type-specific comparisons. */
14247 switch (TREE_CODE (t1))
14249 case ARRAY_TYPE:
14250 /* Array types are the same if the element types are the same and
14251 the number of elements is the same. */
14252 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14253 trust_type_canonical)
14254 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14255 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14256 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14257 return false;
14258 else
14260 tree i1 = TYPE_DOMAIN (t1);
14261 tree i2 = TYPE_DOMAIN (t2);
14263 /* For an incomplete external array, the type domain can be
14264 NULL_TREE. Check this condition also. */
14265 if (i1 == NULL_TREE && i2 == NULL_TREE)
14266 return true;
14267 else if (i1 == NULL_TREE || i2 == NULL_TREE)
14268 return false;
14269 else
14271 tree min1 = TYPE_MIN_VALUE (i1);
14272 tree min2 = TYPE_MIN_VALUE (i2);
14273 tree max1 = TYPE_MAX_VALUE (i1);
14274 tree max2 = TYPE_MAX_VALUE (i2);
14276 /* The minimum/maximum values have to be the same. */
14277 if ((min1 == min2
14278 || (min1 && min2
14279 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14280 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14281 || operand_equal_p (min1, min2, 0))))
14282 && (max1 == max2
14283 || (max1 && max2
14284 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14285 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14286 || operand_equal_p (max1, max2, 0)))))
14287 return true;
14288 else
14289 return false;
14293 case METHOD_TYPE:
14294 case FUNCTION_TYPE:
14295 /* Function types are the same if the return type and argument types
14296 are the same. */
14297 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14298 trust_type_canonical))
14299 return false;
14301 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14302 return true;
14303 else
14305 tree parms1, parms2;
14307 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14308 parms1 && parms2;
14309 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14311 if (!gimple_canonical_types_compatible_p
14312 (TREE_VALUE (parms1), TREE_VALUE (parms2),
14313 trust_type_canonical))
14314 return false;
14317 if (parms1 || parms2)
14318 return false;
14320 return true;
14323 case RECORD_TYPE:
14324 case UNION_TYPE:
14325 case QUAL_UNION_TYPE:
14327 tree f1, f2;
14329 /* Don't try to compare variants of an incomplete type, before
14330 TYPE_FIELDS has been copied around. */
14331 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14332 return true;
14335 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14336 return false;
14338 /* For aggregate types, all the fields must be the same. */
14339 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14340 f1 || f2;
14341 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14343 /* Skip non-fields and zero-sized fields. */
14344 while (f1 && (TREE_CODE (f1) != FIELD_DECL
14345 || (DECL_SIZE (f1)
14346 && integer_zerop (DECL_SIZE (f1)))))
14347 f1 = TREE_CHAIN (f1);
14348 while (f2 && (TREE_CODE (f2) != FIELD_DECL
14349 || (DECL_SIZE (f2)
14350 && integer_zerop (DECL_SIZE (f2)))))
14351 f2 = TREE_CHAIN (f2);
14352 if (!f1 || !f2)
14353 break;
14354 /* The fields must have matching offsets, addressability and compatible types. */
14355 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14356 || !gimple_compare_field_offset (f1, f2)
14357 || !gimple_canonical_types_compatible_p
14358 (TREE_TYPE (f1), TREE_TYPE (f2),
14359 trust_type_canonical))
14360 return false;
14363 /* If one aggregate has more fields than the other, they
14364 are not the same. */
14365 if (f1 || f2)
14366 return false;
14368 return true;
14371 default:
14372 /* Consider all types with language specific trees in them mutually
14373 compatible. This is executed only from verify_type and false
14374 positives can be tolerated. */
14375 gcc_assert (!in_lto_p);
14376 return true;
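/* Editor's illustration (hypothetical types, a hedged sketch):

     struct R1 { int i; float f; };
     struct R2 { int a; float b; };

   The two records are structurally identical (matching field offsets
   and compatible field types), so the predicate above treats them as
   compatible for canonical-type/TBAA purposes even though they are
   distinct types at the language level.  */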
14380 /* Verify type T. */
14382 void
14383 verify_type (const_tree t)
14385 bool error_found = false;
14386 tree mv = TYPE_MAIN_VARIANT (t);
14387 if (!mv)
14389 error ("main variant is not defined");
14390 error_found = true;
14392 else if (mv != TYPE_MAIN_VARIANT (mv))
14394 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14395 debug_tree (mv);
14396 error_found = true;
14398 else if (t != mv && !verify_type_variant (t, mv))
14399 error_found = true;
14401 tree ct = TYPE_CANONICAL (t);
14402 if (!ct)
14404 else if (TYPE_CANONICAL (t) != ct)
14406 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14407 debug_tree (ct);
14408 error_found = true;
14410 /* Method and function types cannot be used to address memory and thus
14411 TYPE_CANONICAL really matters only for determining useless conversions.
14413 FIXME: The C++ FE produces declarations of builtin functions that are not
14414 compatible with their main variants. */
14415 else if (TREE_CODE (t) == FUNCTION_TYPE)
14417 else if (t != ct
14418 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14419 with variably sized arrays because their sizes are possibly
14420 gimplified to different variables. */
14421 && !variably_modified_type_p (ct, NULL)
14422 && !gimple_canonical_types_compatible_p (t, ct, false)
14423 && COMPLETE_TYPE_P (t))
14425 error ("%<TYPE_CANONICAL%> is not compatible");
14426 debug_tree (ct);
14427 error_found = true;
14430 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14431 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14433 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14434 debug_tree (ct);
14435 error_found = true;
14437 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14439 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14440 debug_tree (ct);
14441 debug_tree (TYPE_MAIN_VARIANT (ct));
14442 error_found = true;
14446 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14447 if (RECORD_OR_UNION_TYPE_P (t))
14449 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14450 and dangle the pointer from time to time. */
14451 if (TYPE_VFIELD (t)
14452 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14453 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14455 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14456 debug_tree (TYPE_VFIELD (t));
14457 error_found = true;
14460 else if (TREE_CODE (t) == POINTER_TYPE)
14462 if (TYPE_NEXT_PTR_TO (t)
14463 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14465 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14466 debug_tree (TYPE_NEXT_PTR_TO (t));
14467 error_found = true;
14470 else if (TREE_CODE (t) == REFERENCE_TYPE)
14472 if (TYPE_NEXT_REF_TO (t)
14473 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14475 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14476 debug_tree (TYPE_NEXT_REF_TO (t));
14477 error_found = true;
14480 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14481 || TREE_CODE (t) == FIXED_POINT_TYPE)
14483 /* FIXME: The following check should pass:
14484 useless_type_conversion_p (const_cast <tree> (t),
14485 TREE_TYPE (TYPE_MIN_VALUE (t)))
14486 but does not for C sizetypes in LTO. */
14489 /* Check various uses of TYPE_MAXVAL_RAW. */
14490 if (RECORD_OR_UNION_TYPE_P (t))
14492 if (!TYPE_BINFO (t))
14494 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14496 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14497 debug_tree (TYPE_BINFO (t));
14498 error_found = true;
14500 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14502 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14503 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14504 error_found = true;
14507 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14509 if (TYPE_METHOD_BASETYPE (t)
14510 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14511 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14513 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14514 debug_tree (TYPE_METHOD_BASETYPE (t));
14515 error_found = true;
14518 else if (TREE_CODE (t) == OFFSET_TYPE)
14520 if (TYPE_OFFSET_BASETYPE (t)
14521 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14522 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14524 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14525 debug_tree (TYPE_OFFSET_BASETYPE (t));
14526 error_found = true;
14529 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14530 || TREE_CODE (t) == FIXED_POINT_TYPE)
14532 /* FIXME: The following check should pass:
14533 useless_type_conversion_p (const_cast <tree> (t),
14534 TREE_TYPE (TYPE_MAX_VALUE (t)))
14535 but does not for C sizetypes in LTO. */
14537 else if (TREE_CODE (t) == ARRAY_TYPE)
14539 if (TYPE_ARRAY_MAX_SIZE (t)
14540 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14542 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14543 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14544 error_found = true;
14547 else if (TYPE_MAX_VALUE_RAW (t))
14549 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14550 debug_tree (TYPE_MAX_VALUE_RAW (t));
14551 error_found = true;
14554 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14556 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14557 debug_tree (TYPE_LANG_SLOT_1 (t));
14558 error_found = true;
14561 /* Check various uses of TYPE_VALUES_RAW. */
14562 if (TREE_CODE (t) == ENUMERAL_TYPE)
14563 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14565 tree value = TREE_VALUE (l);
14566 tree name = TREE_PURPOSE (l);
14568 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
14569 CONST_DECLs of ENUMERAL_TYPE. */
14570 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14572 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14573 debug_tree (value);
14574 debug_tree (name);
14575 error_found = true;
14577 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14578 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14580 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14581 "to the enum");
14582 debug_tree (value);
14583 debug_tree (name);
14584 error_found = true;
14586 if (TREE_CODE (name) != IDENTIFIER_NODE)
14588 error ("enum value name is not %<IDENTIFIER_NODE%>");
14589 debug_tree (value);
14590 debug_tree (name);
14591 error_found = true;
14594 else if (TREE_CODE (t) == ARRAY_TYPE)
14596 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14598 error ("array %<TYPE_DOMAIN%> is not integer type");
14599 debug_tree (TYPE_DOMAIN (t));
14600 error_found = true;
14603 else if (RECORD_OR_UNION_TYPE_P (t))
14605 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14607 error ("%<TYPE_FIELDS%> defined in incomplete type");
14608 error_found = true;
14610 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14612 /* TODO: verify properties of decls. */
14613 if (TREE_CODE (fld) == FIELD_DECL)
14615 else if (TREE_CODE (fld) == TYPE_DECL)
14617 else if (TREE_CODE (fld) == CONST_DECL)
14619 else if (VAR_P (fld))
14621 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14623 else if (TREE_CODE (fld) == USING_DECL)
14625 else if (TREE_CODE (fld) == FUNCTION_DECL)
14627 else
14629 error ("wrong tree in %<TYPE_FIELDS%> list");
14630 debug_tree (fld);
14631 error_found = true;
14635 else if (TREE_CODE (t) == INTEGER_TYPE
14636 || TREE_CODE (t) == BOOLEAN_TYPE
14637 || TREE_CODE (t) == OFFSET_TYPE
14638 || TREE_CODE (t) == REFERENCE_TYPE
14639 || TREE_CODE (t) == NULLPTR_TYPE
14640 || TREE_CODE (t) == POINTER_TYPE)
14642 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14644 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14645 "is %p",
14646 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14647 error_found = true;
14649 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14651 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14652 debug_tree (TYPE_CACHED_VALUES (t));
14653 error_found = true;
14655 /* Verify just enough of the cache to ensure that no one copied it to a new type.
14656 All copying should go through copy_node, which should clear it. */
14657 else if (TYPE_CACHED_VALUES_P (t))
14659 int i;
14660 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14661 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14662 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14664 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14665 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14666 error_found = true;
14667 break;
14671 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14672 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14674 /* C++ FE uses TREE_PURPOSE to store initial values. */
14675 if (TREE_PURPOSE (l) && in_lto_p)
14677 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14678 debug_tree (l);
14679 error_found = true;
14681 if (!TYPE_P (TREE_VALUE (l)))
14683 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14684 debug_tree (l);
14685 error_found = true;
14688 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14690 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14691 debug_tree (TYPE_VALUES_RAW (t));
14692 error_found = true;
14694 if (TREE_CODE (t) != INTEGER_TYPE
14695 && TREE_CODE (t) != BOOLEAN_TYPE
14696 && TREE_CODE (t) != OFFSET_TYPE
14697 && TREE_CODE (t) != REFERENCE_TYPE
14698 && TREE_CODE (t) != NULLPTR_TYPE
14699 && TREE_CODE (t) != POINTER_TYPE
14700 && TYPE_CACHED_VALUES_P (t))
14702 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14703 error_found = true;
14706 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14707 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14708 of a type. */
14709 if (TREE_CODE (t) == METHOD_TYPE
14710 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14712 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14713 error_found = true;
14716 if (error_found)
14718 debug_tree (const_cast <tree> (t));
14719 internal_error ("%qs failed", __func__);
14724 /* Return 1 if ARG interpreted as signed in its precision is known to be
14725 always positive, or 2 if ARG is known to be always negative, or 3 if
14726 ARG may be positive or negative. */
14729 get_range_pos_neg (tree arg)
14731 if (arg == error_mark_node)
14732 return 3;
14734 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14735 int cnt = 0;
14736 if (TREE_CODE (arg) == INTEGER_CST)
14738 wide_int w = wi::sext (wi::to_wide (arg), prec);
14739 if (wi::neg_p (w))
14740 return 2;
14741 else
14742 return 1;
14744 while (CONVERT_EXPR_P (arg)
14745 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14746 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14748 arg = TREE_OPERAND (arg, 0);
14749 /* Narrower value zero extended into wider type
14750 will always result in positive values. */
14751 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14752 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14753 return 1;
14754 prec = TYPE_PRECISION (TREE_TYPE (arg));
14755 if (++cnt > 30)
14756 return 3;
14759 if (TREE_CODE (arg) != SSA_NAME)
14760 return 3;
14761 wide_int arg_min, arg_max;
14762 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14764 gimple *g = SSA_NAME_DEF_STMT (arg);
14765 if (is_gimple_assign (g)
14766 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14768 tree t = gimple_assign_rhs1 (g);
14769 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14770 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14772 if (TYPE_UNSIGNED (TREE_TYPE (t))
14773 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14774 return 1;
14775 prec = TYPE_PRECISION (TREE_TYPE (t));
14776 arg = t;
14777 if (++cnt > 30)
14778 return 3;
14779 continue;
14782 return 3;
14784 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14786 /* For unsigned values, the "positive" range comes
14787 below the "negative" range. */
14788 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14789 return 1;
14790 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14791 return 2;
14793 else
14795 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14796 return 1;
14797 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14798 return 2;
14800 return 3;
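/* Editor's illustration (hypothetical C source):

     int f (unsigned char c, int x)
     {
       int i = c;        // zero-extended from a narrower type
       return i + x;
     }

   For the SSA name holding I the function above returns 1, since a
   value zero extended from unsigned char can never be negative, while
   for X with no recorded range information it returns 3.  */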
14806 /* Return true if ARG is marked with the nonnull attribute in the
14807 current function signature. */
14809 bool
14810 nonnull_arg_p (const_tree arg)
14812 tree t, attrs, fntype;
14813 unsigned HOST_WIDE_INT arg_num;
14815 gcc_assert (TREE_CODE (arg) == PARM_DECL
14816 && (POINTER_TYPE_P (TREE_TYPE (arg))
14817 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14819 /* The static chain decl is always non null. */
14820 if (arg == cfun->static_chain_decl)
14821 return true;
14823 /* The THIS argument of a method is always non-NULL. */
14824 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14825 && arg == DECL_ARGUMENTS (cfun->decl)
14826 && flag_delete_null_pointer_checks)
14827 return true;
14829 /* Values passed by reference are always non-NULL. */
14830 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14831 && flag_delete_null_pointer_checks)
14832 return true;
14834 fntype = TREE_TYPE (cfun->decl);
14835 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14837 attrs = lookup_attribute ("nonnull", attrs);
14839 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14840 if (attrs == NULL_TREE)
14841 return false;
14843 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14844 if (TREE_VALUE (attrs) == NULL_TREE)
14845 return true;
14847 /* Get the position number for ARG in the function signature. */
14848 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14850 t = DECL_CHAIN (t), arg_num++)
14852 if (t == arg)
14853 break;
14856 gcc_assert (t == arg);
14858 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14859 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14861 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14862 return true;
14866 return false;
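/* Editor's illustration (hypothetical declaration):

     __attribute__ ((nonnull (1)))
     void copy (char *dst, const char *src);

   Inside "copy", nonnull_arg_p is true for DST (listed explicitly)
   and false for SRC; with a bare "nonnull" attribute that has no
   argument list it would be true for both pointer parameters.  */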
14869 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14870 information. */
14872 location_t
14873 set_block (location_t loc, tree block)
14875 location_t pure_loc = get_pure_location (loc);
14876 source_range src_range = get_range_from_loc (line_table, loc);
14877 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14880 location_t
14881 set_source_range (tree expr, location_t start, location_t finish)
14883 source_range src_range;
14884 src_range.m_start = start;
14885 src_range.m_finish = finish;
14886 return set_source_range (expr, src_range);
14889 location_t
14890 set_source_range (tree expr, source_range src_range)
14892 if (!EXPR_P (expr))
14893 return UNKNOWN_LOCATION;
14895 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14896 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14897 pure_loc,
14898 src_range,
14899 NULL);
14900 SET_EXPR_LOCATION (expr, adhoc);
14901 return adhoc;
14904 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
14905 if !CAN_HAVE_LOCATION_P (expr).
14907 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14908 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14910 Wrapper nodes can be identified using location_wrapper_p. */
14912 tree
14913 maybe_wrap_with_location (tree expr, location_t loc)
14915 if (expr == NULL)
14916 return NULL;
14917 if (loc == UNKNOWN_LOCATION)
14918 return expr;
14919 if (CAN_HAVE_LOCATION_P (expr))
14920 return expr;
14921 /* We should only be adding wrappers for constants and for decls,
14922 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14923 gcc_assert (CONSTANT_CLASS_P (expr)
14924 || DECL_P (expr)
14925 || EXCEPTIONAL_CLASS_P (expr));
14927 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14928 any impact of the wrapper nodes. */
14929 if (EXCEPTIONAL_CLASS_P (expr))
14930 return expr;
14932 /* If any auto_suppress_location_wrappers are active, don't create
14933 wrappers. */
14934 if (suppress_location_wrappers > 0)
14935 return expr;
14937 tree_code code
14938 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14939 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14940 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14941 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14942 /* Mark this node as being a wrapper. */
14943 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14944 return wrapper;
14947 int suppress_location_wrappers;
14949 /* Return the name of combined function FN, for debugging purposes. */
14951 const char *
14952 combined_fn_name (combined_fn fn)
14954 if (builtin_fn_p (fn))
14956 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14957 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14959 else
14960 return internal_fn_name (as_internal_fn (fn));
14963 /* Return a bitmap with a bit set corresponding to each argument in
14964 a function call type FNTYPE declared with attribute nonnull,
14965 or null if none of the function's arguments are nonnull. The caller
14966 must free the bitmap. */
14968 bitmap
14969 get_nonnull_args (const_tree fntype)
14971 if (fntype == NULL_TREE)
14972 return NULL;
14974 tree attrs = TYPE_ATTRIBUTES (fntype);
14975 if (!attrs)
14976 return NULL;
14978 bitmap argmap = NULL;
14980 /* A function declaration can specify multiple attribute nonnull,
14981 each with zero or more arguments. The loop below creates a bitmap
14982 representing a union of all the arguments. An empty (but non-null)
14983 bitmap means that all arguments have been declared nonnull. */
14984 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14986 attrs = lookup_attribute ("nonnull", attrs);
14987 if (!attrs)
14988 break;
14990 if (!argmap)
14991 argmap = BITMAP_ALLOC (NULL);
14993 if (!TREE_VALUE (attrs))
14995 /* Clear the bitmap in case a previous attribute nonnull
14996 set it and this one overrides it for all arguments. */
14997 bitmap_clear (argmap);
14998 return argmap;
15001 /* Iterate over the indices of the arguments declared nonnull
15002 and set a bit for each. */
15003 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15005 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15006 bitmap_set_bit (argmap, val);
15010 return argmap;
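/* Editor's illustration (hypothetical declaration):

     __attribute__ ((nonnull (1), nonnull (3)))
     void put (void *dst, int n, const void *src);

   For this FUNCTION_TYPE the function above returns a bitmap with
   bits 0 and 2 set (attribute operands are 1-based, the bitmap is
   0-based), while a plain __attribute__ ((nonnull)) yields an empty
   but non-null bitmap meaning every argument is declared nonnull.  */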
15013 /* Returns true if TYPE is a type where it and all of its subobjects
15014 (recursively) are of structure, union, or array type. */
15016 static bool
15017 default_is_empty_type (tree type)
15019 if (RECORD_OR_UNION_TYPE_P (type))
15021 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15022 if (TREE_CODE (field) == FIELD_DECL
15023 && !DECL_PADDING_P (field)
15024 && !default_is_empty_type (TREE_TYPE (field)))
15025 return false;
15026 return true;
15028 else if (TREE_CODE (type) == ARRAY_TYPE)
15029 return (integer_minus_onep (array_type_nelts (type))
15030 || TYPE_DOMAIN (type) == NULL_TREE
15031 || default_is_empty_type (TREE_TYPE (type)));
15032 return false;
15035 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
15036 that shouldn't be passed via stack. */
15038 bool
15039 default_is_empty_record (const_tree type)
15041 if (!abi_version_at_least (12))
15042 return false;
15044 if (type == error_mark_node)
15045 return false;
15047 if (TREE_ADDRESSABLE (type))
15048 return false;
15050 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
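/* Editor's illustration (hypothetical C++ types, a hedged sketch):

     struct empty { };
     struct also_empty { empty e; empty arr[4]; };
     struct not_empty { char c; };

   The first two are empty in the sense above: every subobject is an
   empty record or an array of them, and fields marked DECL_PADDING_P
   are ignored, as the loop above shows.  The last one is not.  */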
15053 /* Determine whether TYPE is a structure with a flexible array member,
15054 or a union containing such a structure (possibly recursively). */
15056 bool
15057 flexible_array_type_p (const_tree type)
15059 tree x, last;
15060 switch (TREE_CODE (type))
15062 case RECORD_TYPE:
15063 last = NULL_TREE;
15064 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15065 if (TREE_CODE (x) == FIELD_DECL)
15066 last = x;
15067 if (last == NULL_TREE)
15068 return false;
15069 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
15070 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
15071 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
15072 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
15073 return true;
15074 return false;
15075 case UNION_TYPE:
15076 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15078 if (TREE_CODE (x) == FIELD_DECL
15079 && flexible_array_type_p (TREE_TYPE (x)))
15080 return true;
15082 return false;
15083 default:
15084 return false;
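/* Editor's illustration (hypothetical types):

     struct msg { int len; char data[]; };    // -> true
     union pkt  { struct msg m; long raw; };  // -> true, contains msg
     struct hdr { int len; char data[4]; };   // -> false

   Only a genuine flexible array member as the last field (an array
   type with a TYPE_DOMAIN but no TYPE_MAX_VALUE) makes the predicate
   above return true, directly or through a containing union.  */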
15088 /* Like int_size_in_bytes, but handle empty records specially. */
15090 HOST_WIDE_INT
15091 arg_int_size_in_bytes (const_tree type)
15093 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15096 /* Like size_in_bytes, but handle empty records specially. */
15098 tree
15099 arg_size_in_bytes (const_tree type)
15101 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15104 /* Return true if an expression with CODE has to have the same result type as
15105 its first operand. */
15107 bool
15108 expr_type_first_operand_type_p (tree_code code)
15110 switch (code)
15112 case NEGATE_EXPR:
15113 case ABS_EXPR:
15114 case BIT_NOT_EXPR:
15115 case PAREN_EXPR:
15116 case CONJ_EXPR:
15118 case PLUS_EXPR:
15119 case MINUS_EXPR:
15120 case MULT_EXPR:
15121 case TRUNC_DIV_EXPR:
15122 case CEIL_DIV_EXPR:
15123 case FLOOR_DIV_EXPR:
15124 case ROUND_DIV_EXPR:
15125 case TRUNC_MOD_EXPR:
15126 case CEIL_MOD_EXPR:
15127 case FLOOR_MOD_EXPR:
15128 case ROUND_MOD_EXPR:
15129 case RDIV_EXPR:
15130 case EXACT_DIV_EXPR:
15131 case MIN_EXPR:
15132 case MAX_EXPR:
15133 case BIT_IOR_EXPR:
15134 case BIT_XOR_EXPR:
15135 case BIT_AND_EXPR:
15137 case LSHIFT_EXPR:
15138 case RSHIFT_EXPR:
15139 case LROTATE_EXPR:
15140 case RROTATE_EXPR:
15141 return true;
15143 default:
15144 return false;
15148 /* Return a typenode for the "standard" C type with a given name. */
15149 tree
15150 get_typenode_from_name (const char *name)
15152 if (name == NULL || *name == '\0')
15153 return NULL_TREE;
15155 if (strcmp (name, "char") == 0)
15156 return char_type_node;
15157 if (strcmp (name, "unsigned char") == 0)
15158 return unsigned_char_type_node;
15159 if (strcmp (name, "signed char") == 0)
15160 return signed_char_type_node;
15162 if (strcmp (name, "short int") == 0)
15163 return short_integer_type_node;
15164 if (strcmp (name, "short unsigned int") == 0)
15165 return short_unsigned_type_node;
15167 if (strcmp (name, "int") == 0)
15168 return integer_type_node;
15169 if (strcmp (name, "unsigned int") == 0)
15170 return unsigned_type_node;
15172 if (strcmp (name, "long int") == 0)
15173 return long_integer_type_node;
15174 if (strcmp (name, "long unsigned int") == 0)
15175 return long_unsigned_type_node;
15177 if (strcmp (name, "long long int") == 0)
15178 return long_long_integer_type_node;
15179 if (strcmp (name, "long long unsigned int") == 0)
15180 return long_long_unsigned_type_node;
15182 gcc_unreachable ();
15185 /* List of pointer types used to declare builtins before we have seen their
15186 real declaration.
15188 Keep the size up to date in tree.h ! */
15189 const builtin_structptr_type builtin_structptr_types[6] =
15191 { fileptr_type_node, ptr_type_node, "FILE" },
15192 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
15193 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
15194 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
15195 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
15196 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
15199 /* Return the maximum object size. */
15201 tree
15202 max_object_size (void)
15204 /* To do: Make this a configurable parameter. */
15205 return TYPE_MAX_VALUE (ptrdiff_type_node);
15208 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
15209 parameter default to false and that weeds out error_mark_node. */
15211 bool
15212 verify_type_context (location_t loc, type_context_kind context,
15213 const_tree type, bool silent_p)
15215 if (type == error_mark_node)
15216 return true;
15218 gcc_assert (TYPE_P (type));
15219 return (!targetm.verify_type_context
15220 || targetm.verify_type_context (loc, context, type, silent_p));
15223 #if CHECKING_P
15225 namespace selftest {
15227 /* Selftests for tree. */
15229 /* Verify that integer constants are sane. */
15231 static void
15232 test_integer_constants ()
15234 ASSERT_TRUE (integer_type_node != NULL);
15235 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15237 tree type = integer_type_node;
15239 tree zero = build_zero_cst (type);
15240 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15241 ASSERT_EQ (type, TREE_TYPE (zero));
15243 tree one = build_int_cst (type, 1);
15244 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15245 ASSERT_EQ (type, TREE_TYPE (one));
15248 /* Verify identifiers. */
15250 static void
15251 test_identifiers ()
15253 tree identifier = get_identifier ("foo");
15254 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15255 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15258 /* Verify LABEL_DECL. */
15260 static void
15261 test_labels ()
15263 tree identifier = get_identifier ("err");
15264 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15265 identifier, void_type_node);
15266 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15267 ASSERT_FALSE (FORCED_LABEL (label_decl));
15270 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15271 are given by VALS. */
15273 static tree
15274 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15276 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15277 tree_vector_builder builder (type, vals.length (), 1);
15278 builder.splice (vals);
15279 return builder.build ();
15282 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15284 static void
15285 check_vector_cst (vec<tree> expected, tree actual)
15287 ASSERT_KNOWN_EQ (expected.length (),
15288 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15289 for (unsigned int i = 0; i < expected.length (); ++i)
15290 ASSERT_EQ (wi::to_wide (expected[i]),
15291 wi::to_wide (vector_cst_elt (actual, i)));
15294 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15295 and that its elements match EXPECTED. */
15297 static void
15298 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15299 unsigned int npatterns)
15301 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15302 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15303 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15304 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15305 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15306 check_vector_cst (expected, actual);
15309 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15310 and NPATTERNS background elements, and that its elements match
15311 EXPECTED. */
15313 static void
15314 check_vector_cst_fill (vec<tree> expected, tree actual,
15315 unsigned int npatterns)
15317 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15318 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15319 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15320 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15321 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15322 check_vector_cst (expected, actual);
15325 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15326 and that its elements match EXPECTED. */
15328 static void
15329 check_vector_cst_stepped (vec<tree> expected, tree actual,
15330 unsigned int npatterns)
15332 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15333 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15334 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15335 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15336 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15337 check_vector_cst (expected, actual);

/* Test the creation of VECTOR_CSTs.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around:
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions:
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step:
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
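
/* Illustrative note (an observation about the assertions above, not a
   specification): the fully-general case { 41, 97, 100, 21, 100, 9990,
   100, 49 } admits no compression, so check_vector_cst_fill expects
   4 patterns of 2 encoded elements each; in other words, all 8 values
   end up encoded explicitly.  */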

/* Verify that STRIP_NOPS (NODE) is EXPECTED.
   Helper function for test_location_wrappers, to deal with STRIP_NOPS
   modifying its argument in-place.  */

static void
check_strip_nops (tree node, tree expected)
{
  STRIP_NOPS (node);
  ASSERT_EQ (expected, node);
}

/* Verify location wrappers.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));

  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
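
/* Illustrative note for the test above: as far as these assertions are
   concerned, a location wrapper is a NON_LVALUE_EXPR or
   VIEW_CONVERT_EXPR with the EXPR_LOCATION_WRAPPER_P flag set (see
   location_wrapper_p for the authoritative definition).  That flag is
   why the hand-built NON_LVALUE_EXPR r_cast above is not mistaken for
   a wrapper even though it has a wrapper-like tree code.  */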

/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
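
/* Illustrative note (a reading of the tests above, not a guarantee):
   the wr_* wrappers give the same answers as the values they wrap,
   which is consistent with these predicates looking through location
   wrappers, e.g. via STRIP_ANY_LOCATION_WRAPPER, before inspecting
   the underlying constant.  */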

/* Check that string escaping works correctly.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
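
/* Illustrative sketch (not part of the testsuite) of how an
   escaped_string might be consumed when building a diagnostic, where
   ATTR_MSG stands for a hypothetical user-supplied string:

     escaped_string m;
     m.escape (attr_msg);
     if (m)
       warning (OPT_Wattributes, "ignoring attribute: %s",
		(const char *) m);

   The escaping keeps control characters from reaching the diagnostic
   stream raw, subject to the -fmessage-length handling exercised
   above.  */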

/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"