1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
22 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
70 #include "gimple-fold.h"
72 /* Tree code classes. */
74 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
75 #define END_OF_BASE_TREE_CODES tcc_exceptional,
77 const enum tree_code_class tree_code_type[] = {
78 #include "all-tree.def"
81 #undef DEFTREECODE
82 #undef END_OF_BASE_TREE_CODES
84 /* Table indexed by tree code giving number of expression
85 operands beyond the fixed part of the node structure.
86 Not used for types or decls. */
88 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
89 #define END_OF_BASE_TREE_CODES 0,
91 const unsigned char tree_code_length[] = {
92 #include "all-tree.def"
95 #undef DEFTREECODE
96 #undef END_OF_BASE_TREE_CODES
98 /* Names of tree components.
99 Used for printing out the tree and error messages. */
100 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
101 #define END_OF_BASE_TREE_CODES "@dummy",
103 static const char *const tree_code_name[] = {
104 #include "all-tree.def"
107 #undef DEFTREECODE
108 #undef END_OF_BASE_TREE_CODES
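/* Illustrative expansion sketch: for the tree.def entry
     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)
   the three #include passes over "all-tree.def" above yield
     tree_code_type[PLUS_EXPR]   == tcc_binary
     tree_code_length[PLUS_EXPR] == 2
     tree_code_name[PLUS_EXPR]   == "plus_expr"
   because DEFTREECODE is redefined before each inclusion.  */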
110 /* Each tree code class has an associated string representation.
111 These must correspond to the tree_code_class entries. */
113 const char *const tree_code_class_strings[] =
115 "exceptional",
116 "constant",
117 "type",
118 "declaration",
119 "reference",
120 "comparison",
121 "unary",
122 "binary",
123 "statement",
124 "vl_exp",
125 "expression"
128 /* obstack.[ch] explicitly declined to prototype this. */
129 extern int _obstack_allocated_p (struct obstack *h, void *obj);
131 /* Statistics-gathering stuff. */
133 static uint64_t tree_code_counts[MAX_TREE_CODES];
134 uint64_t tree_node_counts[(int) all_kinds];
135 uint64_t tree_node_sizes[(int) all_kinds];
137 /* Keep in sync with tree.h:enum tree_node_kind. */
138 static const char * const tree_node_kind_names[] = {
139 "decls",
140 "types",
141 "blocks",
142 "stmts",
143 "refs",
144 "exprs",
145 "constants",
146 "identifiers",
147 "vecs",
148 "binfos",
149 "ssa names",
150 "constructors",
151 "random kinds",
152 "lang_decl kinds",
153 "lang_type kinds",
154 "omp clauses",
157 /* Unique id for next decl created. */
158 static GTY(()) int next_decl_uid;
159 /* Unique id for next type created. */
160 static GTY(()) unsigned next_type_uid = 1;
161 /* Unique id for next debug decl created. Use negative numbers,
162 to catch erroneous uses. */
163 static GTY(()) int next_debug_decl_uid;
165 /* Since we cannot rehash a type after it is in the table, we have to
166 keep the hash code. */
168 struct GTY((for_user)) type_hash {
169 unsigned long hash;
170 tree type;
173 /* Initial size of the hash table (rounded to next prime). */
174 #define TYPE_HASH_INITIAL_SIZE 1000
176 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
178 static hashval_t hash (type_hash *t) { return t->hash; }
179 static bool equal (type_hash *a, type_hash *b);
181 static int
182 keep_cache_entry (type_hash *&t)
184 return ggc_marked_p (t->type);
188 /* Now here is the hash table. When recording a type, it is added to
189 the slot whose index is the hash code. Note that the hash table is
190 used for several kinds of types (function types, array types and
191 array index range types, for now). While all these live in the
192 same table, they are completely independent, and the hash code is
193 computed differently for each of these. */
195 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
197 /* Hash table and temporary node for larger integer const values. */
198 static GTY (()) tree int_cst_node;
200 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
202 static hashval_t hash (tree t);
203 static bool equal (tree x, tree y);
206 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
208 /* Class and variable for making sure that there is a single POLY_INT_CST
209 for a given value. */
210 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
212 typedef std::pair<tree, const poly_wide_int *> compare_type;
213 static hashval_t hash (tree t);
214 static bool equal (tree x, const compare_type &y);
217 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
219 /* Hash table for optimization flags and target option flags. Use the same
220 hash table for both sets of options. Nodes for building the current
221 optimization and target option nodes. The assumption is that most of the
222 time the options created will already be in the hash table, so we avoid
223 allocating and freeing up a node repeatedly. */
224 static GTY (()) tree cl_optimization_node;
225 static GTY (()) tree cl_target_option_node;
227 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
229 static hashval_t hash (tree t);
230 static bool equal (tree x, tree y);
233 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
235 /* General tree->tree mapping structure for use in hash tables. */
238 static GTY ((cache))
239 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
241 static GTY ((cache))
242 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
244 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
246 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
248 static bool
249 equal (tree_vec_map *a, tree_vec_map *b)
251 return a->base.from == b->base.from;
254 static int
255 keep_cache_entry (tree_vec_map *&m)
257 return ggc_marked_p (m->base.from);
261 static GTY ((cache))
262 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
264 static void set_type_quals (tree, int);
265 static void print_type_hash_statistics (void);
266 static void print_debug_expr_statistics (void);
267 static void print_value_expr_statistics (void);
269 static tree build_array_type_1 (tree, tree, bool, bool);
271 tree global_trees[TI_MAX];
272 tree integer_types[itk_none];
274 bool int_n_enabled_p[NUM_INT_N_ENTS];
275 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
277 bool tree_contains_struct[MAX_TREE_CODES][64];
279 /* Number of operands for each OpenMP clause. */
280 unsigned const char omp_clause_num_ops[] =
282 0, /* OMP_CLAUSE_ERROR */
283 1, /* OMP_CLAUSE_PRIVATE */
284 1, /* OMP_CLAUSE_SHARED */
285 1, /* OMP_CLAUSE_FIRSTPRIVATE */
286 2, /* OMP_CLAUSE_LASTPRIVATE */
287 5, /* OMP_CLAUSE_REDUCTION */
288 5, /* OMP_CLAUSE_TASK_REDUCTION */
289 5, /* OMP_CLAUSE_IN_REDUCTION */
290 1, /* OMP_CLAUSE_COPYIN */
291 1, /* OMP_CLAUSE_COPYPRIVATE */
292 3, /* OMP_CLAUSE_LINEAR */
293 2, /* OMP_CLAUSE_ALIGNED */
294 1, /* OMP_CLAUSE_DEPEND */
295 1, /* OMP_CLAUSE_NONTEMPORAL */
296 1, /* OMP_CLAUSE_UNIFORM */
297 1, /* OMP_CLAUSE_TO_DECLARE */
298 1, /* OMP_CLAUSE_LINK */
299 2, /* OMP_CLAUSE_FROM */
300 2, /* OMP_CLAUSE_TO */
301 2, /* OMP_CLAUSE_MAP */
302 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
303 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
304 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
305 1, /* OMP_CLAUSE_INCLUSIVE */
306 1, /* OMP_CLAUSE_EXCLUSIVE */
307 2, /* OMP_CLAUSE__CACHE_ */
308 2, /* OMP_CLAUSE_GANG */
309 1, /* OMP_CLAUSE_ASYNC */
310 1, /* OMP_CLAUSE_WAIT */
311 0, /* OMP_CLAUSE_AUTO */
312 0, /* OMP_CLAUSE_SEQ */
313 1, /* OMP_CLAUSE__LOOPTEMP_ */
314 1, /* OMP_CLAUSE__REDUCTEMP_ */
315 1, /* OMP_CLAUSE__CONDTEMP_ */
316 1, /* OMP_CLAUSE__SCANTEMP_ */
317 1, /* OMP_CLAUSE_IF */
318 1, /* OMP_CLAUSE_NUM_THREADS */
319 1, /* OMP_CLAUSE_SCHEDULE */
320 0, /* OMP_CLAUSE_NOWAIT */
321 1, /* OMP_CLAUSE_ORDERED */
322 0, /* OMP_CLAUSE_DEFAULT */
323 3, /* OMP_CLAUSE_COLLAPSE */
324 0, /* OMP_CLAUSE_UNTIED */
325 1, /* OMP_CLAUSE_FINAL */
326 0, /* OMP_CLAUSE_MERGEABLE */
327 1, /* OMP_CLAUSE_DEVICE */
328 1, /* OMP_CLAUSE_DIST_SCHEDULE */
329 0, /* OMP_CLAUSE_INBRANCH */
330 0, /* OMP_CLAUSE_NOTINBRANCH */
331 1, /* OMP_CLAUSE_NUM_TEAMS */
332 1, /* OMP_CLAUSE_THREAD_LIMIT */
333 0, /* OMP_CLAUSE_PROC_BIND */
334 1, /* OMP_CLAUSE_SAFELEN */
335 1, /* OMP_CLAUSE_SIMDLEN */
336 0, /* OMP_CLAUSE_DEVICE_TYPE */
337 0, /* OMP_CLAUSE_FOR */
338 0, /* OMP_CLAUSE_PARALLEL */
339 0, /* OMP_CLAUSE_SECTIONS */
340 0, /* OMP_CLAUSE_TASKGROUP */
341 1, /* OMP_CLAUSE_PRIORITY */
342 1, /* OMP_CLAUSE_GRAINSIZE */
343 1, /* OMP_CLAUSE_NUM_TASKS */
344 0, /* OMP_CLAUSE_NOGROUP */
345 0, /* OMP_CLAUSE_THREADS */
346 0, /* OMP_CLAUSE_SIMD */
347 1, /* OMP_CLAUSE_HINT */
348 0, /* OMP_CLAUSE_DEFAULTMAP */
349 0, /* OMP_CLAUSE_ORDER */
350 0, /* OMP_CLAUSE_BIND */
351 1, /* OMP_CLAUSE__SIMDUID_ */
352 0, /* OMP_CLAUSE__SIMT_ */
353 0, /* OMP_CLAUSE_INDEPENDENT */
354 1, /* OMP_CLAUSE_WORKER */
355 1, /* OMP_CLAUSE_VECTOR */
356 1, /* OMP_CLAUSE_NUM_GANGS */
357 1, /* OMP_CLAUSE_NUM_WORKERS */
358 1, /* OMP_CLAUSE_VECTOR_LENGTH */
359 3, /* OMP_CLAUSE_TILE */
360 2, /* OMP_CLAUSE__GRIDDIM_ */
361 0, /* OMP_CLAUSE_IF_PRESENT */
362 0, /* OMP_CLAUSE_FINALIZE */
365 const char * const omp_clause_code_name[] =
367 "error_clause",
368 "private",
369 "shared",
370 "firstprivate",
371 "lastprivate",
372 "reduction",
373 "task_reduction",
374 "in_reduction",
375 "copyin",
376 "copyprivate",
377 "linear",
378 "aligned",
379 "depend",
380 "nontemporal",
381 "uniform",
382 "to",
383 "link",
384 "from",
385 "to",
386 "map",
387 "use_device_ptr",
388 "use_device_addr",
389 "is_device_ptr",
390 "inclusive",
391 "exclusive",
392 "_cache_",
393 "gang",
394 "async",
395 "wait",
396 "auto",
397 "seq",
398 "_looptemp_",
399 "_reductemp_",
400 "_condtemp_",
401 "_scantemp_",
402 "if",
403 "num_threads",
404 "schedule",
405 "nowait",
406 "ordered",
407 "default",
408 "collapse",
409 "untied",
410 "final",
411 "mergeable",
412 "device",
413 "dist_schedule",
414 "inbranch",
415 "notinbranch",
416 "num_teams",
417 "thread_limit",
418 "proc_bind",
419 "safelen",
420 "simdlen",
421 "device_type",
422 "for",
423 "parallel",
424 "sections",
425 "taskgroup",
426 "priority",
427 "grainsize",
428 "num_tasks",
429 "nogroup",
430 "threads",
431 "simd",
432 "hint",
433 "defaultmap",
434 "order",
435 "bind",
436 "_simduid_",
437 "_simt_",
438 "independent",
439 "worker",
440 "vector",
441 "num_gangs",
442 "num_workers",
443 "vector_length",
444 "tile",
445 "_griddim_",
446 "if_present",
447 "finalize",
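/* Illustrative cross-reference: both tables are indexed by enum
   omp_clause_code, so for example
     omp_clause_num_ops[OMP_CLAUSE_REDUCTION]   == 5
     omp_clause_code_name[OMP_CLAUSE_REDUCTION] == "reduction"
   while OMP_CLAUSE_NOWAIT has zero operands and prints as "nowait".  */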
451 /* Return the tree node structure used by tree code CODE. */
453 static inline enum tree_node_structure_enum
454 tree_node_structure_for_code (enum tree_code code)
456 switch (TREE_CODE_CLASS (code))
458 case tcc_declaration:
459 switch (code)
461 case CONST_DECL: return TS_CONST_DECL;
462 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
463 case FIELD_DECL: return TS_FIELD_DECL;
464 case FUNCTION_DECL: return TS_FUNCTION_DECL;
465 case LABEL_DECL: return TS_LABEL_DECL;
466 case PARM_DECL: return TS_PARM_DECL;
467 case RESULT_DECL: return TS_RESULT_DECL;
468 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
469 case TYPE_DECL: return TS_TYPE_DECL;
470 case VAR_DECL: return TS_VAR_DECL;
471 default: return TS_DECL_NON_COMMON;
474 case tcc_type: return TS_TYPE_NON_COMMON;
476 case tcc_binary:
477 case tcc_comparison:
478 case tcc_expression:
479 case tcc_reference:
480 case tcc_statement:
481 case tcc_unary:
482 case tcc_vl_exp: return TS_EXP;
484 default: /* tcc_constant and tcc_exceptional */
485 break;
488 switch (code)
490 /* tcc_constant cases. */
491 case COMPLEX_CST: return TS_COMPLEX;
492 case FIXED_CST: return TS_FIXED_CST;
493 case INTEGER_CST: return TS_INT_CST;
494 case POLY_INT_CST: return TS_POLY_INT_CST;
495 case REAL_CST: return TS_REAL_CST;
496 case STRING_CST: return TS_STRING;
497 case VECTOR_CST: return TS_VECTOR;
498 case VOID_CST: return TS_TYPED;
500 /* tcc_exceptional cases. */
501 case BLOCK: return TS_BLOCK;
502 case CONSTRUCTOR: return TS_CONSTRUCTOR;
503 case ERROR_MARK: return TS_COMMON;
504 case IDENTIFIER_NODE: return TS_IDENTIFIER;
505 case OMP_CLAUSE: return TS_OMP_CLAUSE;
506 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
507 case PLACEHOLDER_EXPR: return TS_COMMON;
508 case SSA_NAME: return TS_SSA_NAME;
509 case STATEMENT_LIST: return TS_STATEMENT_LIST;
510 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
511 case TREE_BINFO: return TS_BINFO;
512 case TREE_LIST: return TS_LIST;
513 case TREE_VEC: return TS_VEC;
515 default:
516 gcc_unreachable ();
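/* Illustrative examples of the mapping above:
     tree_node_structure_for_code (VAR_DECL)  == TS_VAR_DECL
     tree_node_structure_for_code (PLUS_EXPR) == TS_EXP
     tree_node_structure_for_code (BLOCK)     == TS_BLOCK
   i.e. all expression-class codes share TS_EXP, while declarations and
   tcc_exceptional codes map to code-specific structures.  */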
521 /* Initialize tree_contains_struct to describe the hierarchy of tree
522 nodes. */
524 static void
525 initialize_tree_contains_struct (void)
527 unsigned i;
529 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
531 enum tree_code code;
532 enum tree_node_structure_enum ts_code;
534 code = (enum tree_code) i;
535 ts_code = tree_node_structure_for_code (code);
537 /* Mark the TS structure itself. */
538 tree_contains_struct[code][ts_code] = 1;
540 /* Mark all the structures that TS is derived from. */
541 switch (ts_code)
543 case TS_TYPED:
544 case TS_BLOCK:
545 case TS_OPTIMIZATION:
546 case TS_TARGET_OPTION:
547 MARK_TS_BASE (code);
548 break;
550 case TS_COMMON:
551 case TS_INT_CST:
552 case TS_POLY_INT_CST:
553 case TS_REAL_CST:
554 case TS_FIXED_CST:
555 case TS_VECTOR:
556 case TS_STRING:
557 case TS_COMPLEX:
558 case TS_SSA_NAME:
559 case TS_CONSTRUCTOR:
560 case TS_EXP:
561 case TS_STATEMENT_LIST:
562 MARK_TS_TYPED (code);
563 break;
565 case TS_IDENTIFIER:
566 case TS_DECL_MINIMAL:
567 case TS_TYPE_COMMON:
568 case TS_LIST:
569 case TS_VEC:
570 case TS_BINFO:
571 case TS_OMP_CLAUSE:
572 MARK_TS_COMMON (code);
573 break;
575 case TS_TYPE_WITH_LANG_SPECIFIC:
576 MARK_TS_TYPE_COMMON (code);
577 break;
579 case TS_TYPE_NON_COMMON:
580 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
581 break;
583 case TS_DECL_COMMON:
584 MARK_TS_DECL_MINIMAL (code);
585 break;
587 case TS_DECL_WRTL:
588 case TS_CONST_DECL:
589 MARK_TS_DECL_COMMON (code);
590 break;
592 case TS_DECL_NON_COMMON:
593 MARK_TS_DECL_WITH_VIS (code);
594 break;
596 case TS_DECL_WITH_VIS:
597 case TS_PARM_DECL:
598 case TS_LABEL_DECL:
599 case TS_RESULT_DECL:
600 MARK_TS_DECL_WRTL (code);
601 break;
603 case TS_FIELD_DECL:
604 MARK_TS_DECL_COMMON (code);
605 break;
607 case TS_VAR_DECL:
608 MARK_TS_DECL_WITH_VIS (code);
609 break;
611 case TS_TYPE_DECL:
612 case TS_FUNCTION_DECL:
613 MARK_TS_DECL_NON_COMMON (code);
614 break;
616 case TS_TRANSLATION_UNIT_DECL:
617 MARK_TS_DECL_COMMON (code);
618 break;
620 default:
621 gcc_unreachable ();
625 /* Basic consistency checks for attributes used in fold. */
626 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
627 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
628 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
631 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
632 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
633 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
634 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
635 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
638 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
639 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
640 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
641 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
642 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
645 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
646 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
647 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
648 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
649 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
652 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
653 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
654 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
655 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
656 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
657 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
658 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
659 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
662 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
663 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
664 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
665 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
669 /* Init tree.c. */
671 void
672 init_ttree (void)
674 /* Initialize the hash table of types. */
675 type_hash_table
676 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
678 debug_expr_for_decl
679 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
681 value_expr_for_decl
682 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
684 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
686 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
688 int_cst_node = make_int_cst (1, 1);
690 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
692 cl_optimization_node = make_node (OPTIMIZATION_NODE);
693 cl_target_option_node = make_node (TARGET_OPTION_NODE);
695 /* Initialize the tree_contains_struct array. */
696 initialize_tree_contains_struct ();
697 lang_hooks.init_ts ();
701 /* The name of the object as the assembler will see it (but before any
702 translations made by ASM_OUTPUT_LABELREF). Often this is the same
703 as DECL_NAME. It is an IDENTIFIER_NODE. */
704 tree
705 decl_assembler_name (tree decl)
707 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
708 lang_hooks.set_decl_assembler_name (decl);
709 return DECL_ASSEMBLER_NAME_RAW (decl);
712 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
713 (either of which may be NULL). Inform the FE if this changes the
714 name. */
716 void
717 overwrite_decl_assembler_name (tree decl, tree name)
719 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
720 lang_hooks.overwrite_decl_assembler_name (decl, name);
723 /* When the target supports COMDAT groups, this indicates which group the
724 DECL is associated with. This can be either an IDENTIFIER_NODE or a
725 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
726 tree
727 decl_comdat_group (const_tree node)
729 struct symtab_node *snode = symtab_node::get (node);
730 if (!snode)
731 return NULL;
732 return snode->get_comdat_group ();
735 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
736 tree
737 decl_comdat_group_id (const_tree node)
739 struct symtab_node *snode = symtab_node::get (node);
740 if (!snode)
741 return NULL;
742 return snode->get_comdat_group_id ();
745 /* When the target supports named sections, return the section name of
746 NODE as a string, or NULL if it is in no section. */
747 const char *
748 decl_section_name (const_tree node)
750 struct symtab_node *snode = symtab_node::get (node);
751 if (!snode)
752 return NULL;
753 return snode->get_section ();
756 /* Set the section name of NODE to VALUE (a plain section name string,
757 or NULL). */
758 void
759 set_decl_section_name (tree node, const char *value)
761 struct symtab_node *snode;
763 if (value == NULL)
765 snode = symtab_node::get (node);
766 if (!snode)
767 return;
769 else if (VAR_P (node))
770 snode = varpool_node::get_create (node);
771 else
772 snode = cgraph_node::get_create (node);
773 snode->set_section (value);
776 /* Return TLS model of a variable NODE. */
777 enum tls_model
778 decl_tls_model (const_tree node)
780 struct varpool_node *snode = varpool_node::get (node);
781 if (!snode)
782 return TLS_MODEL_NONE;
783 return snode->tls_model;
786 /* Set TLS model of variable NODE to MODEL. */
787 void
788 set_decl_tls_model (tree node, enum tls_model model)
790 struct varpool_node *vnode;
792 if (model == TLS_MODEL_NONE)
794 vnode = varpool_node::get (node);
795 if (!vnode)
796 return;
798 else
799 vnode = varpool_node::get_create (node);
800 vnode->tls_model = model;
803 /* Compute the number of bytes occupied by a tree with code CODE.
804 This function cannot be used for nodes that have variable sizes,
805 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
806 size_t
807 tree_code_size (enum tree_code code)
809 switch (TREE_CODE_CLASS (code))
811 case tcc_declaration: /* A decl node */
812 switch (code)
814 case FIELD_DECL: return sizeof (tree_field_decl);
815 case PARM_DECL: return sizeof (tree_parm_decl);
816 case VAR_DECL: return sizeof (tree_var_decl);
817 case LABEL_DECL: return sizeof (tree_label_decl);
818 case RESULT_DECL: return sizeof (tree_result_decl);
819 case CONST_DECL: return sizeof (tree_const_decl);
820 case TYPE_DECL: return sizeof (tree_type_decl);
821 case FUNCTION_DECL: return sizeof (tree_function_decl);
822 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
823 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
824 case NAMESPACE_DECL:
825 case IMPORTED_DECL:
826 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
827 default:
828 gcc_checking_assert (code >= NUM_TREE_CODES);
829 return lang_hooks.tree_size (code);
832 case tcc_type: /* a type node */
833 switch (code)
835 case OFFSET_TYPE:
836 case ENUMERAL_TYPE:
837 case BOOLEAN_TYPE:
838 case INTEGER_TYPE:
839 case REAL_TYPE:
840 case POINTER_TYPE:
841 case REFERENCE_TYPE:
842 case NULLPTR_TYPE:
843 case FIXED_POINT_TYPE:
844 case COMPLEX_TYPE:
845 case VECTOR_TYPE:
846 case ARRAY_TYPE:
847 case RECORD_TYPE:
848 case UNION_TYPE:
849 case QUAL_UNION_TYPE:
850 case VOID_TYPE:
851 case FUNCTION_TYPE:
852 case METHOD_TYPE:
853 case LANG_TYPE: return sizeof (tree_type_non_common);
854 default:
855 gcc_checking_assert (code >= NUM_TREE_CODES);
856 return lang_hooks.tree_size (code);
859 case tcc_reference: /* a reference */
860 case tcc_expression: /* an expression */
861 case tcc_statement: /* an expression with side effects */
862 case tcc_comparison: /* a comparison expression */
863 case tcc_unary: /* a unary arithmetic expression */
864 case tcc_binary: /* a binary arithmetic expression */
865 return (sizeof (struct tree_exp)
866 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
868 case tcc_constant: /* a constant */
869 switch (code)
871 case VOID_CST: return sizeof (tree_typed);
872 case INTEGER_CST: gcc_unreachable ();
873 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
874 case REAL_CST: return sizeof (tree_real_cst);
875 case FIXED_CST: return sizeof (tree_fixed_cst);
876 case COMPLEX_CST: return sizeof (tree_complex);
877 case VECTOR_CST: gcc_unreachable ();
878 case STRING_CST: gcc_unreachable ();
879 default:
880 gcc_checking_assert (code >= NUM_TREE_CODES);
881 return lang_hooks.tree_size (code);
884 case tcc_exceptional: /* something random, like an identifier. */
885 switch (code)
887 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
888 case TREE_LIST: return sizeof (tree_list);
890 case ERROR_MARK:
891 case PLACEHOLDER_EXPR: return sizeof (tree_common);
893 case TREE_VEC: gcc_unreachable ();
894 case OMP_CLAUSE: gcc_unreachable ();
896 case SSA_NAME: return sizeof (tree_ssa_name);
898 case STATEMENT_LIST: return sizeof (tree_statement_list);
899 case BLOCK: return sizeof (struct tree_block);
900 case CONSTRUCTOR: return sizeof (tree_constructor);
901 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
902 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
904 default:
905 gcc_checking_assert (code >= NUM_TREE_CODES);
906 return lang_hooks.tree_size (code);
909 default:
910 gcc_unreachable ();
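/* Illustrative size computation: for a binary code such as PLUS_EXPR,
   TREE_CODE_LENGTH is 2, so
     tree_code_size (PLUS_EXPR)
       == sizeof (struct tree_exp) + (2 - 1) * sizeof (tree)
   because struct tree_exp already embeds storage for one operand and each
   further operand adds one tree pointer.  The exact number is of course
   host-dependent.  */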
914 /* Compute the number of bytes occupied by NODE. This routine only
915 looks at TREE_CODE, except for those nodes that have variable sizes. */
916 size_t
917 tree_size (const_tree node)
919 const enum tree_code code = TREE_CODE (node);
920 switch (code)
922 case INTEGER_CST:
923 return (sizeof (struct tree_int_cst)
924 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
926 case TREE_BINFO:
927 return (offsetof (struct tree_binfo, base_binfos)
928 + vec<tree, va_gc>
929 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
931 case TREE_VEC:
932 return (sizeof (struct tree_vec)
933 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
935 case VECTOR_CST:
936 return (sizeof (struct tree_vector)
937 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
939 case STRING_CST:
940 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
942 case OMP_CLAUSE:
943 return (sizeof (struct tree_omp_clause)
944 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
945 * sizeof (tree));
947 default:
948 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
949 return (sizeof (struct tree_exp)
950 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
951 else
952 return tree_code_size (code);
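/* Illustrative variable-size cases, matching the formulas above: an
   INTEGER_CST with TREE_INT_CST_EXT_NUNITS == 2 occupies
     sizeof (struct tree_int_cst) + (2 - 1) * sizeof (HOST_WIDE_INT)
   bytes, and a TREE_VEC of length N occupies
     sizeof (struct tree_vec) + (N - 1) * sizeof (tree).  */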
956 /* Return tree node kind based on tree CODE. */
958 static tree_node_kind
959 get_stats_node_kind (enum tree_code code)
961 enum tree_code_class type = TREE_CODE_CLASS (code);
963 switch (type)
965 case tcc_declaration: /* A decl node */
966 return d_kind;
967 case tcc_type: /* a type node */
968 return t_kind;
969 case tcc_statement: /* an expression with side effects */
970 return s_kind;
971 case tcc_reference: /* a reference */
972 return r_kind;
973 case tcc_expression: /* an expression */
974 case tcc_comparison: /* a comparison expression */
975 case tcc_unary: /* a unary arithmetic expression */
976 case tcc_binary: /* a binary arithmetic expression */
977 return e_kind;
978 case tcc_constant: /* a constant */
979 return c_kind;
980 case tcc_exceptional: /* something random, like an identifier. */
981 switch (code)
983 case IDENTIFIER_NODE:
984 return id_kind;
985 case TREE_VEC:
986 return vec_kind;
987 case TREE_BINFO:
988 return binfo_kind;
989 case SSA_NAME:
990 return ssa_name_kind;
991 case BLOCK:
992 return b_kind;
993 case CONSTRUCTOR:
994 return constr_kind;
995 case OMP_CLAUSE:
996 return omp_clause_kind;
997 default:
998 return x_kind;
1000 break;
1001 case tcc_vl_exp:
1002 return e_kind;
1003 default:
1004 gcc_unreachable ();
1008 /* Record interesting allocation statistics for a tree node with CODE
1009 and LENGTH. */
1011 static void
1012 record_node_allocation_statistics (enum tree_code code, size_t length)
1014 if (!GATHER_STATISTICS)
1015 return;
1017 tree_node_kind kind = get_stats_node_kind (code);
1019 tree_code_counts[(int) code]++;
1020 tree_node_counts[(int) kind]++;
1021 tree_node_sizes[(int) kind] += length;
1024 /* Allocate and return a new UID from the DECL_UID namespace. */
1026 int
1027 allocate_decl_uid (void)
1029 return next_decl_uid++;
1032 /* Return a newly allocated node of code CODE. For decl and type
1033 nodes, some other fields are initialized. The rest of the node is
1034 initialized to zero. This function cannot be used for TREE_VEC,
1035 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1036 tree_code_size.
1038 Achoo! I got a code in the node. */
1040 tree
1041 make_node (enum tree_code code MEM_STAT_DECL)
1043 tree t;
1044 enum tree_code_class type = TREE_CODE_CLASS (code);
1045 size_t length = tree_code_size (code);
1047 record_node_allocation_statistics (code, length);
1049 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1050 TREE_SET_CODE (t, code);
1052 switch (type)
1054 case tcc_statement:
1055 if (code != DEBUG_BEGIN_STMT)
1056 TREE_SIDE_EFFECTS (t) = 1;
1057 break;
1059 case tcc_declaration:
1060 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1062 if (code == FUNCTION_DECL)
1064 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1065 SET_DECL_MODE (t, FUNCTION_MODE);
1067 else
1068 SET_DECL_ALIGN (t, 1);
1070 DECL_SOURCE_LOCATION (t) = input_location;
1071 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1072 DECL_UID (t) = --next_debug_decl_uid;
1073 else
1075 DECL_UID (t) = allocate_decl_uid ();
1076 SET_DECL_PT_UID (t, -1);
1078 if (TREE_CODE (t) == LABEL_DECL)
1079 LABEL_DECL_UID (t) = -1;
1081 break;
1083 case tcc_type:
1084 TYPE_UID (t) = next_type_uid++;
1085 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1086 TYPE_USER_ALIGN (t) = 0;
1087 TYPE_MAIN_VARIANT (t) = t;
1088 TYPE_CANONICAL (t) = t;
1090 /* Default to no attributes for type, but let target change that. */
1091 TYPE_ATTRIBUTES (t) = NULL_TREE;
1092 targetm.set_default_type_attributes (t);
1094 /* We have not yet computed the alias set for this type. */
1095 TYPE_ALIAS_SET (t) = -1;
1096 break;
1098 case tcc_constant:
1099 TREE_CONSTANT (t) = 1;
1100 break;
1102 case tcc_expression:
1103 switch (code)
1105 case INIT_EXPR:
1106 case MODIFY_EXPR:
1107 case VA_ARG_EXPR:
1108 case PREDECREMENT_EXPR:
1109 case PREINCREMENT_EXPR:
1110 case POSTDECREMENT_EXPR:
1111 case POSTINCREMENT_EXPR:
1112 /* All of these have side-effects, no matter what their
1113 operands are. */
1114 TREE_SIDE_EFFECTS (t) = 1;
1115 break;
1117 default:
1118 break;
1120 break;
1122 case tcc_exceptional:
1123 switch (code)
1125 case TARGET_OPTION_NODE:
1126 TREE_TARGET_OPTION(t)
1127 = ggc_cleared_alloc<struct cl_target_option> ();
1128 break;
1130 case OPTIMIZATION_NODE:
1131 TREE_OPTIMIZATION (t)
1132 = ggc_cleared_alloc<struct cl_optimization> ();
1133 break;
1135 default:
1136 break;
1138 break;
1140 default:
1141 /* Other classes need no special treatment. */
1142 break;
1145 return t;
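/* Illustrative usage sketch (assuming the compiler is past init_ttree and
   the global trees are set up): a front end typically writes
     tree t = make_node (RECORD_TYPE);
   and gets back a zeroed node with a fresh TYPE_UID, TYPE_ALIGN of
   BITS_PER_UNIT, and TYPE_MAIN_VARIANT/TYPE_CANONICAL pointing at the node
   itself, exactly as set in the tcc_type case above.  */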
1148 /* Free tree node. */
1150 void
1151 free_node (tree node)
1153 enum tree_code code = TREE_CODE (node);
1154 if (GATHER_STATISTICS)
1156 enum tree_node_kind kind = get_stats_node_kind (code);
1158 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1159 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1160 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1162 tree_code_counts[(int) TREE_CODE (node)]--;
1163 tree_node_counts[(int) kind]--;
1164 tree_node_sizes[(int) kind] -= tree_size (node);
1166 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1167 vec_free (CONSTRUCTOR_ELTS (node));
1168 else if (code == BLOCK)
1169 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1170 else if (code == TREE_BINFO)
1171 vec_free (BINFO_BASE_ACCESSES (node));
1172 ggc_free (node);
1175 /* Return a new node with the same contents as NODE except that its
1176 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1178 tree
1179 copy_node (tree node MEM_STAT_DECL)
1181 tree t;
1182 enum tree_code code = TREE_CODE (node);
1183 size_t length;
1185 gcc_assert (code != STATEMENT_LIST);
1187 length = tree_size (node);
1188 record_node_allocation_statistics (code, length);
1189 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1190 memcpy (t, node, length);
1192 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1193 TREE_CHAIN (t) = 0;
1194 TREE_ASM_WRITTEN (t) = 0;
1195 TREE_VISITED (t) = 0;
1197 if (TREE_CODE_CLASS (code) == tcc_declaration)
1199 if (code == DEBUG_EXPR_DECL)
1200 DECL_UID (t) = --next_debug_decl_uid;
1201 else
1203 DECL_UID (t) = allocate_decl_uid ();
1204 if (DECL_PT_UID_SET_P (node))
1205 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1207 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1208 && DECL_HAS_VALUE_EXPR_P (node))
1210 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1211 DECL_HAS_VALUE_EXPR_P (t) = 1;
1213 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1214 if (VAR_P (node))
1216 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1217 t->decl_with_vis.symtab_node = NULL;
1219 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1221 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1222 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1224 if (TREE_CODE (node) == FUNCTION_DECL)
1226 DECL_STRUCT_FUNCTION (t) = NULL;
1227 t->decl_with_vis.symtab_node = NULL;
1230 else if (TREE_CODE_CLASS (code) == tcc_type)
1232 TYPE_UID (t) = next_type_uid++;
1233 /* The following is so that the debug code for
1234 the copy is different from the original type.
1235 The two statements usually duplicate each other
1236 (because they clear fields of the same union),
1237 but the optimizer should catch that. */
1238 TYPE_SYMTAB_ADDRESS (t) = 0;
1239 TYPE_SYMTAB_DIE (t) = 0;
1241 /* Do not copy the values cache. */
1242 if (TYPE_CACHED_VALUES_P (t))
1244 TYPE_CACHED_VALUES_P (t) = 0;
1245 TYPE_CACHED_VALUES (t) = NULL_TREE;
1248 else if (code == TARGET_OPTION_NODE)
1250 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1251 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1252 sizeof (struct cl_target_option));
1254 else if (code == OPTIMIZATION_NODE)
1256 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1257 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1258 sizeof (struct cl_optimization));
1261 return t;
1264 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1265 For example, this can copy a list made of TREE_LIST nodes. */
1267 tree
1268 copy_list (tree list)
1270 tree head;
1271 tree prev, next;
1273 if (list == 0)
1274 return 0;
1276 head = prev = copy_node (list);
1277 next = TREE_CHAIN (list);
1278 while (next)
1280 TREE_CHAIN (prev) = copy_node (next);
1281 prev = TREE_CHAIN (prev);
1282 next = TREE_CHAIN (next);
1284 return head;
1288 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1289 INTEGER_CST with value CST and type TYPE. */
1291 static unsigned int
1292 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1294 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1295 /* We need extra HWIs if CST is an unsigned integer with its
1296 upper bit set. */
1297 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1298 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1299 return cst.get_len ();
1302 /* Return a new INTEGER_CST with value CST and type TYPE. */
1304 static tree
1305 build_new_int_cst (tree type, const wide_int &cst)
1307 unsigned int len = cst.get_len ();
1308 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1309 tree nt = make_int_cst (len, ext_len);
1311 if (len < ext_len)
1313 --ext_len;
1314 TREE_INT_CST_ELT (nt, ext_len)
1315 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1316 for (unsigned int i = len; i < ext_len; ++i)
1317 TREE_INT_CST_ELT (nt, i) = -1;
1319 else if (TYPE_UNSIGNED (type)
1320 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1322 len--;
1323 TREE_INT_CST_ELT (nt, len)
1324 = zext_hwi (cst.elt (len),
1325 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1328 for (unsigned int i = 0; i < len; i++)
1329 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1330 TREE_TYPE (nt) = type;
1331 return nt;
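/* Illustrative extension case (assuming HOST_BITS_PER_WIDE_INT == 64): for
   a 64-bit unsigned TYPE and CST == 0x8000000000000000, the wide_int has
   len == 1 but its top bit is set, so get_int_cst_ext_nunits returns 2 and
   build_new_int_cst stores the elements
     { 0x8000000000000000, 0 }
   i.e. an extra zero HOST_WIDE_INT so the stored value reads as
   non-negative.  */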
1334 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1336 static tree
1337 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1338 CXX_MEM_STAT_INFO)
1340 size_t length = sizeof (struct tree_poly_int_cst);
1341 record_node_allocation_statistics (POLY_INT_CST, length);
1343 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1345 TREE_SET_CODE (t, POLY_INT_CST);
1346 TREE_CONSTANT (t) = 1;
1347 TREE_TYPE (t) = type;
1348 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1349 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1350 return t;
1353 /* Create a constant tree that contains CST sign-extended to TYPE. */
1355 tree
1356 build_int_cst (tree type, poly_int64 cst)
1358 /* Support legacy code. */
1359 if (!type)
1360 type = integer_type_node;
1362 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1365 /* Create a constant tree that contains CST zero-extended to TYPE. */
1367 tree
1368 build_int_cstu (tree type, poly_uint64 cst)
1370 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1373 /* Create a constant tree that contains CST sign-extended to TYPE. */
1375 tree
1376 build_int_cst_type (tree type, poly_int64 cst)
1378 gcc_assert (type);
1379 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
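/* Illustrative usage of the build_int_cst family:
     tree ten  = build_int_cst (integer_type_node, 10);
     tree ones = build_int_cst (unsigned_type_node, -1);
   The second call sign-extends -1 and then forces it to the precision of
   unsigned int, yielding the all-ones value of that type.  */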
1382 /* Construct a tree of type TYPE with the value given by CST. The signedness
1383 of CST is assumed to be the same as the signedness of TYPE. */
1385 tree
1386 double_int_to_tree (tree type, double_int cst)
1388 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1391 /* We force the wide_int CST to the range of the type TYPE by sign or
1392 zero extending it. OVERFLOWABLE indicates if we are interested in
1393 overflow of the value, when >0 we are only interested in signed
1394 overflow, for <0 we are interested in any overflow. OVERFLOWED
1395 indicates whether overflow has already occurred. We force
1396 the value to be within range of TYPE (by setting to 0 or 1 all
1397 the bits outside the type's range). We set TREE_OVERFLOW if
1399 OVERFLOWED is nonzero,
1400 or OVERFLOWABLE is >0 and signed overflow occurs
1401 or OVERFLOWABLE is <0 and any overflow occurs
1402 We return a new tree node for the extended wide_int. The node
1403 is shared if no overflow flags are set. */
1406 tree
1407 force_fit_type (tree type, const poly_wide_int_ref &cst,
1408 int overflowable, bool overflowed)
1410 signop sign = TYPE_SIGN (type);
1412 /* If we need to set overflow flags, return a new unshared node. */
1413 if (overflowed || !wi::fits_to_tree_p (cst, type))
1415 if (overflowed
1416 || overflowable < 0
1417 || (overflowable > 0 && sign == SIGNED))
1419 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1420 sign);
1421 tree t;
1422 if (tmp.is_constant ())
1423 t = build_new_int_cst (type, tmp.coeffs[0]);
1424 else
1426 tree coeffs[NUM_POLY_INT_COEFFS];
1427 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1429 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1430 TREE_OVERFLOW (coeffs[i]) = 1;
1432 t = build_new_poly_int_cst (type, coeffs);
1434 TREE_OVERFLOW (t) = 1;
1435 return t;
1439 /* Else build a shared node. */
1440 return wide_int_to_tree (type, cst);
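/* Illustrative sketch (a worked case, not exhaustive): for an 8-bit
   unsigned TYPE and a value of 300,
     force_fit_type (type, wi::shwi (300, 16), -1, false)
   does not fit, and OVERFLOWABLE < 0 asks for any overflow to be flagged,
   so the result is an unshared INTEGER_CST with value 44 (300 mod 256) and
   TREE_OVERFLOW set.  With OVERFLOWABLE == 0 and OVERFLOWED false the
   value would instead be wrapped silently via wide_int_to_tree.  */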
1443 /* These are the hash table functions for the hash table of INTEGER_CST
1444 nodes of a sizetype. */
1446 /* Return the hash code X, an INTEGER_CST. */
1448 hashval_t
1449 int_cst_hasher::hash (tree x)
1451 const_tree const t = x;
1452 hashval_t code = TYPE_UID (TREE_TYPE (t));
1453 int i;
1455 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1456 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1458 return code;
1461 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1462 is the same as the value given by *Y. */
1464 bool
1465 int_cst_hasher::equal (tree x, tree y)
1467 const_tree const xt = x;
1468 const_tree const yt = y;
1470 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1471 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1472 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1473 return false;
1475 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1476 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1477 return false;
1479 return true;
1482 /* Create an INTEGER_CST node of type TYPE and value CST.
1483 The returned node is always shared. For small integers we use a
1484 per-type vector cache, for larger ones we use a single hash table.
1485 The value is extended from its precision according to the sign of
1486 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1487 the upper bits and ensures that hashing and value equality based
1488 upon the underlying HOST_WIDE_INTs works without masking. */
1490 static tree
1491 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1493 tree t;
1494 int ix = -1;
1495 int limit = 0;
1497 gcc_assert (type);
1498 unsigned int prec = TYPE_PRECISION (type);
1499 signop sgn = TYPE_SIGN (type);
1501 /* Verify that everything is canonical. */
1502 int l = pcst.get_len ();
1503 if (l > 1)
1505 if (pcst.elt (l - 1) == 0)
1506 gcc_checking_assert (pcst.elt (l - 2) < 0);
1507 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1508 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1511 wide_int cst = wide_int::from (pcst, prec, sgn);
1512 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1514 if (ext_len == 1)
1516 /* We just need to store a single HOST_WIDE_INT. */
1517 HOST_WIDE_INT hwi;
1518 if (TYPE_UNSIGNED (type))
1519 hwi = cst.to_uhwi ();
1520 else
1521 hwi = cst.to_shwi ();
1523 switch (TREE_CODE (type))
1525 case NULLPTR_TYPE:
1526 gcc_assert (hwi == 0);
1527 /* Fallthru. */
1529 case POINTER_TYPE:
1530 case REFERENCE_TYPE:
1531 /* Cache NULL pointer and zero bounds. */
1532 if (hwi == 0)
1534 limit = 1;
1535 ix = 0;
1537 break;
1539 case BOOLEAN_TYPE:
1540 /* Cache false or true. */
1541 limit = 2;
1542 if (IN_RANGE (hwi, 0, 1))
1543 ix = hwi;
1544 break;
1546 case INTEGER_TYPE:
1547 case OFFSET_TYPE:
1548 if (TYPE_SIGN (type) == UNSIGNED)
1550 /* Cache [0, N). */
1551 limit = INTEGER_SHARE_LIMIT;
1552 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1553 ix = hwi;
1555 else
1557 /* Cache [-1, N). */
1558 limit = INTEGER_SHARE_LIMIT + 1;
1559 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1560 ix = hwi + 1;
1562 break;
1564 case ENUMERAL_TYPE:
1565 break;
1567 default:
1568 gcc_unreachable ();
1571 if (ix >= 0)
1573 /* Look for it in the type's vector of small shared ints. */
1574 if (!TYPE_CACHED_VALUES_P (type))
1576 TYPE_CACHED_VALUES_P (type) = 1;
1577 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1580 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1581 if (t)
1582 /* Make sure no one is clobbering the shared constant. */
1583 gcc_checking_assert (TREE_TYPE (t) == type
1584 && TREE_INT_CST_NUNITS (t) == 1
1585 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1586 && TREE_INT_CST_EXT_NUNITS (t) == 1
1587 && TREE_INT_CST_ELT (t, 0) == hwi);
1588 else
1590 /* Create a new shared int. */
1591 t = build_new_int_cst (type, cst);
1592 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1595 else
1597 /* Use the cache of larger shared ints, using int_cst_node as
1598 a temporary. */
1600 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1601 TREE_TYPE (int_cst_node) = type;
1603 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1604 t = *slot;
1605 if (!t)
1607 /* Insert this one into the hash table. */
1608 t = int_cst_node;
1609 *slot = t;
1610 /* Make a new node for next time round. */
1611 int_cst_node = make_int_cst (1, 1);
1615 else
1617 /* The value either hashes properly or we drop it on the floor
1618 for the gc to take care of. There will not be enough of them
1619 to worry about. */
1621 tree nt = build_new_int_cst (type, cst);
1622 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1623 t = *slot;
1624 if (!t)
1626 /* Insert this one into the hash table. */
1627 t = nt;
1628 *slot = t;
1630 else
1631 ggc_free (nt);
1634 return t;
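/* Illustrative sharing behaviour: because of the per-type cache above, two
   independent calls such as
     tree a = build_int_cst (integer_type_node, 1);
     tree b = build_int_cst (integer_type_node, 1);
   return pointer-equal nodes (1 is below INTEGER_SHARE_LIMIT), while large
   constants are instead shared through int_cst_hash_table.  */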
1637 hashval_t
1638 poly_int_cst_hasher::hash (tree t)
1640 inchash::hash hstate;
1642 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1643 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1644 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1646 return hstate.end ();
1649 bool
1650 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1652 if (TREE_TYPE (x) != y.first)
1653 return false;
1654 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1655 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1656 return false;
1657 return true;
1660 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1661 The elements must also have type TYPE. */
1663 tree
1664 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1666 unsigned int prec = TYPE_PRECISION (type);
1667 gcc_assert (prec <= values.coeffs[0].get_precision ());
1668 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1670 inchash::hash h;
1671 h.add_int (TYPE_UID (type));
1672 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1673 h.add_wide_int (c.coeffs[i]);
1674 poly_int_cst_hasher::compare_type comp (type, &c);
1675 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1676 INSERT);
1677 if (*slot == NULL_TREE)
1679 tree coeffs[NUM_POLY_INT_COEFFS];
1680 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1681 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1682 *slot = build_new_poly_int_cst (type, coeffs);
1684 return *slot;
1687 /* Create a constant tree with value VALUE in type TYPE. */
1689 tree
1690 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1692 if (value.is_constant ())
1693 return wide_int_to_tree_1 (type, value.coeffs[0]);
1694 return build_poly_int_cst (type, value);
1697 void
1698 cache_integer_cst (tree t)
1700 tree type = TREE_TYPE (t);
1701 int ix = -1;
1702 int limit = 0;
1703 int prec = TYPE_PRECISION (type);
1705 gcc_assert (!TREE_OVERFLOW (t));
1707 switch (TREE_CODE (type))
1709 case NULLPTR_TYPE:
1710 gcc_assert (integer_zerop (t));
1711 /* Fallthru. */
1713 case POINTER_TYPE:
1714 case REFERENCE_TYPE:
1715 /* Cache NULL pointer. */
1716 if (integer_zerop (t))
1718 limit = 1;
1719 ix = 0;
1721 break;
1723 case BOOLEAN_TYPE:
1724 /* Cache false or true. */
1725 limit = 2;
1726 if (wi::ltu_p (wi::to_wide (t), 2))
1727 ix = TREE_INT_CST_ELT (t, 0);
1728 break;
1730 case INTEGER_TYPE:
1731 case OFFSET_TYPE:
1732 if (TYPE_UNSIGNED (type))
1734 /* Cache 0..N */
1735 limit = INTEGER_SHARE_LIMIT;
1737 /* This is a little hokey, but if the prec is smaller than
1738 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1739 obvious test will not get the correct answer. */
1740 if (prec < HOST_BITS_PER_WIDE_INT)
1742 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1743 ix = tree_to_uhwi (t);
1745 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1746 ix = tree_to_uhwi (t);
1748 else
1750 /* Cache -1..N */
1751 limit = INTEGER_SHARE_LIMIT + 1;
1753 if (integer_minus_onep (t))
1754 ix = 0;
1755 else if (!wi::neg_p (wi::to_wide (t)))
1757 if (prec < HOST_BITS_PER_WIDE_INT)
1759 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1760 ix = tree_to_shwi (t) + 1;
1762 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1763 ix = tree_to_shwi (t) + 1;
1766 break;
1768 case ENUMERAL_TYPE:
1769 break;
1771 default:
1772 gcc_unreachable ();
1775 if (ix >= 0)
1777 /* Look for it in the type's vector of small shared ints. */
1778 if (!TYPE_CACHED_VALUES_P (type))
1780 TYPE_CACHED_VALUES_P (type) = 1;
1781 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1784 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1785 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1787 else
1789 /* Use the cache of larger shared ints. */
1790 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1791 /* If there is already an entry for the number verify it's the
1792 same. */
1793 if (*slot)
1794 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1795 else
1796 /* Otherwise insert this one into the hash table. */
1797 *slot = t;
1802 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1803 and the rest are zeros. */
1805 tree
1806 build_low_bits_mask (tree type, unsigned bits)
1808 gcc_assert (bits <= TYPE_PRECISION (type));
1810 return wide_int_to_tree (type, wi::mask (bits, false,
1811 TYPE_PRECISION (type)));
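/* Illustrative example: build_low_bits_mask (unsigned_type_node, 4) yields
   the INTEGER_CST 0xf, since wi::mask (4, false, prec) sets exactly the
   low 4 bits within the type's precision.  */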
1814 /* Return true if X is an integer constant that can be expressed in a signed
1815 or unsigned HOST_WIDE_INT without loss of precision. */
1817 bool
1818 cst_and_fits_in_hwi (const_tree x)
1820 return (TREE_CODE (x) == INTEGER_CST
1821 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1824 /* Build a newly constructed VECTOR_CST with the given values of
1825 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1827 tree
1828 make_vector (unsigned log2_npatterns,
1829 unsigned int nelts_per_pattern MEM_STAT_DECL)
1831 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1832 tree t;
1833 unsigned npatterns = 1 << log2_npatterns;
1834 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1835 unsigned length = (sizeof (struct tree_vector)
1836 + (encoded_nelts - 1) * sizeof (tree));
1838 record_node_allocation_statistics (VECTOR_CST, length);
1840 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1842 TREE_SET_CODE (t, VECTOR_CST);
1843 TREE_CONSTANT (t) = 1;
1844 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1845 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1847 return t;
1850 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1851 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1853 tree
1854 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1856 unsigned HOST_WIDE_INT idx, nelts;
1857 tree value;
1859 /* We can't construct a VECTOR_CST for a variable number of elements. */
1860 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1861 tree_vector_builder vec (type, nelts, 1);
1862 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1864 if (TREE_CODE (value) == VECTOR_CST)
1866 /* If NELTS is constant then this must be too. */
1867 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1868 for (unsigned i = 0; i < sub_nelts; ++i)
1869 vec.quick_push (VECTOR_CST_ELT (value, i));
1871 else
1872 vec.quick_push (value);
1874 while (vec.length () < nelts)
1875 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1877 return vec.build ();
1880 /* Build a vector of type VECTYPE in which every element is SC. */
1881 tree
1882 build_vector_from_val (tree vectype, tree sc)
1884 unsigned HOST_WIDE_INT i, nunits;
1886 if (sc == error_mark_node)
1887 return sc;
1889 /* Verify that the vector type is suitable for SC. Note that there
1890 is some inconsistency in the type-system with respect to restrict
1891 qualifications of pointers. Vector types always have a main-variant
1892 element type and the qualification is applied to the vector-type.
1893 So TREE_TYPE (vector-type) does not return a properly qualified
1894 vector element-type. */
1895 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1896 TREE_TYPE (vectype)));
1898 if (CONSTANT_CLASS_P (sc))
1900 tree_vector_builder v (vectype, 1, 1);
1901 v.quick_push (sc);
1902 return v.build ();
1904 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1905 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1906 else
1908 vec<constructor_elt, va_gc> *v;
1909 vec_alloc (v, nunits);
1910 for (i = 0; i < nunits; ++i)
1911 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1912 return build_constructor (vectype, v);
1916 /* If TYPE is not a vector type, just return SC, otherwise return
1917 build_vector_from_val (TYPE, SC). */
1919 tree
1920 build_uniform_cst (tree type, tree sc)
1922 if (!VECTOR_TYPE_P (type))
1923 return sc;
1925 return build_vector_from_val (type, sc);
1928 /* Build a vector series of type TYPE in which element I has the value
1929 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1930 and a VEC_SERIES_EXPR otherwise. */
1932 tree
1933 build_vec_series (tree type, tree base, tree step)
1935 if (integer_zerop (step))
1936 return build_vector_from_val (type, base);
1937 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1939 tree_vector_builder builder (type, 1, 3);
1940 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1941 wi::to_wide (base) + wi::to_wide (step));
1942 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1943 wi::to_wide (elt1) + wi::to_wide (step));
1944 builder.quick_push (base);
1945 builder.quick_push (elt1);
1946 builder.quick_push (elt2);
1947 return builder.build ();
1949 return build2 (VEC_SERIES_EXPR, type, base, step);
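/* Illustrative encoding (v4si_type here stands for a hypothetical 4-element
   vector type whose element type is integer_type_node):
     build_vec_series (v4si_type,
                       build_int_cst (integer_type_node, 0),
                       build_int_cst (integer_type_node, 1))
   pushes the encoded elements { 0, 1, 2 } into a builder with one pattern
   of three elements per pattern, and the resulting VECTOR_CST decodes to
   { 0, 1, 2, 3 } by continuing the series.  */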
1952 /* Return a vector with the same number of units and number of bits
1953 as VEC_TYPE, but in which the elements are a linear series of unsigned
1954 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1956 tree
1957 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1959 tree index_vec_type = vec_type;
1960 tree index_elt_type = TREE_TYPE (vec_type);
1961 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1962 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1964 index_elt_type = build_nonstandard_integer_type
1965 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1966 index_vec_type = build_vector_type (index_elt_type, nunits);
1969 tree_vector_builder v (index_vec_type, 1, 3);
1970 for (unsigned int i = 0; i < 3; ++i)
1971 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1972 return v.build ();
1975 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
1976 elements are A and the rest are B. */
1978 tree
1979 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
1981 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
1982 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
1983 /* Optimize the constant case. */
1984 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
1985 count /= 2;
1986 tree_vector_builder builder (vec_type, count, 2);
1987 for (unsigned int i = 0; i < count * 2; ++i)
1988 builder.quick_push (i < num_a ? a : b);
1989 return builder.build ();
1992 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1993 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1995 void
1996 recompute_constructor_flags (tree c)
1998 unsigned int i;
1999 tree val;
2000 bool constant_p = true;
2001 bool side_effects_p = false;
2002 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2004 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2006 /* Mostly ctors will have elts that don't have side-effects, so
2007 the usual case is to scan all the elements. Hence a single
2008 loop for both const and side effects, rather than one loop
2009 each (with early outs). */
2010 if (!TREE_CONSTANT (val))
2011 constant_p = false;
2012 if (TREE_SIDE_EFFECTS (val))
2013 side_effects_p = true;
2016 TREE_SIDE_EFFECTS (c) = side_effects_p;
2017 TREE_CONSTANT (c) = constant_p;
2020 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2021 CONSTRUCTOR C. */
2023 void
2024 verify_constructor_flags (tree c)
2026 unsigned int i;
2027 tree val;
2028 bool constant_p = TREE_CONSTANT (c);
2029 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2030 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2032 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2034 if (constant_p && !TREE_CONSTANT (val))
2035 internal_error ("non-constant element in constant CONSTRUCTOR");
2036 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2037 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2041 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2042 are in the vec pointed to by VALS. */
2043 tree
2044 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2046 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2048 TREE_TYPE (c) = type;
2049 CONSTRUCTOR_ELTS (c) = vals;
2051 recompute_constructor_flags (c);
2053 return c;
2056 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2057 INDEX and VALUE. */
2058 tree
2059 build_constructor_single (tree type, tree index, tree value)
2061 vec<constructor_elt, va_gc> *v;
2062 constructor_elt elt = {index, value};
2064 vec_alloc (v, 1);
2065 v->quick_push (elt);
2067 return build_constructor (type, v);
2071 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2072 are in a list pointed to by VALS. */
2073 tree
2074 build_constructor_from_list (tree type, tree vals)
2076 tree t;
2077 vec<constructor_elt, va_gc> *v = NULL;
2079 if (vals)
2081 vec_alloc (v, list_length (vals));
2082 for (t = vals; t; t = TREE_CHAIN (t))
2083 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2086 return build_constructor (type, v);
2089 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2090 of elements, provided as index/value pairs. */
2092 tree
2093 build_constructor_va (tree type, int nelts, ...)
2095 vec<constructor_elt, va_gc> *v = NULL;
2096 va_list p;
2098 va_start (p, nelts);
2099 vec_alloc (v, nelts);
2100 while (nelts--)
2102 tree index = va_arg (p, tree);
2103 tree value = va_arg (p, tree);
2104 CONSTRUCTOR_APPEND_ELT (v, index, value);
2106 va_end (p);
2107 return build_constructor (type, v);
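
/* Editorial usage sketch (not part of the upstream source): building
   the aggregate initializer { [0] = 1, [1] = 2 } for a two-element
   array of int.  recompute_constructor_flags, called via
   build_constructor, marks the result TREE_CONSTANT.  The example_*
   helper name and the array type chosen here are illustrative
   assumptions.  */

static void ATTRIBUTE_UNUSED
example_build_constructor_va (void)
{
  tree domain = build_index_type (size_int (1));   /* indices 0..1  */
  tree arr = build_array_type (integer_type_node, domain);
  tree ctor = build_constructor_va (arr, 2,
                                    size_int (0),
                                    build_int_cst (integer_type_node, 1),
                                    size_int (1),
                                    build_int_cst (integer_type_node, 2));
  gcc_assert (TREE_CONSTANT (ctor) && !TREE_SIDE_EFFECTS (ctor));
}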
2110 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2112 tree
2113 build_clobber (tree type)
2115 tree clobber = build_constructor (type, NULL);
2116 TREE_THIS_VOLATILE (clobber) = true;
2117 return clobber;
2120 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2122 tree
2123 build_fixed (tree type, FIXED_VALUE_TYPE f)
2125 tree v;
2126 FIXED_VALUE_TYPE *fp;
2128 v = make_node (FIXED_CST);
2129 fp = ggc_alloc<fixed_value> ();
2130 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2132 TREE_TYPE (v) = type;
2133 TREE_FIXED_CST_PTR (v) = fp;
2134 return v;
2137 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2139 tree
2140 build_real (tree type, REAL_VALUE_TYPE d)
2142 tree v;
2143 REAL_VALUE_TYPE *dp;
2144 int overflow = 0;
2146 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2147 Consider doing it via real_convert now. */
2149 v = make_node (REAL_CST);
2150 dp = ggc_alloc<real_value> ();
2151 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2153 TREE_TYPE (v) = type;
2154 TREE_REAL_CST_PTR (v) = dp;
2155 TREE_OVERFLOW (v) = overflow;
2156 return v;
2159 /* Like build_real, but first truncate D to the type. */
2161 tree
2162 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2164 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2167 /* Return a new REAL_CST node whose type is TYPE
2168 and whose value is the integer value of the INTEGER_CST node I. */
2170 REAL_VALUE_TYPE
2171 real_value_from_int_cst (const_tree type, const_tree i)
2173 REAL_VALUE_TYPE d;
2175 /* Clear all bits of the real value type so that we can later do
2176 bitwise comparisons to see if two values are the same. */
2177 memset (&d, 0, sizeof d);
2179 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2180 TYPE_SIGN (TREE_TYPE (i)));
2181 return d;
2184 /* Given a tree representing an integer constant I, return a tree
2185 representing the same value as a floating-point constant of type TYPE. */
2187 tree
2188 build_real_from_int_cst (tree type, const_tree i)
2190 tree v;
2191 int overflow = TREE_OVERFLOW (i);
2193 v = build_real (type, real_value_from_int_cst (type, i));
2195 TREE_OVERFLOW (v) |= overflow;
2196 return v;
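
/* Editorial usage sketch (not part of the upstream source): converting
   the integer constant 3 into a REAL_CST of type double.  The
   example_* helper name is an illustrative assumption.  */

static void ATTRIBUTE_UNUSED
example_build_real_from_int_cst (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  tree fp_cst = build_real_from_int_cst (double_type_node, three);
  gcc_assert (TREE_CODE (fp_cst) == REAL_CST && !TREE_OVERFLOW (fp_cst));
}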
2199 /* Return a newly constructed STRING_CST node whose value is
2200 the LEN characters at STR.
2201 Note that for a C string literal, LEN should include the trailing NUL.
2202 The TREE_TYPE is not initialized. */
2204 tree
2205 build_string (int len, const char *str)
2207 tree s;
2208 size_t length;
2210 /* Do not waste bytes provided by padding of struct tree_string. */
2211 length = len + offsetof (struct tree_string, str) + 1;
2213 record_node_allocation_statistics (STRING_CST, length);
2215 s = (tree) ggc_internal_alloc (length);
2217 memset (s, 0, sizeof (struct tree_typed));
2218 TREE_SET_CODE (s, STRING_CST);
2219 TREE_CONSTANT (s) = 1;
2220 TREE_STRING_LENGTH (s) = len;
2221 memcpy (s->string.str, str, len);
2222 s->string.str[len] = '\0';
2224 return s;
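
/* Editorial usage sketch (not part of the upstream source).  For the C
   literal "hi" the length passed to build_string includes the trailing
   NUL, and the caller must set TREE_TYPE afterwards; the char[3] array
   type chosen here and the example_* helper name are illustrative
   assumptions.  */

static void ATTRIBUTE_UNUSED
example_build_string (void)
{
  tree str = build_string (3, "hi");            /* 'h', 'i', '\0'  */
  TREE_TYPE (str) = build_array_type (char_type_node,
                                      build_index_type (size_int (2)));
  gcc_assert (TREE_STRING_LENGTH (str) == 3
              && TREE_STRING_POINTER (str)[0] == 'h');
}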
2227 /* Return a newly constructed COMPLEX_CST node whose value is
2228 specified by the real and imaginary parts REAL and IMAG.
2229 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2230 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2232 tree
2233 build_complex (tree type, tree real, tree imag)
2235 gcc_assert (CONSTANT_CLASS_P (real));
2236 gcc_assert (CONSTANT_CLASS_P (imag));
2238 tree t = make_node (COMPLEX_CST);
2240 TREE_REALPART (t) = real;
2241 TREE_IMAGPART (t) = imag;
2242 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2243 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2244 return t;
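
/* Editorial usage sketch (not part of the upstream source): building
   the complex constant 1.0 + 2.0i.  Passing NULL_TREE as the type lets
   build_complex derive complex double from the real part.  The
   example_* helper name is an illustrative assumption.  */

static void ATTRIBUTE_UNUSED
example_build_complex (void)
{
  tree re = build_real (double_type_node, dconst1);
  tree im = build_real (double_type_node, dconst2);
  tree z = build_complex (NULL_TREE, re, im);
  gcc_assert (TREE_CODE (TREE_TYPE (z)) == COMPLEX_TYPE
              && TREE_REALPART (z) == re
              && TREE_IMAGPART (z) == im);
}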
2247 /* Build a complex (inf +- 0i), such as for the result of cproj.
2248 TYPE is the complex tree type of the result. If NEG is true, the
2249 imaginary zero is negative. */
2251 tree
2252 build_complex_inf (tree type, bool neg)
2254 REAL_VALUE_TYPE rinf, rzero = dconst0;
2256 real_inf (&rinf);
2257 rzero.sign = neg;
2258 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2259 build_real (TREE_TYPE (type), rzero));
2262 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2263 element is set to 1. In particular, this is 1 + i for complex types. */
2265 tree
2266 build_each_one_cst (tree type)
2268 if (TREE_CODE (type) == COMPLEX_TYPE)
2270 tree scalar = build_one_cst (TREE_TYPE (type));
2271 return build_complex (type, scalar, scalar);
2273 else
2274 return build_one_cst (type);
2277 /* Return a constant of arithmetic type TYPE which is the
2278 multiplicative identity of the set TYPE. */
2280 tree
2281 build_one_cst (tree type)
2283 switch (TREE_CODE (type))
2285 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2286 case POINTER_TYPE: case REFERENCE_TYPE:
2287 case OFFSET_TYPE:
2288 return build_int_cst (type, 1);
2290 case REAL_TYPE:
2291 return build_real (type, dconst1);
2293 case FIXED_POINT_TYPE:
2294 /* We can only generate 1 for accum types. */
2295 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2296 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2298 case VECTOR_TYPE:
2300 tree scalar = build_one_cst (TREE_TYPE (type));
2302 return build_vector_from_val (type, scalar);
2305 case COMPLEX_TYPE:
2306 return build_complex (type,
2307 build_one_cst (TREE_TYPE (type)),
2308 build_zero_cst (TREE_TYPE (type)));
2310 default:
2311 gcc_unreachable ();
2315 /* Return an integer of type TYPE containing all 1's in as much precision as
2316 it contains, or a complex or vector whose subparts are such integers. */
2318 tree
2319 build_all_ones_cst (tree type)
2321 if (TREE_CODE (type) == COMPLEX_TYPE)
2323 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2324 return build_complex (type, scalar, scalar);
2326 else
2327 return build_minus_one_cst (type);
2330 /* Return a constant of arithmetic type TYPE which is the
2331 opposite of the multiplicative identity of the set TYPE. */
2333 tree
2334 build_minus_one_cst (tree type)
2336 switch (TREE_CODE (type))
2338 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2339 case POINTER_TYPE: case REFERENCE_TYPE:
2340 case OFFSET_TYPE:
2341 return build_int_cst (type, -1);
2343 case REAL_TYPE:
2344 return build_real (type, dconstm1);
2346 case FIXED_POINT_TYPE:
2347 /* We can only generate -1 for accum types. */
2348 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2349 return build_fixed (type,
2350 fixed_from_double_int (double_int_minus_one,
2351 SCALAR_TYPE_MODE (type)));
2353 case VECTOR_TYPE:
2355 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2357 return build_vector_from_val (type, scalar);
2360 case COMPLEX_TYPE:
2361 return build_complex (type,
2362 build_minus_one_cst (TREE_TYPE (type)),
2363 build_zero_cst (TREE_TYPE (type)));
2365 default:
2366 gcc_unreachable ();
2370 /* Build 0 constant of type TYPE. This is used by constructor folding
2371 and thus the constant should be represented in memory by
2372 zero(es). */
2374 tree
2375 build_zero_cst (tree type)
2377 switch (TREE_CODE (type))
2379 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2380 case POINTER_TYPE: case REFERENCE_TYPE:
2381 case OFFSET_TYPE: case NULLPTR_TYPE:
2382 return build_int_cst (type, 0);
2384 case REAL_TYPE:
2385 return build_real (type, dconst0);
2387 case FIXED_POINT_TYPE:
2388 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2390 case VECTOR_TYPE:
2392 tree scalar = build_zero_cst (TREE_TYPE (type));
2394 return build_vector_from_val (type, scalar);
2397 case COMPLEX_TYPE:
2399 tree zero = build_zero_cst (TREE_TYPE (type));
2401 return build_complex (type, zero, zero);
2404 default:
2405 if (!AGGREGATE_TYPE_P (type))
2406 return fold_convert (type, integer_zero_node);
2407 return build_constructor (type, NULL);
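
/* Editorial usage sketch (not part of the upstream source), contrasting
   the identity-constant builders on a complex type: build_one_cst gives
   the multiplicative identity 1 + 0i, whereas build_each_one_cst sets
   every element and yields 1 + 1i.  The example_* helper name is an
   illustrative assumption.  */

static void ATTRIBUTE_UNUSED
example_identity_constants (void)
{
  tree ctype = build_complex_type (double_type_node);
  tree one = build_one_cst (ctype);             /* 1.0 + 0.0i  */
  tree each_one = build_each_one_cst (ctype);   /* 1.0 + 1.0i  */
  gcc_assert (real_zerop (TREE_IMAGPART (one))
              && real_onep (TREE_IMAGPART (each_one)));
}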
2412 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2414 tree
2415 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2417 tree t;
2418 size_t length = (offsetof (struct tree_binfo, base_binfos)
2419 + vec<tree, va_gc>::embedded_size (base_binfos));
2421 record_node_allocation_statistics (TREE_BINFO, length);
2423 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2425 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2427 TREE_SET_CODE (t, TREE_BINFO);
2429 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2431 return t;
2434 /* Create a CASE_LABEL_EXPR tree node and return it. */
2436 tree
2437 build_case_label (tree low_value, tree high_value, tree label_decl)
2439 tree t = make_node (CASE_LABEL_EXPR);
2441 TREE_TYPE (t) = void_type_node;
2442 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2444 CASE_LOW (t) = low_value;
2445 CASE_HIGH (t) = high_value;
2446 CASE_LABEL (t) = label_decl;
2447 CASE_CHAIN (t) = NULL_TREE;
2449 return t;
2452 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2453 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2454 The latter determines the length of the HOST_WIDE_INT vector. */
2456 tree
2457 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2459 tree t;
2460 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2461 + sizeof (struct tree_int_cst));
2463 gcc_assert (len);
2464 record_node_allocation_statistics (INTEGER_CST, length);
2466 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2468 TREE_SET_CODE (t, INTEGER_CST);
2469 TREE_INT_CST_NUNITS (t) = len;
2470 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2471 /* to_offset can only be applied to trees that are offset_int-sized
2472 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2473 must be exactly the precision of offset_int and so LEN is correct. */
2474 if (ext_len <= OFFSET_INT_ELTS)
2475 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2476 else
2477 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2479 TREE_CONSTANT (t) = 1;
2481 return t;
2484 /* Build a newly constructed TREE_VEC node of length LEN. */
2486 tree
2487 make_tree_vec (int len MEM_STAT_DECL)
2489 tree t;
2490 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2492 record_node_allocation_statistics (TREE_VEC, length);
2494 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2496 TREE_SET_CODE (t, TREE_VEC);
2497 TREE_VEC_LENGTH (t) = len;
2499 return t;
2502 /* Grow a TREE_VEC node to new length LEN. */
2504 tree
2505 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2507 gcc_assert (TREE_CODE (v) == TREE_VEC);
2509 int oldlen = TREE_VEC_LENGTH (v);
2510 gcc_assert (len > oldlen);
2512 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2513 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2515 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2517 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2519 TREE_VEC_LENGTH (v) = len;
2521 return v;
2524 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2525 fixed, and scalar, complex or vector. */
2527 bool
2528 zerop (const_tree expr)
2530 return (integer_zerop (expr)
2531 || real_zerop (expr)
2532 || fixed_zerop (expr));
2535 /* Return 1 if EXPR is the integer constant zero or a complex constant
2536 of zero, or a location wrapper for such a constant. */
2538 bool
2539 integer_zerop (const_tree expr)
2541 STRIP_ANY_LOCATION_WRAPPER (expr);
2543 switch (TREE_CODE (expr))
2545 case INTEGER_CST:
2546 return wi::to_wide (expr) == 0;
2547 case COMPLEX_CST:
2548 return (integer_zerop (TREE_REALPART (expr))
2549 && integer_zerop (TREE_IMAGPART (expr)));
2550 case VECTOR_CST:
2551 return (VECTOR_CST_NPATTERNS (expr) == 1
2552 && VECTOR_CST_DUPLICATE_P (expr)
2553 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2554 default:
2555 return false;
2559 /* Return 1 if EXPR is the integer constant one or the corresponding
2560 complex constant, or a location wrapper for such a constant. */
2562 bool
2563 integer_onep (const_tree expr)
2565 STRIP_ANY_LOCATION_WRAPPER (expr);
2567 switch (TREE_CODE (expr))
2569 case INTEGER_CST:
2570 return wi::eq_p (wi::to_widest (expr), 1);
2571 case COMPLEX_CST:
2572 return (integer_onep (TREE_REALPART (expr))
2573 && integer_zerop (TREE_IMAGPART (expr)));
2574 case VECTOR_CST:
2575 return (VECTOR_CST_NPATTERNS (expr) == 1
2576 && VECTOR_CST_DUPLICATE_P (expr)
2577 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2578 default:
2579 return false;
2583 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2584 return 1 if every piece is the integer constant one.
2585 Also return 1 for location wrappers for such a constant. */
2587 bool
2588 integer_each_onep (const_tree expr)
2590 STRIP_ANY_LOCATION_WRAPPER (expr);
2592 if (TREE_CODE (expr) == COMPLEX_CST)
2593 return (integer_onep (TREE_REALPART (expr))
2594 && integer_onep (TREE_IMAGPART (expr)));
2595 else
2596 return integer_onep (expr);
2599 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2600 it contains, or a complex or vector whose subparts are such integers,
2601 or a location wrapper for such a constant. */
2603 bool
2604 integer_all_onesp (const_tree expr)
2606 STRIP_ANY_LOCATION_WRAPPER (expr);
2608 if (TREE_CODE (expr) == COMPLEX_CST
2609 && integer_all_onesp (TREE_REALPART (expr))
2610 && integer_all_onesp (TREE_IMAGPART (expr)))
2611 return true;
2613 else if (TREE_CODE (expr) == VECTOR_CST)
2614 return (VECTOR_CST_NPATTERNS (expr) == 1
2615 && VECTOR_CST_DUPLICATE_P (expr)
2616 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2618 else if (TREE_CODE (expr) != INTEGER_CST)
2619 return false;
2621 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2622 == wi::to_wide (expr));
2625 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2626 for such a constant. */
2628 bool
2629 integer_minus_onep (const_tree expr)
2631 STRIP_ANY_LOCATION_WRAPPER (expr);
2633 if (TREE_CODE (expr) == COMPLEX_CST)
2634 return (integer_all_onesp (TREE_REALPART (expr))
2635 && integer_zerop (TREE_IMAGPART (expr)));
2636 else
2637 return integer_all_onesp (expr);
2640 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2641 one bit on), or a location wrapper for such a constant. */
2643 bool
2644 integer_pow2p (const_tree expr)
2646 STRIP_ANY_LOCATION_WRAPPER (expr);
2648 if (TREE_CODE (expr) == COMPLEX_CST
2649 && integer_pow2p (TREE_REALPART (expr))
2650 && integer_zerop (TREE_IMAGPART (expr)))
2651 return true;
2653 if (TREE_CODE (expr) != INTEGER_CST)
2654 return false;
2656 return wi::popcount (wi::to_wide (expr)) == 1;
2659 /* Return 1 if EXPR is an integer constant other than zero or a
2660 complex constant other than zero, or a location wrapper for such a
2661 constant. */
2663 bool
2664 integer_nonzerop (const_tree expr)
2666 STRIP_ANY_LOCATION_WRAPPER (expr);
2668 return ((TREE_CODE (expr) == INTEGER_CST
2669 && wi::to_wide (expr) != 0)
2670 || (TREE_CODE (expr) == COMPLEX_CST
2671 && (integer_nonzerop (TREE_REALPART (expr))
2672 || integer_nonzerop (TREE_IMAGPART (expr)))));
2675 /* Return 1 if EXPR is the integer constant one. For vector,
2676 return 1 if every piece is the integer constant minus one
2677 (representing the value TRUE).
2678 Also return 1 for location wrappers for such a constant. */
2680 bool
2681 integer_truep (const_tree expr)
2683 STRIP_ANY_LOCATION_WRAPPER (expr);
2685 if (TREE_CODE (expr) == VECTOR_CST)
2686 return integer_all_onesp (expr);
2687 return integer_onep (expr);
2690 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2691 for such a constant. */
2693 bool
2694 fixed_zerop (const_tree expr)
2696 STRIP_ANY_LOCATION_WRAPPER (expr);
2698 return (TREE_CODE (expr) == FIXED_CST
2699 && TREE_FIXED_CST (expr).data.is_zero ());
2702 /* Return the power of two represented by a tree node known to be a
2703 power of two. */
2705 int
2706 tree_log2 (const_tree expr)
2708 if (TREE_CODE (expr) == COMPLEX_CST)
2709 return tree_log2 (TREE_REALPART (expr));
2711 return wi::exact_log2 (wi::to_wide (expr));
2714 /* Similar, but return the largest integer Y such that 2 ** Y is less
2715 than or equal to EXPR. */
2717 int
2718 tree_floor_log2 (const_tree expr)
2720 if (TREE_CODE (expr) == COMPLEX_CST)
2721 return tree_log2 (TREE_REALPART (expr));
2723 return wi::floor_log2 (wi::to_wide (expr));
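
/* Editorial usage sketch (not part of the upstream source): the
   power-of-two predicate together with the exact and floor logarithms
   on small integer constants.  The example_* helper name is an
   illustrative assumption.  */

static void ATTRIBUTE_UNUSED
example_tree_log2 (void)
{
  tree c64 = build_int_cst (integer_type_node, 64);
  tree c100 = build_int_cst (integer_type_node, 100);
  gcc_assert (integer_pow2p (c64) && tree_log2 (c64) == 6);
  gcc_assert (!integer_pow2p (c100) && tree_floor_log2 (c100) == 6);
}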
2726 /* Return number of known trailing zero bits in EXPR, or, if the value of
2727 EXPR is known to be zero, the precision of its type. */
2729 unsigned int
2730 tree_ctz (const_tree expr)
2732 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2733 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2734 return 0;
2736 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2737 switch (TREE_CODE (expr))
2739 case INTEGER_CST:
2740 ret1 = wi::ctz (wi::to_wide (expr));
2741 return MIN (ret1, prec);
2742 case SSA_NAME:
2743 ret1 = wi::ctz (get_nonzero_bits (expr));
2744 return MIN (ret1, prec);
2745 case PLUS_EXPR:
2746 case MINUS_EXPR:
2747 case BIT_IOR_EXPR:
2748 case BIT_XOR_EXPR:
2749 case MIN_EXPR:
2750 case MAX_EXPR:
2751 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2752 if (ret1 == 0)
2753 return ret1;
2754 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2755 return MIN (ret1, ret2);
2756 case POINTER_PLUS_EXPR:
2757 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2758 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2759 /* Second operand is sizetype, which could be in theory
2760 wider than pointer's precision. Make sure we never
2761 return more than prec. */
2762 ret2 = MIN (ret2, prec);
2763 return MIN (ret1, ret2);
2764 case BIT_AND_EXPR:
2765 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2766 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2767 return MAX (ret1, ret2);
2768 case MULT_EXPR:
2769 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2770 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2771 return MIN (ret1 + ret2, prec);
2772 case LSHIFT_EXPR:
2773 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2774 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2775 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2777 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2778 return MIN (ret1 + ret2, prec);
2780 return ret1;
2781 case RSHIFT_EXPR:
2782 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2783 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2785 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2786 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2787 if (ret1 > ret2)
2788 return ret1 - ret2;
2790 return 0;
2791 case TRUNC_DIV_EXPR:
2792 case CEIL_DIV_EXPR:
2793 case FLOOR_DIV_EXPR:
2794 case ROUND_DIV_EXPR:
2795 case EXACT_DIV_EXPR:
2796 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2797 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2799 int l = tree_log2 (TREE_OPERAND (expr, 1));
2800 if (l >= 0)
2802 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2803 ret2 = l;
2804 if (ret1 > ret2)
2805 return ret1 - ret2;
2808 return 0;
2809 CASE_CONVERT:
2810 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2811 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2812 ret1 = prec;
2813 return MIN (ret1, prec);
2814 case SAVE_EXPR:
2815 return tree_ctz (TREE_OPERAND (expr, 0));
2816 case COND_EXPR:
2817 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2818 if (ret1 == 0)
2819 return 0;
2820 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2821 return MIN (ret1, ret2);
2822 case COMPOUND_EXPR:
2823 return tree_ctz (TREE_OPERAND (expr, 1));
2824 case ADDR_EXPR:
2825 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2826 if (ret1 > BITS_PER_UNIT)
2828 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2829 return MIN (ret1, prec);
2831 return 0;
2832 default:
2833 return 0;
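
/* Editorial usage sketch (not part of the upstream source): tree_ctz on
   an INTEGER_CST counts trailing zero bits directly, and for a
   MULT_EXPR the counts of the factors are added, capped at the type
   precision.  The example_* helper name is an illustrative
   assumption.  */

static void ATTRIBUTE_UNUSED
example_tree_ctz (void)
{
  tree c8 = build_int_cst (integer_type_node, 8);
  tree c12 = build_int_cst (integer_type_node, 12);
  tree prod = build2 (MULT_EXPR, integer_type_node, c8, c12);
  gcc_assert (tree_ctz (c8) == 3
              && tree_ctz (c12) == 2
              && tree_ctz (prod) == 5);
}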
2837 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2838 decimal float constants, so don't return 1 for them.
2839 Also return 1 for location wrappers around such a constant. */
2841 bool
2842 real_zerop (const_tree expr)
2844 STRIP_ANY_LOCATION_WRAPPER (expr);
2846 switch (TREE_CODE (expr))
2848 case REAL_CST:
2849 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2850 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2851 case COMPLEX_CST:
2852 return real_zerop (TREE_REALPART (expr))
2853 && real_zerop (TREE_IMAGPART (expr));
2854 case VECTOR_CST:
2856 /* Don't simply check for a duplicate because the predicate
2857 accepts both +0.0 and -0.0. */
2858 unsigned count = vector_cst_encoded_nelts (expr);
2859 for (unsigned int i = 0; i < count; ++i)
2860 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2861 return false;
2862 return true;
2864 default:
2865 return false;
2869 /* Return 1 if EXPR is the real constant one in real or complex form.
2870 Trailing zeroes matter for decimal float constants, so don't return
2871 1 for them.
2872 Also return 1 for location wrappers around such a constant. */
2874 bool
2875 real_onep (const_tree expr)
2877 STRIP_ANY_LOCATION_WRAPPER (expr);
2879 switch (TREE_CODE (expr))
2881 case REAL_CST:
2882 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2883 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2884 case COMPLEX_CST:
2885 return real_onep (TREE_REALPART (expr))
2886 && real_zerop (TREE_IMAGPART (expr));
2887 case VECTOR_CST:
2888 return (VECTOR_CST_NPATTERNS (expr) == 1
2889 && VECTOR_CST_DUPLICATE_P (expr)
2890 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2891 default:
2892 return false;
2896 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2897 matter for decimal float constants, so don't return 1 for them.
2898 Also return 1 for location wrappers around such a constant. */
2900 bool
2901 real_minus_onep (const_tree expr)
2903 STRIP_ANY_LOCATION_WRAPPER (expr);
2905 switch (TREE_CODE (expr))
2907 case REAL_CST:
2908 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2909 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2910 case COMPLEX_CST:
2911 return real_minus_onep (TREE_REALPART (expr))
2912 && real_zerop (TREE_IMAGPART (expr));
2913 case VECTOR_CST:
2914 return (VECTOR_CST_NPATTERNS (expr) == 1
2915 && VECTOR_CST_DUPLICATE_P (expr)
2916 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2917 default:
2918 return false;
2922 /* Nonzero if EXP is a constant or a cast of a constant. */
2924 bool
2925 really_constant_p (const_tree exp)
2927 /* This is not quite the same as STRIP_NOPS. It does more. */
2928 while (CONVERT_EXPR_P (exp)
2929 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2930 exp = TREE_OPERAND (exp, 0);
2931 return TREE_CONSTANT (exp);
2934 /* Return true if T holds a polynomial pointer difference, storing it in
2935 *VALUE if so. A true return means that T's precision is no greater
2936 than 64 bits, which is the largest address space we support, so *VALUE
2937 never loses precision. However, the signedness of the result does
2938 not necessarily match the signedness of T: sometimes an unsigned type
2939 like sizetype is used to encode a value that is actually negative. */
2941 bool
2942 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2944 if (!t)
2945 return false;
2946 if (TREE_CODE (t) == INTEGER_CST)
2948 if (!cst_and_fits_in_hwi (t))
2949 return false;
2950 *value = int_cst_value (t);
2951 return true;
2953 if (POLY_INT_CST_P (t))
2955 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2956 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2957 return false;
2958 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2959 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2960 return true;
2962 return false;
2965 poly_int64
2966 tree_to_poly_int64 (const_tree t)
2968 gcc_assert (tree_fits_poly_int64_p (t));
2969 if (POLY_INT_CST_P (t))
2970 return poly_int_cst_value (t).force_shwi ();
2971 return TREE_INT_CST_LOW (t);
2974 poly_uint64
2975 tree_to_poly_uint64 (const_tree t)
2977 gcc_assert (tree_fits_poly_uint64_p (t));
2978 if (POLY_INT_CST_P (t))
2979 return poly_int_cst_value (t).force_uhwi ();
2980 return TREE_INT_CST_LOW (t);
2983 /* Return first list element whose TREE_VALUE is ELEM.
2984 Return 0 if ELEM is not in LIST. */
2986 tree
2987 value_member (tree elem, tree list)
2989 while (list)
2991 if (elem == TREE_VALUE (list))
2992 return list;
2993 list = TREE_CHAIN (list);
2995 return NULL_TREE;
2998 /* Return first list element whose TREE_PURPOSE is ELEM.
2999 Return 0 if ELEM is not in LIST. */
3001 tree
3002 purpose_member (const_tree elem, tree list)
3004 while (list)
3006 if (elem == TREE_PURPOSE (list))
3007 return list;
3008 list = TREE_CHAIN (list);
3010 return NULL_TREE;
3013 /* Return true if ELEM is in V. */
3015 bool
3016 vec_member (const_tree elem, vec<tree, va_gc> *v)
3018 unsigned ix;
3019 tree t;
3020 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3021 if (elem == t)
3022 return true;
3023 return false;
3026 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3027 NULL_TREE. */
3029 tree
3030 chain_index (int idx, tree chain)
3032 for (; chain && idx > 0; --idx)
3033 chain = TREE_CHAIN (chain);
3034 return chain;
3037 /* Return nonzero if ELEM is part of the chain CHAIN. */
3039 bool
3040 chain_member (const_tree elem, const_tree chain)
3042 while (chain)
3044 if (elem == chain)
3045 return true;
3046 chain = DECL_CHAIN (chain);
3049 return false;
3052 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3053 We expect a null pointer to mark the end of the chain.
3054 This is the Lisp primitive `length'. */
3056 int
3057 list_length (const_tree t)
3059 const_tree p = t;
3060 #ifdef ENABLE_TREE_CHECKING
3061 const_tree q = t;
3062 #endif
3063 int len = 0;
3065 while (p)
3067 p = TREE_CHAIN (p);
3068 #ifdef ENABLE_TREE_CHECKING
3069 if (len % 2)
3070 q = TREE_CHAIN (q);
3071 gcc_assert (p != q);
3072 #endif
3073 len++;
3076 return len;
3079 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3080 UNION_TYPE TYPE, or NULL_TREE if none. */
3082 tree
3083 first_field (const_tree type)
3085 tree t = TYPE_FIELDS (type);
3086 while (t && TREE_CODE (t) != FIELD_DECL)
3087 t = TREE_CHAIN (t);
3088 return t;
3091 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3092 by modifying the last node in chain 1 to point to chain 2.
3093 This is the Lisp primitive `nconc'. */
3095 tree
3096 chainon (tree op1, tree op2)
3098 tree t1;
3100 if (!op1)
3101 return op2;
3102 if (!op2)
3103 return op1;
3105 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3106 continue;
3107 TREE_CHAIN (t1) = op2;
3109 #ifdef ENABLE_TREE_CHECKING
3111 tree t2;
3112 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3113 gcc_assert (t2 != t1);
3115 #endif
3117 return op1;
3120 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3122 tree
3123 tree_last (tree chain)
3125 tree next;
3126 if (chain)
3127 while ((next = TREE_CHAIN (chain)))
3128 chain = next;
3129 return chain;
3132 /* Reverse the order of elements in the chain T,
3133 and return the new head of the chain (old last element). */
3135 tree
3136 nreverse (tree t)
3138 tree prev = 0, decl, next;
3139 for (decl = t; decl; decl = next)
3141 /* We shouldn't be using this function to reverse BLOCK chains; we
3142 have blocks_nreverse for that. */
3143 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3144 next = TREE_CHAIN (decl);
3145 TREE_CHAIN (decl) = prev;
3146 prev = decl;
3148 return prev;
3151 /* Return a newly created TREE_LIST node whose
3152 purpose and value fields are PARM and VALUE. */
3154 tree
3155 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3157 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3158 TREE_PURPOSE (t) = parm;
3159 TREE_VALUE (t) = value;
3160 return t;
3163 /* Build a chain of TREE_LIST nodes from a vector. */
3165 tree
3166 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3168 tree ret = NULL_TREE;
3169 tree *pp = &ret;
3170 unsigned int i;
3171 tree t;
3172 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3174 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3175 pp = &TREE_CHAIN (*pp);
3177 return ret;
3180 /* Return a newly created TREE_LIST node whose
3181 purpose and value fields are PURPOSE and VALUE
3182 and whose TREE_CHAIN is CHAIN. */
3184 tree
3185 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3187 tree node;
3189 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3190 memset (node, 0, sizeof (struct tree_common));
3192 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3194 TREE_SET_CODE (node, TREE_LIST);
3195 TREE_CHAIN (node) = chain;
3196 TREE_PURPOSE (node) = purpose;
3197 TREE_VALUE (node) = value;
3198 return node;
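
/* Editorial usage sketch (not part of the upstream source): a short
   tour of the TREE_LIST utilities defined above.  tree_cons and chainon
   build and join chains, list_length measures them, and nreverse
   reverses in place.  The example_* helper name is an illustrative
   assumption.  */

static void ATTRIBUTE_UNUSED
example_tree_list_utils (void)
{
  tree a = build_tree_list (NULL_TREE, integer_zero_node);
  tree b = tree_cons (NULL_TREE, integer_one_node, NULL_TREE);
  tree chain = chainon (a, b);                  /* a -> b  */
  gcc_assert (list_length (chain) == 2);
  chain = nreverse (chain);                     /* b -> a  */
  gcc_assert (TREE_VALUE (chain) == integer_one_node
              && tree_last (chain) == a);
}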
3201 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3202 trees. */
3204 vec<tree, va_gc> *
3205 ctor_to_vec (tree ctor)
3207 vec<tree, va_gc> *vec;
3208 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3209 unsigned int ix;
3210 tree val;
3212 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3213 vec->quick_push (val);
3215 return vec;
3218 /* Return the size nominally occupied by an object of type TYPE
3219 when it resides in memory. The value is measured in units of bytes,
3220 and its data type is that normally used for type sizes
3221 (which is the first type created by make_signed_type or
3222 make_unsigned_type). */
3224 tree
3225 size_in_bytes_loc (location_t loc, const_tree type)
3227 tree t;
3229 if (type == error_mark_node)
3230 return integer_zero_node;
3232 type = TYPE_MAIN_VARIANT (type);
3233 t = TYPE_SIZE_UNIT (type);
3235 if (t == 0)
3237 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3238 return size_zero_node;
3241 return t;
3244 /* Return the size of TYPE (in bytes) as a wide integer
3245 or return -1 if the size can vary or is larger than an integer. */
3247 HOST_WIDE_INT
3248 int_size_in_bytes (const_tree type)
3250 tree t;
3252 if (type == error_mark_node)
3253 return 0;
3255 type = TYPE_MAIN_VARIANT (type);
3256 t = TYPE_SIZE_UNIT (type);
3258 if (t && tree_fits_uhwi_p (t))
3259 return TREE_INT_CST_LOW (t);
3260 else
3261 return -1;
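
/* Editorial usage sketch (not part of the upstream source).  For a
   complete, laid-out type the size comes back as a HOST_WIDE_INT in
   bytes; for an incomplete type the function returns -1.  The
   example_* helper name is an illustrative assumption, and the exact
   size of int is target-dependent.  */

static void ATTRIBUTE_UNUSED
example_int_size_in_bytes (void)
{
  HOST_WIDE_INT isize = int_size_in_bytes (integer_type_node);
  gcc_assert (isize > 0);                       /* e.g. 4 on most targets  */

  tree incomplete = make_node (RECORD_TYPE);    /* no size laid out yet  */
  gcc_assert (int_size_in_bytes (incomplete) == -1);
}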
3264 /* Return the maximum size of TYPE (in bytes) as a wide integer
3265 or return -1 if the size can vary or is larger than an integer. */
3267 HOST_WIDE_INT
3268 max_int_size_in_bytes (const_tree type)
3270 HOST_WIDE_INT size = -1;
3271 tree size_tree;
3273 /* If this is an array type, check for a possible MAX_SIZE attached. */
3275 if (TREE_CODE (type) == ARRAY_TYPE)
3277 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3279 if (size_tree && tree_fits_uhwi_p (size_tree))
3280 size = tree_to_uhwi (size_tree);
3283 /* If we still haven't been able to get a size, see if the language
3284 can compute a maximum size. */
3286 if (size == -1)
3288 size_tree = lang_hooks.types.max_size (type);
3290 if (size_tree && tree_fits_uhwi_p (size_tree))
3291 size = tree_to_uhwi (size_tree);
3294 return size;
3297 /* Return the bit position of FIELD, in bits from the start of the record.
3298 This is a tree of type bitsizetype. */
3300 tree
3301 bit_position (const_tree field)
3303 return bit_from_pos (DECL_FIELD_OFFSET (field),
3304 DECL_FIELD_BIT_OFFSET (field));
3307 /* Return the byte position of FIELD, in bytes from the start of the record.
3308 This is a tree of type sizetype. */
3310 tree
3311 byte_position (const_tree field)
3313 return byte_from_pos (DECL_FIELD_OFFSET (field),
3314 DECL_FIELD_BIT_OFFSET (field));
3317 /* Likewise, but return as an integer. It must be representable in
3318 that way (since it could be a signed value, we don't have the
3319 option of returning -1 like int_size_in_bytes can). */
3321 HOST_WIDE_INT
3322 int_byte_position (const_tree field)
3324 return tree_to_shwi (byte_position (field));
3327 /* Return the strictest alignment, in bits, that T is known to have. */
3329 unsigned int
3330 expr_align (const_tree t)
3332 unsigned int align0, align1;
3334 switch (TREE_CODE (t))
3336 CASE_CONVERT: case NON_LVALUE_EXPR:
3337 /* If we have conversions, we know that the alignment of the
3338 object must meet each of the alignments of the types. */
3339 align0 = expr_align (TREE_OPERAND (t, 0));
3340 align1 = TYPE_ALIGN (TREE_TYPE (t));
3341 return MAX (align0, align1);
3343 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3344 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3345 case CLEANUP_POINT_EXPR:
3346 /* These don't change the alignment of an object. */
3347 return expr_align (TREE_OPERAND (t, 0));
3349 case COND_EXPR:
3350 /* The best we can do is say that the alignment is the least aligned
3351 of the two arms. */
3352 align0 = expr_align (TREE_OPERAND (t, 1));
3353 align1 = expr_align (TREE_OPERAND (t, 2));
3354 return MIN (align0, align1);
3356 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3357 meaningfully; it's always 1. */
3358 case LABEL_DECL: case CONST_DECL:
3359 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3360 case FUNCTION_DECL:
3361 gcc_assert (DECL_ALIGN (t) != 0);
3362 return DECL_ALIGN (t);
3364 default:
3365 break;
3368 /* Otherwise take the alignment from that of the type. */
3369 return TYPE_ALIGN (TREE_TYPE (t));
3372 /* Return, as a tree node, the number of elements for TYPE (which is an
3373 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3375 tree
3376 array_type_nelts (const_tree type)
3378 tree index_type, min, max;
3380 /* If they did it with unspecified bounds, then we should have already
3381 given an error about it before we got here. */
3382 if (! TYPE_DOMAIN (type))
3383 return error_mark_node;
3385 index_type = TYPE_DOMAIN (type);
3386 min = TYPE_MIN_VALUE (index_type);
3387 max = TYPE_MAX_VALUE (index_type);
3389 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3390 if (!max)
3391 return error_mark_node;
3393 return (integer_zerop (min)
3394 ? max
3395 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
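
/* Editorial usage sketch (not part of the upstream source): for the
   type int[10] the domain is [0, 9], so array_type_nelts returns the
   INTEGER_CST 9, i.e. the number of elements minus one.  The example_*
   helper name is an illustrative assumption.  */

static void ATTRIBUTE_UNUSED
example_array_type_nelts (void)
{
  tree domain = build_index_type (size_int (9));        /* [0..9]  */
  tree int10 = build_array_type (integer_type_node, domain);
  tree nelts_minus_1 = array_type_nelts (int10);
  gcc_assert (tree_fits_shwi_p (nelts_minus_1)
              && tree_to_shwi (nelts_minus_1) == 9);
}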
3398 /* If arg is static -- a reference to an object in static storage -- then
3399 return the object. This is not the same as the C meaning of `static'.
3400 If arg isn't static, return NULL. */
3402 tree
3403 staticp (tree arg)
3405 switch (TREE_CODE (arg))
3407 case FUNCTION_DECL:
3408 /* Nested functions are static, even though taking their address will
3409 involve a trampoline as we unnest the nested function and create
3410 the trampoline on the tree level. */
3411 return arg;
3413 case VAR_DECL:
3414 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3415 && ! DECL_THREAD_LOCAL_P (arg)
3416 && ! DECL_DLLIMPORT_P (arg)
3417 ? arg : NULL);
3419 case CONST_DECL:
3420 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3421 ? arg : NULL);
3423 case CONSTRUCTOR:
3424 return TREE_STATIC (arg) ? arg : NULL;
3426 case LABEL_DECL:
3427 case STRING_CST:
3428 return arg;
3430 case COMPONENT_REF:
3431 /* If the thing being referenced is not a field, then it is
3432 something language specific. */
3433 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3435 /* If we are referencing a bitfield, we can't evaluate an
3436 ADDR_EXPR at compile time and so it isn't a constant. */
3437 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3438 return NULL;
3440 return staticp (TREE_OPERAND (arg, 0));
3442 case BIT_FIELD_REF:
3443 return NULL;
3445 case INDIRECT_REF:
3446 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3448 case ARRAY_REF:
3449 case ARRAY_RANGE_REF:
3450 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3451 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3452 return staticp (TREE_OPERAND (arg, 0));
3453 else
3454 return NULL;
3456 case COMPOUND_LITERAL_EXPR:
3457 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3459 default:
3460 return NULL;
3467 /* Return whether OP is a DECL whose address is function-invariant. */
3469 bool
3470 decl_address_invariant_p (const_tree op)
3472 /* The conditions below are slightly less strict than the one in
3473 staticp. */
3475 switch (TREE_CODE (op))
3477 case PARM_DECL:
3478 case RESULT_DECL:
3479 case LABEL_DECL:
3480 case FUNCTION_DECL:
3481 return true;
3483 case VAR_DECL:
3484 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3485 || DECL_THREAD_LOCAL_P (op)
3486 || DECL_CONTEXT (op) == current_function_decl
3487 || decl_function_context (op) == current_function_decl)
3488 return true;
3489 break;
3491 case CONST_DECL:
3492 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3493 || decl_function_context (op) == current_function_decl)
3494 return true;
3495 break;
3497 default:
3498 break;
3501 return false;
3504 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3506 bool
3507 decl_address_ip_invariant_p (const_tree op)
3509 /* The conditions below are slightly less strict than the one in
3510 staticp. */
3512 switch (TREE_CODE (op))
3514 case LABEL_DECL:
3515 case FUNCTION_DECL:
3516 case STRING_CST:
3517 return true;
3519 case VAR_DECL:
3520 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3521 && !DECL_DLLIMPORT_P (op))
3522 || DECL_THREAD_LOCAL_P (op))
3523 return true;
3524 break;
3526 case CONST_DECL:
3527 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3528 return true;
3529 break;
3531 default:
3532 break;
3535 return false;
3539 /* Return true if T is function-invariant (internal function, does
3540 not handle arithmetic; that's handled in skip_simple_arithmetic and
3541 tree_invariant_p). */
3543 static bool
3544 tree_invariant_p_1 (tree t)
3546 tree op;
3548 if (TREE_CONSTANT (t)
3549 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3550 return true;
3552 switch (TREE_CODE (t))
3554 case SAVE_EXPR:
3555 return true;
3557 case ADDR_EXPR:
3558 op = TREE_OPERAND (t, 0);
3559 while (handled_component_p (op))
3561 switch (TREE_CODE (op))
3563 case ARRAY_REF:
3564 case ARRAY_RANGE_REF:
3565 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3566 || TREE_OPERAND (op, 2) != NULL_TREE
3567 || TREE_OPERAND (op, 3) != NULL_TREE)
3568 return false;
3569 break;
3571 case COMPONENT_REF:
3572 if (TREE_OPERAND (op, 2) != NULL_TREE)
3573 return false;
3574 break;
3576 default:;
3578 op = TREE_OPERAND (op, 0);
3581 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3583 default:
3584 break;
3587 return false;
3590 /* Return true if T is function-invariant. */
3592 bool
3593 tree_invariant_p (tree t)
3595 tree inner = skip_simple_arithmetic (t);
3596 return tree_invariant_p_1 (inner);
3599 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3600 Do this to any expression which may be used in more than one place,
3601 but must be evaluated only once.
3603 Normally, expand_expr would reevaluate the expression each time.
3604 Calling save_expr produces something that is evaluated and recorded
3605 the first time expand_expr is called on it. Subsequent calls to
3606 expand_expr just reuse the recorded value.
3608 The call to expand_expr that generates code that actually computes
3609 the value is the first call *at compile time*. Subsequent calls
3610 *at compile time* generate code to use the saved value.
3611 This produces correct results provided that *at run time* control
3612 always flows through the insns made by the first expand_expr
3613 before reaching the other places where the save_expr was evaluated.
3614 You, the caller of save_expr, must make sure this is so.
3616 Constants, and certain read-only nodes, are returned with no
3617 SAVE_EXPR because that is safe. Expressions containing placeholders
3618 are not touched; see tree.def for an explanation of what these
3619 are used for. */
3621 tree
3622 save_expr (tree expr)
3624 tree inner;
3626 /* If the tree evaluates to a constant, then we don't want to hide that
3627 fact (i.e. this allows further folding, and direct checks for constants).
3628 However, a read-only object that has side effects cannot be bypassed.
3629 Since it is no problem to reevaluate literals, we just return the
3630 literal node. */
3631 inner = skip_simple_arithmetic (expr);
3632 if (TREE_CODE (inner) == ERROR_MARK)
3633 return inner;
3635 if (tree_invariant_p_1 (inner))
3636 return expr;
3638 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3639 it means that the size or offset of some field of an object depends on
3640 the value within another field.
3642 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3643 and some variable since it would then need to be both evaluated once and
3644 evaluated more than once. Front-ends must ensure this case cannot
3645 happen by surrounding any such subexpressions in their own SAVE_EXPR
3646 and forcing evaluation at the proper time. */
3647 if (contains_placeholder_p (inner))
3648 return expr;
3650 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3652 /* This expression might be placed ahead of a jump to ensure that the
3653 value was computed on both sides of the jump. So make sure it isn't
3654 eliminated as dead. */
3655 TREE_SIDE_EFFECTS (expr) = 1;
3656 return expr;
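
/* Editorial usage sketch (not part of the upstream source).  A literal
   is invariant and comes back unwrapped, whereas an expression reading
   a variable gets a SAVE_EXPR so it is evaluated only once.  The
   example_* helper name and the VAR_DECL built here are illustrative
   assumptions.  */

static void ATTRIBUTE_UNUSED
example_save_expr (void)
{
  /* Constants are returned unchanged.  */
  gcc_assert (save_expr (integer_one_node) == integer_one_node);

  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                         get_identifier ("tmp"), integer_type_node);
  tree sum = build2 (PLUS_EXPR, integer_type_node, var, integer_one_node);
  tree saved = save_expr (sum);
  gcc_assert (TREE_CODE (saved) == SAVE_EXPR
              && TREE_SIDE_EFFECTS (saved));
}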
3659 /* Look inside EXPR into any simple arithmetic operations. Return the
3660 outermost non-arithmetic or non-invariant node. */
3662 tree
3663 skip_simple_arithmetic (tree expr)
3665 /* We don't care about whether this can be used as an lvalue in this
3666 context. */
3667 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3668 expr = TREE_OPERAND (expr, 0);
3670 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3671 a constant, it will be more efficient to not make another SAVE_EXPR since
3672 it will allow better simplification and GCSE will be able to merge the
3673 computations if they actually occur. */
3674 while (true)
3676 if (UNARY_CLASS_P (expr))
3677 expr = TREE_OPERAND (expr, 0);
3678 else if (BINARY_CLASS_P (expr))
3680 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3681 expr = TREE_OPERAND (expr, 0);
3682 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3683 expr = TREE_OPERAND (expr, 1);
3684 else
3685 break;
3687 else
3688 break;
3691 return expr;
3694 /* Look inside EXPR into simple arithmetic operations involving constants.
3695 Return the outermost non-arithmetic or non-constant node. */
3697 tree
3698 skip_simple_constant_arithmetic (tree expr)
3700 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3701 expr = TREE_OPERAND (expr, 0);
3703 while (true)
3705 if (UNARY_CLASS_P (expr))
3706 expr = TREE_OPERAND (expr, 0);
3707 else if (BINARY_CLASS_P (expr))
3709 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3710 expr = TREE_OPERAND (expr, 0);
3711 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3712 expr = TREE_OPERAND (expr, 1);
3713 else
3714 break;
3716 else
3717 break;
3720 return expr;
3723 /* Return which tree structure is used by T. */
3725 enum tree_node_structure_enum
3726 tree_node_structure (const_tree t)
3728 const enum tree_code code = TREE_CODE (t);
3729 return tree_node_structure_for_code (code);
3732 /* Set various status flags when building a CALL_EXPR object T. */
3734 static void
3735 process_call_operands (tree t)
3737 bool side_effects = TREE_SIDE_EFFECTS (t);
3738 bool read_only = false;
3739 int i = call_expr_flags (t);
3741 /* Calls have side-effects, except those to const or pure functions. */
3742 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3743 side_effects = true;
3744 /* Propagate TREE_READONLY of arguments for const functions. */
3745 if (i & ECF_CONST)
3746 read_only = true;
3748 if (!side_effects || read_only)
3749 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3751 tree op = TREE_OPERAND (t, i);
3752 if (op && TREE_SIDE_EFFECTS (op))
3753 side_effects = true;
3754 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3755 read_only = false;
3758 TREE_SIDE_EFFECTS (t) = side_effects;
3759 TREE_READONLY (t) = read_only;
3762 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3763 size or offset that depends on a field within a record. */
3765 bool
3766 contains_placeholder_p (const_tree exp)
3768 enum tree_code code;
3770 if (!exp)
3771 return 0;
3773 code = TREE_CODE (exp);
3774 if (code == PLACEHOLDER_EXPR)
3775 return 1;
3777 switch (TREE_CODE_CLASS (code))
3779 case tcc_reference:
3780 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3781 position computations since they will be converted into a
3782 WITH_RECORD_EXPR involving the reference, which we assume
3783 here will be valid. */
3784 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3786 case tcc_exceptional:
3787 if (code == TREE_LIST)
3788 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3789 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3790 break;
3792 case tcc_unary:
3793 case tcc_binary:
3794 case tcc_comparison:
3795 case tcc_expression:
3796 switch (code)
3798 case COMPOUND_EXPR:
3799 /* Ignoring the first operand isn't quite right, but works best. */
3800 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3802 case COND_EXPR:
3803 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3804 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3805 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3807 case SAVE_EXPR:
3808 /* The save_expr function never wraps anything containing
3809 a PLACEHOLDER_EXPR. */
3810 return 0;
3812 default:
3813 break;
3816 switch (TREE_CODE_LENGTH (code))
3818 case 1:
3819 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3820 case 2:
3821 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3822 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3823 default:
3824 return 0;
3827 case tcc_vl_exp:
3828 switch (code)
3830 case CALL_EXPR:
3832 const_tree arg;
3833 const_call_expr_arg_iterator iter;
3834 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3835 if (CONTAINS_PLACEHOLDER_P (arg))
3836 return 1;
3837 return 0;
3839 default:
3840 return 0;
3843 default:
3844 return 0;
3846 return 0;
3849 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3850 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3851 field positions. */
3853 static bool
3854 type_contains_placeholder_1 (const_tree type)
3856 /* If the size contains a placeholder or the parent type (component type in
3857 the case of arrays) type involves a placeholder, this type does. */
3858 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3859 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3860 || (!POINTER_TYPE_P (type)
3861 && TREE_TYPE (type)
3862 && type_contains_placeholder_p (TREE_TYPE (type))))
3863 return true;
3865 /* Now do type-specific checks. Note that the last part of the check above
3866 greatly limits what we have to do below. */
3867 switch (TREE_CODE (type))
3869 case VOID_TYPE:
3870 case COMPLEX_TYPE:
3871 case ENUMERAL_TYPE:
3872 case BOOLEAN_TYPE:
3873 case POINTER_TYPE:
3874 case OFFSET_TYPE:
3875 case REFERENCE_TYPE:
3876 case METHOD_TYPE:
3877 case FUNCTION_TYPE:
3878 case VECTOR_TYPE:
3879 case NULLPTR_TYPE:
3880 return false;
3882 case INTEGER_TYPE:
3883 case REAL_TYPE:
3884 case FIXED_POINT_TYPE:
3885 /* Here we just check the bounds. */
3886 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3887 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3889 case ARRAY_TYPE:
3890 /* We have already checked the component type above, so just check
3891 the domain type. Flexible array members have a null domain. */
3892 return TYPE_DOMAIN (type) ?
3893 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3895 case RECORD_TYPE:
3896 case UNION_TYPE:
3897 case QUAL_UNION_TYPE:
3899 tree field;
3901 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3902 if (TREE_CODE (field) == FIELD_DECL
3903 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3904 || (TREE_CODE (type) == QUAL_UNION_TYPE
3905 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3906 || type_contains_placeholder_p (TREE_TYPE (field))))
3907 return true;
3909 return false;
3912 default:
3913 gcc_unreachable ();
3917 /* Wrapper around above function used to cache its result. */
3919 bool
3920 type_contains_placeholder_p (tree type)
3922 bool result;
3924 /* If the contains_placeholder_bits field has been initialized,
3925 then we know the answer. */
3926 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3927 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3929 /* Indicate that we've seen this type node, and the answer is false.
3930 This is what we want to return if we run into recursion via fields. */
3931 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3933 /* Compute the real value. */
3934 result = type_contains_placeholder_1 (type);
3936 /* Store the real value. */
3937 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3939 return result;
3942 /* Push tree EXP onto vector QUEUE if it is not already present. */
3944 static void
3945 push_without_duplicates (tree exp, vec<tree> *queue)
3947 unsigned int i;
3948 tree iter;
3950 FOR_EACH_VEC_ELT (*queue, i, iter)
3951 if (simple_cst_equal (iter, exp) == 1)
3952 break;
3954 if (!iter)
3955 queue->safe_push (exp);
3958 /* Given a tree EXP, find all occurrences of references to fields
3959 in a PLACEHOLDER_EXPR and place them in vector REFS without
3960 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3961 we assume here that EXP contains only arithmetic expressions
3962 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3963 argument list. */
3965 void
3966 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3968 enum tree_code code = TREE_CODE (exp);
3969 tree inner;
3970 int i;
3972 /* We handle TREE_LIST and COMPONENT_REF separately. */
3973 if (code == TREE_LIST)
3975 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3976 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3978 else if (code == COMPONENT_REF)
3980 for (inner = TREE_OPERAND (exp, 0);
3981 REFERENCE_CLASS_P (inner);
3982 inner = TREE_OPERAND (inner, 0))
3985 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3986 push_without_duplicates (exp, refs);
3987 else
3988 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3990 else
3991 switch (TREE_CODE_CLASS (code))
3993 case tcc_constant:
3994 break;
3996 case tcc_declaration:
3997 /* Variables allocated to static storage can stay. */
3998 if (!TREE_STATIC (exp))
3999 push_without_duplicates (exp, refs);
4000 break;
4002 case tcc_expression:
4003 /* This is the pattern built in ada/make_aligning_type. */
4004 if (code == ADDR_EXPR
4005 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4007 push_without_duplicates (exp, refs);
4008 break;
4011 /* Fall through. */
4013 case tcc_exceptional:
4014 case tcc_unary:
4015 case tcc_binary:
4016 case tcc_comparison:
4017 case tcc_reference:
4018 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4019 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4020 break;
4022 case tcc_vl_exp:
4023 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4024 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4025 break;
4027 default:
4028 gcc_unreachable ();
4032 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4033 return a tree with all occurrences of references to F in a
4034 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4035 CONST_DECLs. Note that we assume here that EXP contains only
4036 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4037 occurring only in their argument list. */
4039 tree
4040 substitute_in_expr (tree exp, tree f, tree r)
4042 enum tree_code code = TREE_CODE (exp);
4043 tree op0, op1, op2, op3;
4044 tree new_tree;
4046 /* We handle TREE_LIST and COMPONENT_REF separately. */
4047 if (code == TREE_LIST)
4049 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4050 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4051 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4052 return exp;
4054 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4056 else if (code == COMPONENT_REF)
4058 tree inner;
4060 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4061 and it is the right field, replace it with R. */
4062 for (inner = TREE_OPERAND (exp, 0);
4063 REFERENCE_CLASS_P (inner);
4064 inner = TREE_OPERAND (inner, 0))
4067 /* The field. */
4068 op1 = TREE_OPERAND (exp, 1);
4070 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4071 return r;
4073 /* If this expression hasn't been completed yet, leave it alone. */
4074 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4075 return exp;
4077 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4078 if (op0 == TREE_OPERAND (exp, 0))
4079 return exp;
4081 new_tree
4082 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4084 else
4085 switch (TREE_CODE_CLASS (code))
4087 case tcc_constant:
4088 return exp;
4090 case tcc_declaration:
4091 if (exp == f)
4092 return r;
4093 else
4094 return exp;
4096 case tcc_expression:
4097 if (exp == f)
4098 return r;
4100 /* Fall through. */
4102 case tcc_exceptional:
4103 case tcc_unary:
4104 case tcc_binary:
4105 case tcc_comparison:
4106 case tcc_reference:
4107 switch (TREE_CODE_LENGTH (code))
4109 case 0:
4110 return exp;
4112 case 1:
4113 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4114 if (op0 == TREE_OPERAND (exp, 0))
4115 return exp;
4117 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4118 break;
4120 case 2:
4121 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4122 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4124 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4125 return exp;
4127 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4128 break;
4130 case 3:
4131 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4132 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4133 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4135 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4136 && op2 == TREE_OPERAND (exp, 2))
4137 return exp;
4139 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4140 break;
4142 case 4:
4143 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4144 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4145 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4146 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4148 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4149 && op2 == TREE_OPERAND (exp, 2)
4150 && op3 == TREE_OPERAND (exp, 3))
4151 return exp;
4153 new_tree
4154 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4155 break;
4157 default:
4158 gcc_unreachable ();
4160 break;
4162 case tcc_vl_exp:
4164 int i;
4166 new_tree = NULL_TREE;
4168 /* If we are trying to replace F with a constant or with another
4169 instance of one of the arguments of the call, inline back
4170 functions which do nothing other than compute a value from
4171 the arguments they are passed. This makes it possible to
4172 fold partially or entirely the replacement expression. */
4173 if (code == CALL_EXPR)
4175 bool maybe_inline = false;
4176 if (CONSTANT_CLASS_P (r))
4177 maybe_inline = true;
4178 else
4179 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4180 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4182 maybe_inline = true;
4183 break;
4185 if (maybe_inline)
4187 tree t = maybe_inline_call_in_expr (exp);
4188 if (t)
4189 return SUBSTITUTE_IN_EXPR (t, f, r);
4193 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4195 tree op = TREE_OPERAND (exp, i);
4196 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4197 if (new_op != op)
4199 if (!new_tree)
4200 new_tree = copy_node (exp);
4201 TREE_OPERAND (new_tree, i) = new_op;
4205 if (new_tree)
4207 new_tree = fold (new_tree);
4208 if (TREE_CODE (new_tree) == CALL_EXPR)
4209 process_call_operands (new_tree);
4211 else
4212 return exp;
4214 break;
4216 default:
4217 gcc_unreachable ();
4220 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4222 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4223 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4225 return new_tree;
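/* Illustrative sketch (the trees involved are hypothetical, in the style of
   what the Ada front end produces): if SIZE is a size expression that refers
   to a discriminant field F through a PLACEHOLDER_EXPR, then

     tree fixed = substitute_in_expr (size, f, actual_value);

   rewrites every such reference to ACTUAL_VALUE and re-folds the result,
   possibly inlining trivial calls as described above so that the folded
   form can become constant.  */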
4228 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4229 for it within OBJ, a tree that is an object or a chain of references. */
4231 tree
4232 substitute_placeholder_in_expr (tree exp, tree obj)
4234 enum tree_code code = TREE_CODE (exp);
4235 tree op0, op1, op2, op3;
4236 tree new_tree;
4238 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4239 in the chain of OBJ. */
4240 if (code == PLACEHOLDER_EXPR)
4242 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4243 tree elt;
4245 for (elt = obj; elt != 0;
4246 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4247 || TREE_CODE (elt) == COND_EXPR)
4248 ? TREE_OPERAND (elt, 1)
4249 : (REFERENCE_CLASS_P (elt)
4250 || UNARY_CLASS_P (elt)
4251 || BINARY_CLASS_P (elt)
4252 || VL_EXP_CLASS_P (elt)
4253 || EXPRESSION_CLASS_P (elt))
4254 ? TREE_OPERAND (elt, 0) : 0))
4255 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4256 return elt;
4258 for (elt = obj; elt != 0;
4259 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4260 || TREE_CODE (elt) == COND_EXPR)
4261 ? TREE_OPERAND (elt, 1)
4262 : (REFERENCE_CLASS_P (elt)
4263 || UNARY_CLASS_P (elt)
4264 || BINARY_CLASS_P (elt)
4265 || VL_EXP_CLASS_P (elt)
4266 || EXPRESSION_CLASS_P (elt))
4267 ? TREE_OPERAND (elt, 0) : 0))
4268 if (POINTER_TYPE_P (TREE_TYPE (elt))
4269 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4270 == need_type))
4271 return fold_build1 (INDIRECT_REF, need_type, elt);
4273 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4274 survives until RTL generation, there will be an error. */
4275 return exp;
4278 /* TREE_LIST is special because we need to look at TREE_VALUE
4279 and TREE_CHAIN, not TREE_OPERANDS. */
4280 else if (code == TREE_LIST)
4282 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4283 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4284 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4285 return exp;
4287 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4289 else
4290 switch (TREE_CODE_CLASS (code))
4292 case tcc_constant:
4293 case tcc_declaration:
4294 return exp;
4296 case tcc_exceptional:
4297 case tcc_unary:
4298 case tcc_binary:
4299 case tcc_comparison:
4300 case tcc_expression:
4301 case tcc_reference:
4302 case tcc_statement:
4303 switch (TREE_CODE_LENGTH (code))
4305 case 0:
4306 return exp;
4308 case 1:
4309 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4310 if (op0 == TREE_OPERAND (exp, 0))
4311 return exp;
4313 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4314 break;
4316 case 2:
4317 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4318 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4320 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4321 return exp;
4323 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4324 break;
4326 case 3:
4327 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4328 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4329 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4331 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4332 && op2 == TREE_OPERAND (exp, 2))
4333 return exp;
4335 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4336 break;
4338 case 4:
4339 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4340 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4341 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4342 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4344 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4345 && op2 == TREE_OPERAND (exp, 2)
4346 && op3 == TREE_OPERAND (exp, 3))
4347 return exp;
4349 new_tree
4350 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4351 break;
4353 default:
4354 gcc_unreachable ();
4356 break;
4358 case tcc_vl_exp:
4360 int i;
4362 new_tree = NULL_TREE;
4364 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4366 tree op = TREE_OPERAND (exp, i);
4367 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4368 if (new_op != op)
4370 if (!new_tree)
4371 new_tree = copy_node (exp);
4372 TREE_OPERAND (new_tree, i) = new_op;
4376 if (new_tree)
4378 new_tree = fold (new_tree);
4379 if (TREE_CODE (new_tree) == CALL_EXPR)
4380 process_call_operands (new_tree);
4382 else
4383 return exp;
4385 break;
4387 default:
4388 gcc_unreachable ();
4391 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4393 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4394 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4396 return new_tree;
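/* Illustrative sketch (OBJ and the field names are hypothetical): for a
   self-referential size such as

     (PLACEHOLDER_EXPR.last - PLACEHOLDER_EXPR.first + 1) * 4

   attached to the type of OBJ, the size of that particular object can be
   obtained with

     tree size = TYPE_SIZE_UNIT (TREE_TYPE (obj));
     if (CONTAINS_PLACEHOLDER_P (size))
       size = substitute_placeholder_in_expr (size, obj);

   Each PLACEHOLDER_EXPR is replaced by the reference within OBJ whose type
   matches it, possibly through an INDIRECT_REF when only a pointer to the
   needed type is found.  */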
4400 /* Subroutine of stabilize_reference; this is called for subtrees of
4401 references. Any expression with side-effects must be put in a SAVE_EXPR
4402 to ensure that it is only evaluated once.
4404 We don't put SAVE_EXPR nodes around everything, because assigning very
4405 simple expressions to temporaries causes us to miss good opportunities
4406 for optimizations. Among other things, the opportunity to fold in the
4407 addition of a constant into an addressing mode often gets lost, e.g.
4408 "y[i+1] += x;". In general, we take the approach that we should not make
4409 an assignment unless we are forced into it - i.e., that any non-side effect
4410 operator should be allowed, and that cse should take care of coalescing
4411 multiple utterances of the same expression should that prove fruitful. */
4413 static tree
4414 stabilize_reference_1 (tree e)
4416 tree result;
4417 enum tree_code code = TREE_CODE (e);
4419 /* We cannot ignore const expressions because it might be a reference
4420 to a const array whose index contains side-effects. But we can
4421 ignore things that are actual constants or that have already been
4422 handled by this function. */
4424 if (tree_invariant_p (e))
4425 return e;
4427 switch (TREE_CODE_CLASS (code))
4429 case tcc_exceptional:
4430 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4431 have side-effects. */
4432 if (code == STATEMENT_LIST)
4433 return save_expr (e);
4434 /* FALLTHRU */
4435 case tcc_type:
4436 case tcc_declaration:
4437 case tcc_comparison:
4438 case tcc_statement:
4439 case tcc_expression:
4440 case tcc_reference:
4441 case tcc_vl_exp:
4442 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4443 so that it will only be evaluated once. */
4444 /* The reference (r) and comparison (<) classes could be handled as
4445 below, but it is generally faster to only evaluate them once. */
4446 if (TREE_SIDE_EFFECTS (e))
4447 return save_expr (e);
4448 return e;
4450 case tcc_constant:
4451 /* Constants need no processing. In fact, we should never reach
4452 here. */
4453 return e;
4455 case tcc_binary:
4456 /* Division is slow and tends to be compiled with jumps,
4457 especially the division by powers of 2 that is often
4458 found inside of an array reference. So do it just once. */
4459 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4460 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4461 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4462 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4463 return save_expr (e);
4464 /* Recursively stabilize each operand. */
4465 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4466 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4467 break;
4469 case tcc_unary:
4470 /* Recursively stabilize each operand. */
4471 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4472 break;
4474 default:
4475 gcc_unreachable ();
4478 TREE_TYPE (result) = TREE_TYPE (e);
4479 TREE_READONLY (result) = TREE_READONLY (e);
4480 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4481 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4483 return result;
4486 /* Stabilize a reference so that we can use it any number of times
4487 without causing its operands to be evaluated more than once.
4488 Returns the stabilized reference. This works by means of save_expr,
4489 so see the caveats in the comments about save_expr.
4491 Also allows conversion expressions whose operands are references.
4492 Any other kind of expression is returned unchanged. */
4494 tree
4495 stabilize_reference (tree ref)
4497 tree result;
4498 enum tree_code code = TREE_CODE (ref);
4500 switch (code)
4502 case VAR_DECL:
4503 case PARM_DECL:
4504 case RESULT_DECL:
4505 /* No action is needed in this case. */
4506 return ref;
4508 CASE_CONVERT:
4509 case FLOAT_EXPR:
4510 case FIX_TRUNC_EXPR:
4511 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4512 break;
4514 case INDIRECT_REF:
4515 result = build_nt (INDIRECT_REF,
4516 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4517 break;
4519 case COMPONENT_REF:
4520 result = build_nt (COMPONENT_REF,
4521 stabilize_reference (TREE_OPERAND (ref, 0)),
4522 TREE_OPERAND (ref, 1), NULL_TREE);
4523 break;
4525 case BIT_FIELD_REF:
4526 result = build_nt (BIT_FIELD_REF,
4527 stabilize_reference (TREE_OPERAND (ref, 0)),
4528 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4529 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4530 break;
4532 case ARRAY_REF:
4533 result = build_nt (ARRAY_REF,
4534 stabilize_reference (TREE_OPERAND (ref, 0)),
4535 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4536 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4537 break;
4539 case ARRAY_RANGE_REF:
4540 result = build_nt (ARRAY_RANGE_REF,
4541 stabilize_reference (TREE_OPERAND (ref, 0)),
4542 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4543 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4544 break;
4546 case COMPOUND_EXPR:
4547 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4548 it wouldn't be ignored. This matters when dealing with
4549 volatiles. */
4550 return stabilize_reference_1 (ref);
4552 /* If arg isn't a kind of lvalue we recognize, make no change.
4553 Caller should recognize the error for an invalid lvalue. */
4554 default:
4555 return ref;
4557 case ERROR_MARK:
4558 return error_mark_node;
4561 TREE_TYPE (result) = TREE_TYPE (ref);
4562 TREE_READONLY (result) = TREE_READONLY (ref);
4563 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4564 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4566 return result;
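/* A minimal usage sketch (REF is a hypothetical ARRAY_REF whose index
   expression has side effects, e.g. "a[i++]"):

     tree stable = stabilize_reference (ref);
     tree incr = build2 (MODIFY_EXPR, TREE_TYPE (stable), stable,
                         build2 (PLUS_EXPR, TREE_TYPE (stable), stable,
                                 integer_one_node));

   The index is wrapped in a SAVE_EXPR by stabilize_reference_1, so both
   uses of STABLE in the assignment evaluate "i++" only once.  */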
4569 /* Low-level constructors for expressions. */
4571 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4572 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4574 void
4575 recompute_tree_invariant_for_addr_expr (tree t)
4577 tree node;
4578 bool tc = true, se = false;
4580 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4582 /* We started out assuming this address is both invariant and constant, but
4583 does not have side effects. Now go down any handled components and see if
4584 any of them involve offsets that are either non-constant or non-invariant.
4585 Also check for side-effects.
4587 ??? Note that this code makes no attempt to deal with the case where
4588 taking the address of something causes a copy due to misalignment. */
4590 #define UPDATE_FLAGS(NODE) \
4591 do { tree _node = (NODE); \
4592 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4593 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4595 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4596 node = TREE_OPERAND (node, 0))
4598 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4599 array reference (probably made temporarily by the G++ front end),
4600 so ignore all the operands. */
4601 if ((TREE_CODE (node) == ARRAY_REF
4602 || TREE_CODE (node) == ARRAY_RANGE_REF)
4603 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4605 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4606 if (TREE_OPERAND (node, 2))
4607 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4608 if (TREE_OPERAND (node, 3))
4609 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4611 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4612 FIELD_DECL, apparently. The G++ front end can put something else
4613 there, at least temporarily. */
4614 else if (TREE_CODE (node) == COMPONENT_REF
4615 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4617 if (TREE_OPERAND (node, 2))
4618 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4622 node = lang_hooks.expr_to_decl (node, &tc, &se);
4624 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4625 the address, since &(*a)->b is a form of addition. If it's a constant, the
4626 address is constant too. If it's a decl, its address is constant if the
4627 decl is static. Everything else is not constant and, furthermore,
4628 taking the address of a volatile variable is not volatile. */
4629 if (TREE_CODE (node) == INDIRECT_REF
4630 || TREE_CODE (node) == MEM_REF)
4631 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4632 else if (CONSTANT_CLASS_P (node))
4634 else if (DECL_P (node))
4635 tc &= (staticp (node) != NULL_TREE);
4636 else
4638 tc = false;
4639 se |= TREE_SIDE_EFFECTS (node);
4643 TREE_CONSTANT (t) = tc;
4644 TREE_SIDE_EFFECTS (t) = se;
4645 #undef UPDATE_FLAGS
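/* For example (sketch; T is a hypothetical ADDR_EXPR whose operand was just
   rewritten in place by a caller rather than rebuilt with build1):

     TREE_OPERAND (t, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (t);

   Afterwards TREE_CONSTANT (t) is true only when the new base is a static
   decl or a constant, and TREE_SIDE_EFFECTS (t) reflects any offsets with
   side effects in the handled components.  */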
4648 /* Build an expression of code CODE, data type TYPE, and operands as
4649 specified. Expressions and reference nodes can be created this way.
4650 Constants, decls, types and misc nodes cannot be.
4652 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4653 enough for all extant tree codes. */
4655 tree
4656 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4658 tree t;
4660 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4662 t = make_node (code PASS_MEM_STAT);
4663 TREE_TYPE (t) = tt;
4665 return t;
4668 tree
4669 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4671 int length = sizeof (struct tree_exp);
4672 tree t;
4674 record_node_allocation_statistics (code, length);
4676 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4678 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4680 memset (t, 0, sizeof (struct tree_common));
4682 TREE_SET_CODE (t, code);
4684 TREE_TYPE (t) = type;
4685 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4686 TREE_OPERAND (t, 0) = node;
4687 if (node && !TYPE_P (node))
4689 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4690 TREE_READONLY (t) = TREE_READONLY (node);
4693 if (TREE_CODE_CLASS (code) == tcc_statement)
4695 if (code != DEBUG_BEGIN_STMT)
4696 TREE_SIDE_EFFECTS (t) = 1;
4698 else switch (code)
4700 case VA_ARG_EXPR:
4701 /* All of these have side-effects, no matter what their
4702 operands are. */
4703 TREE_SIDE_EFFECTS (t) = 1;
4704 TREE_READONLY (t) = 0;
4705 break;
4707 case INDIRECT_REF:
4708 /* Whether a dereference is readonly has nothing to do with whether
4709 its operand is readonly. */
4710 TREE_READONLY (t) = 0;
4711 break;
4713 case ADDR_EXPR:
4714 if (node)
4715 recompute_tree_invariant_for_addr_expr (t);
4716 break;
4718 default:
4719 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4720 && node && !TYPE_P (node)
4721 && TREE_CONSTANT (node))
4722 TREE_CONSTANT (t) = 1;
4723 if (TREE_CODE_CLASS (code) == tcc_reference
4724 && node && TREE_THIS_VOLATILE (node))
4725 TREE_THIS_VOLATILE (t) = 1;
4726 break;
4729 return t;
4732 #define PROCESS_ARG(N) \
4733 do { \
4734 TREE_OPERAND (t, N) = arg##N; \
4735 if (arg##N && !TYPE_P (arg##N)) \
4737 if (TREE_SIDE_EFFECTS (arg##N)) \
4738 side_effects = 1; \
4739 if (!TREE_READONLY (arg##N) \
4740 && !CONSTANT_CLASS_P (arg##N)) \
4741 (void) (read_only = 0); \
4742 if (!TREE_CONSTANT (arg##N)) \
4743 (void) (constant = 0); \
4745 } while (0)
4747 tree
4748 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4750 bool constant, read_only, side_effects, div_by_zero;
4751 tree t;
4753 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4755 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4756 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4757 /* When sizetype precision doesn't match that of pointers
4758 we need to be able to build explicit extensions or truncations
4759 of the offset argument. */
4760 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4761 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4762 && TREE_CODE (arg1) == INTEGER_CST);
4764 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4765 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4766 && ptrofftype_p (TREE_TYPE (arg1)));
4768 t = make_node (code PASS_MEM_STAT);
4769 TREE_TYPE (t) = tt;
4771 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4772 result based on those same flags for the arguments. But if the
4773 arguments aren't really even `tree' expressions, we shouldn't be trying
4774 to do this. */
4776 /* Expressions without side effects may be constant if their
4777 arguments are as well. */
4778 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4779 || TREE_CODE_CLASS (code) == tcc_binary);
4780 read_only = 1;
4781 side_effects = TREE_SIDE_EFFECTS (t);
4783 switch (code)
4785 case TRUNC_DIV_EXPR:
4786 case CEIL_DIV_EXPR:
4787 case FLOOR_DIV_EXPR:
4788 case ROUND_DIV_EXPR:
4789 case EXACT_DIV_EXPR:
4790 case CEIL_MOD_EXPR:
4791 case FLOOR_MOD_EXPR:
4792 case ROUND_MOD_EXPR:
4793 case TRUNC_MOD_EXPR:
4794 div_by_zero = integer_zerop (arg1);
4795 break;
4796 default:
4797 div_by_zero = false;
4800 PROCESS_ARG (0);
4801 PROCESS_ARG (1);
4803 TREE_SIDE_EFFECTS (t) = side_effects;
4804 if (code == MEM_REF)
4806 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4808 tree o = TREE_OPERAND (arg0, 0);
4809 TREE_READONLY (t) = TREE_READONLY (o);
4810 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4813 else
4815 TREE_READONLY (t) = read_only;
4816 /* Don't mark X / 0 as constant. */
4817 TREE_CONSTANT (t) = constant && !div_by_zero;
4818 TREE_THIS_VOLATILE (t)
4819 = (TREE_CODE_CLASS (code) == tcc_reference
4820 && arg0 && TREE_THIS_VOLATILE (arg0));
4823 return t;
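/* A minimal usage sketch (A and B are hypothetical trees of type
   integer_type_node):

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree twice = build2 (MULT_EXPR, integer_type_node, sum,
                          build_int_cst (integer_type_node, 2));

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of the result are
   derived from the operands by PROCESS_ARG above; use fold_build2 instead
   when constant folding of the result is wanted.  */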
4827 tree
4828 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4829 tree arg2 MEM_STAT_DECL)
4831 bool constant, read_only, side_effects;
4832 tree t;
4834 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4835 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4837 t = make_node (code PASS_MEM_STAT);
4838 TREE_TYPE (t) = tt;
4840 read_only = 1;
4842 /* As a special exception, if COND_EXPR has NULL branches, we
4843 assume that it is a gimple statement and always consider
4844 it to have side effects. */
4845 if (code == COND_EXPR
4846 && tt == void_type_node
4847 && arg1 == NULL_TREE
4848 && arg2 == NULL_TREE)
4849 side_effects = true;
4850 else
4851 side_effects = TREE_SIDE_EFFECTS (t);
4853 PROCESS_ARG (0);
4854 PROCESS_ARG (1);
4855 PROCESS_ARG (2);
4857 if (code == COND_EXPR)
4858 TREE_READONLY (t) = read_only;
4860 TREE_SIDE_EFFECTS (t) = side_effects;
4861 TREE_THIS_VOLATILE (t)
4862 = (TREE_CODE_CLASS (code) == tcc_reference
4863 && arg0 && TREE_THIS_VOLATILE (arg0));
4865 return t;
4868 tree
4869 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4870 tree arg2, tree arg3 MEM_STAT_DECL)
4872 bool constant, read_only, side_effects;
4873 tree t;
4875 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4877 t = make_node (code PASS_MEM_STAT);
4878 TREE_TYPE (t) = tt;
4880 side_effects = TREE_SIDE_EFFECTS (t);
4882 PROCESS_ARG (0);
4883 PROCESS_ARG (1);
4884 PROCESS_ARG (2);
4885 PROCESS_ARG (3);
4887 TREE_SIDE_EFFECTS (t) = side_effects;
4888 TREE_THIS_VOLATILE (t)
4889 = (TREE_CODE_CLASS (code) == tcc_reference
4890 && arg0 && TREE_THIS_VOLATILE (arg0));
4892 return t;
4895 tree
4896 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4897 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4899 bool constant, read_only, side_effects;
4900 tree t;
4902 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4904 t = make_node (code PASS_MEM_STAT);
4905 TREE_TYPE (t) = tt;
4907 side_effects = TREE_SIDE_EFFECTS (t);
4909 PROCESS_ARG (0);
4910 PROCESS_ARG (1);
4911 PROCESS_ARG (2);
4912 PROCESS_ARG (3);
4913 PROCESS_ARG (4);
4915 TREE_SIDE_EFFECTS (t) = side_effects;
4916 if (code == TARGET_MEM_REF)
4918 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4920 tree o = TREE_OPERAND (arg0, 0);
4921 TREE_READONLY (t) = TREE_READONLY (o);
4922 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4925 else
4926 TREE_THIS_VOLATILE (t)
4927 = (TREE_CODE_CLASS (code) == tcc_reference
4928 && arg0 && TREE_THIS_VOLATILE (arg0));
4930 return t;
4933 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4934 on the pointer PTR. */
4936 tree
4937 build_simple_mem_ref_loc (location_t loc, tree ptr)
4939 poly_int64 offset = 0;
4940 tree ptype = TREE_TYPE (ptr);
4941 tree tem;
4942 /* For convenience allow addresses that collapse to a simple base
4943 and offset. */
4944 if (TREE_CODE (ptr) == ADDR_EXPR
4945 && (handled_component_p (TREE_OPERAND (ptr, 0))
4946 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4948 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4949 gcc_assert (ptr);
4950 if (TREE_CODE (ptr) == MEM_REF)
4952 offset += mem_ref_offset (ptr).force_shwi ();
4953 ptr = TREE_OPERAND (ptr, 0);
4955 else
4956 ptr = build_fold_addr_expr (ptr);
4957 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4959 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4960 ptr, build_int_cst (ptype, offset));
4961 SET_EXPR_LOCATION (tem, loc);
4962 return tem;
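/* Sketch of a typical use (PTR is a hypothetical pointer-valued SSA name or
   ADDR_EXPR): building the equivalent of "*ptr" as

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields a MEM_REF whose second operand is an INTEGER_CST of the pointer
   type holding the accumulated offset; the convenience wrapper
   build_simple_mem_ref uses UNKNOWN_LOCATION.  */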
4965 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4967 poly_offset_int
4968 mem_ref_offset (const_tree t)
4970 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4971 SIGNED);
4974 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4975 offsetted by OFFSET units. */
4977 tree
4978 build_invariant_address (tree type, tree base, poly_int64 offset)
4980 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4981 build_fold_addr_expr (base),
4982 build_int_cst (ptr_type_node, offset));
4983 tree addr = build1 (ADDR_EXPR, type, ref);
4984 recompute_tree_invariant_for_addr_expr (addr);
4985 return addr;
4988 /* Similar except don't specify the TREE_TYPE
4989 and leave the TREE_SIDE_EFFECTS as 0.
4990 It is permissible for arguments to be null,
4991 or even garbage if their values do not matter. */
4993 tree
4994 build_nt (enum tree_code code, ...)
4996 tree t;
4997 int length;
4998 int i;
4999 va_list p;
5001 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5003 va_start (p, code);
5005 t = make_node (code);
5006 length = TREE_CODE_LENGTH (code);
5008 for (i = 0; i < length; i++)
5009 TREE_OPERAND (t, i) = va_arg (p, tree);
5011 va_end (p);
5012 return t;
5015 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5016 tree vec. */
5018 tree
5019 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5021 tree ret, t;
5022 unsigned int ix;
5024 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5025 CALL_EXPR_FN (ret) = fn;
5026 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5027 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5028 CALL_EXPR_ARG (ret, ix) = t;
5029 return ret;
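/* Sketch (FN is a hypothetical callee tree, typically the ADDR_EXPR of a
   FUNCTION_DECL):

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, build_int_cst (integer_type_node, 42));
     tree call = build_nt_call_vec (fn, args);

   As with build_nt, no TREE_TYPE is assigned and TREE_SIDE_EFFECTS is left
   at 0; the caller is responsible for setting them.  */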
5032 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5033 and data type TYPE.
5034 We do NOT enter this node in any sort of symbol table.
5036 LOC is the location of the decl.
5038 layout_decl is used to set up the decl's storage layout.
5039 Other slots are initialized to 0 or null pointers. */
5041 tree
5042 build_decl (location_t loc, enum tree_code code, tree name,
5043 tree type MEM_STAT_DECL)
5045 tree t;
5047 t = make_node (code PASS_MEM_STAT);
5048 DECL_SOURCE_LOCATION (t) = loc;
5050 /* if (type == error_mark_node)
5051 type = integer_type_node; */
5052 /* That is not done, deliberately, so that having error_mark_node
5053 as the type can suppress useless errors in the use of this variable. */
5055 DECL_NAME (t) = name;
5056 TREE_TYPE (t) = type;
5058 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5059 layout_decl (t, 0);
5061 return t;
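/* Illustrative sketch (the identifier and type are arbitrary choices, not
   names used elsewhere in this file):

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
     TREE_STATIC (var) = 1;

   For VAR_DECL, PARM_DECL and RESULT_DECL the storage layout is computed
   immediately via layout_decl; other decl kinds are left for the caller to
   lay out.  */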
5064 /* Builds and returns function declaration with NAME and TYPE. */
5066 tree
5067 build_fn_decl (const char *name, tree type)
5069 tree id = get_identifier (name);
5070 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5072 DECL_EXTERNAL (decl) = 1;
5073 TREE_PUBLIC (decl) = 1;
5074 DECL_ARTIFICIAL (decl) = 1;
5075 TREE_NOTHROW (decl) = 1;
5077 return decl;
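/* Sketch (the function name and signature are hypothetical):

     tree fntype = build_function_type_list (void_type_node, ptr_type_node,
                                             NULL_TREE);
     tree decl = build_fn_decl ("__example_helper", fntype);

   The result is an external, public, artificial and nothrow FUNCTION_DECL,
   suitable for declaring a runtime support routine.  */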
5080 vec<tree, va_gc> *all_translation_units;
5082 /* Builds a new translation-unit decl with name NAME, queues it in the
5083 global list of translation-unit decls and returns it. */
5085 tree
5086 build_translation_unit_decl (tree name)
5088 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5089 name, NULL_TREE);
5090 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5091 vec_safe_push (all_translation_units, tu);
5092 return tu;
5096 /* BLOCK nodes are used to represent the structure of binding contours
5097 and declarations, once those contours have been exited and their contents
5098 compiled. This information is used for outputting debugging info. */
5100 tree
5101 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5103 tree block = make_node (BLOCK);
5105 BLOCK_VARS (block) = vars;
5106 BLOCK_SUBBLOCKS (block) = subblocks;
5107 BLOCK_SUPERCONTEXT (block) = supercontext;
5108 BLOCK_CHAIN (block) = chain;
5109 return block;
5113 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5115 LOC is the location to use in tree T. */
5117 void
5118 protected_set_expr_location (tree t, location_t loc)
5120 if (CAN_HAVE_LOCATION_P (t))
5121 SET_EXPR_LOCATION (t, loc);
5124 /* Data used when collecting DECLs and TYPEs for language data removal. */
5126 class free_lang_data_d
5128 public:
5129 free_lang_data_d () : decls (100), types (100) {}
5131 /* Worklist to avoid excessive recursion. */
5132 auto_vec<tree> worklist;
5134 /* Set of traversed objects. Used to avoid duplicate visits. */
5135 hash_set<tree> pset;
5137 /* Array of symbols to process with free_lang_data_in_decl. */
5138 auto_vec<tree> decls;
5140 /* Array of types to process with free_lang_data_in_type. */
5141 auto_vec<tree> types;
5145 /* Add type or decl T to one of the lists of tree nodes that need their
5146 language data removed. The lists are held inside FLD. */
5148 static void
5149 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5151 if (DECL_P (t))
5152 fld->decls.safe_push (t);
5153 else if (TYPE_P (t))
5154 fld->types.safe_push (t);
5155 else
5156 gcc_unreachable ();
5159 /* Push tree node T into FLD->WORKLIST. */
5161 static inline void
5162 fld_worklist_push (tree t, class free_lang_data_d *fld)
5164 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5165 fld->worklist.safe_push ((t));
5170 /* Return simplified TYPE_NAME of TYPE. */
5172 static tree
5173 fld_simplified_type_name (tree type)
5175 if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5176 return TYPE_NAME (type);
5177 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5178 TYPE_DECL if the type doesn't have linkage.
5179 This must match fld_ */
5180 if (type != TYPE_MAIN_VARIANT (type)
5181 || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
5182 && (TREE_CODE (type) != RECORD_TYPE
5183 || !TYPE_BINFO (type)
5184 || !BINFO_VTABLE (TYPE_BINFO (type)))))
5185 return DECL_NAME (TYPE_NAME (type));
5186 return TYPE_NAME (type);
5189 /* Do the same comparison as check_qualified_type, skipping the lang part of
5190 the type, and be more permissive about type names: we only care that the
5191 names are the same (for diagnostics) and that the ODR names are the same.
5192 If INNER_TYPE is non-NULL, be sure that TREE_TYPE matches it. */
5194 static bool
5195 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5197 if (TYPE_QUALS (t) != TYPE_QUALS (v)
5198 /* We want to match incomplete variants with complete types.
5199 In this case we need to ignore alignment. */
5200 || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5201 && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5202 || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5203 || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5204 || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5205 TYPE_ATTRIBUTES (v))
5206 || (inner_type && TREE_TYPE (v) != inner_type))
5207 return false;
5209 return true;
5212 /* Find the variant of FIRST that matches T and create a new one if necessary.
5213 Set TREE_TYPE to INNER_TYPE if non-NULL. */
5215 static tree
5216 fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
5217 tree inner_type = NULL)
5219 if (first == TYPE_MAIN_VARIANT (t))
5220 return t;
5221 for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5222 if (fld_type_variant_equal_p (t, v, inner_type))
5223 return v;
5224 tree v = build_variant_type_copy (first);
5225 TYPE_READONLY (v) = TYPE_READONLY (t);
5226 TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5227 TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5228 TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5229 TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5230 TYPE_NAME (v) = TYPE_NAME (t);
5231 TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5232 TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5233 /* Variants of incomplete types should have alignment
5234 set to BITS_PER_UNIT. Do not copy the actual alignment. */
5235 if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5237 SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5238 TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5240 if (inner_type)
5241 TREE_TYPE (v) = inner_type;
5242 gcc_checking_assert (fld_type_variant_equal_p (t, v, inner_type));
5243 if (!fld->pset.add (v))
5244 add_tree_to_fld_list (v, fld);
5245 return v;
5248 /* Map complete types to incomplete types. */
5250 static hash_map<tree, tree> *fld_incomplete_types;
5252 /* Map types to simplified types. */
5254 static hash_map<tree, tree> *fld_simplified_types;
5256 /* Produce a variant of T whose TREE_TYPE is T2. If it is the main variant,
5257 use MAP to prevent duplicates. */
5259 static tree
5260 fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
5261 class free_lang_data_d *fld)
5263 if (TREE_TYPE (t) == t2)
5264 return t;
5266 if (TYPE_MAIN_VARIANT (t) != t)
5268 return fld_type_variant
5269 (fld_process_array_type (TYPE_MAIN_VARIANT (t),
5270 TYPE_MAIN_VARIANT (t2), map, fld),
5271 t, fld, t2);
5274 bool existed;
5275 tree &array
5276 = map->get_or_insert (t, &existed);
5277 if (!existed)
5279 array = build_array_type_1 (t2, TYPE_DOMAIN (t),
5280 TYPE_TYPELESS_STORAGE (t), false);
5281 TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
5282 if (!fld->pset.add (array))
5283 add_tree_to_fld_list (array, fld);
5285 return array;
5288 /* Return CTX after removal of contexts that are not relevant. */
5290 static tree
5291 fld_decl_context (tree ctx)
5293 /* Variably modified types are needed for tree_is_indexable to decide
5294 whether the type needs to go to local or global section.
5295 This code is semi-broken but for now it is easiest to keep contexts
5296 as expected. */
5297 if (ctx && TYPE_P (ctx)
5298 && !variably_modified_type_p (ctx, NULL_TREE))
5300 while (ctx && TYPE_P (ctx))
5301 ctx = TYPE_CONTEXT (ctx);
5303 return ctx;
5306 /* For T being an aggregate type, try to turn it into an incomplete variant.
5307 Return T if no simplification is possible. */
5309 static tree
5310 fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
5312 if (!t)
5313 return NULL;
5314 if (POINTER_TYPE_P (t))
5316 tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5317 if (t2 != TREE_TYPE (t))
5319 tree first;
5320 if (TREE_CODE (t) == POINTER_TYPE)
5321 first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5322 TYPE_REF_CAN_ALIAS_ALL (t));
5323 else
5324 first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5325 TYPE_REF_CAN_ALIAS_ALL (t));
5326 gcc_assert (TYPE_CANONICAL (t2) != t2
5327 && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5328 if (!fld->pset.add (first))
5329 add_tree_to_fld_list (first, fld);
5330 return fld_type_variant (first, t, fld);
5332 return t;
5334 if (TREE_CODE (t) == ARRAY_TYPE)
5335 return fld_process_array_type (t,
5336 fld_incomplete_type_of (TREE_TYPE (t), fld),
5337 fld_incomplete_types, fld);
5338 if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
5339 || !COMPLETE_TYPE_P (t))
5340 return t;
5341 if (TYPE_MAIN_VARIANT (t) == t)
5343 bool existed;
5344 tree &copy
5345 = fld_incomplete_types->get_or_insert (t, &existed);
5347 if (!existed)
5349 copy = build_distinct_type_copy (t);
5351 /* It is possible that type was not seen by free_lang_data yet. */
5352 if (!fld->pset.add (copy))
5353 add_tree_to_fld_list (copy, fld);
5354 TYPE_SIZE (copy) = NULL;
5355 TYPE_USER_ALIGN (copy) = 0;
5356 TYPE_SIZE_UNIT (copy) = NULL;
5357 TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5358 TREE_ADDRESSABLE (copy) = 0;
5359 if (AGGREGATE_TYPE_P (t))
5361 SET_TYPE_MODE (copy, VOIDmode);
5362 SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5363 TYPE_TYPELESS_STORAGE (copy) = 0;
5364 TYPE_FIELDS (copy) = NULL;
5365 TYPE_BINFO (copy) = NULL;
5367 else
5368 TYPE_VALUES (copy) = NULL;
5370 /* Build a copy of the TYPE_DECL in TYPE_NAME if necessary.
5371 This is needed for ODR violation warnings to come out right (we
5372 want duplicate TYPE_DECLs whenever the type is duplicated because
5373 of an ODR violation). Because lang data in the TYPE_DECL may not
5374 have been freed yet, rebuild it from scratch and copy the relevant
5375 fields. */
5376 TYPE_NAME (copy) = fld_simplified_type_name (copy);
5377 tree name = TYPE_NAME (copy);
5379 if (name && TREE_CODE (name) == TYPE_DECL)
5381 gcc_checking_assert (TREE_TYPE (name) == t);
5382 tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
5383 DECL_NAME (name), copy);
5384 if (DECL_ASSEMBLER_NAME_SET_P (name))
5385 SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
5386 SET_DECL_ALIGN (name2, 0);
5387 DECL_CONTEXT (name2) = fld_decl_context
5388 (DECL_CONTEXT (name));
5389 TYPE_NAME (copy) = name2;
5392 return copy;
5394 return (fld_type_variant
5395 (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
5398 /* Simplify type T for scenarios where we do not need complete pointer
5399 types. */
5401 static tree
5402 fld_simplified_type (tree t, class free_lang_data_d *fld)
5404 if (!t)
5405 return t;
5406 if (POINTER_TYPE_P (t))
5407 return fld_incomplete_type_of (t, fld);
5408 /* FIXME: This triggers verification error, see PR88140. */
5409 if (TREE_CODE (t) == ARRAY_TYPE && 0)
5410 return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
5411 fld_simplified_types, fld);
5412 return t;
5415 /* Reset the expression *EXPR_P, a size or position.
5417 ??? We could reset all non-constant sizes or positions. But it's cheap
5418 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5420 We need to reset self-referential sizes or positions because they cannot
5421 be gimplified and thus can contain a CALL_EXPR after the gimplification
5422 is finished, which will run afoul of LTO streaming. And they need to be
5423 reset to something essentially dummy but not constant, so as to preserve
5424 the properties of the object they are attached to. */
5426 static inline void
5427 free_lang_data_in_one_sizepos (tree *expr_p)
5429 tree expr = *expr_p;
5430 if (CONTAINS_PLACEHOLDER_P (expr))
5431 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5435 /* Reset all the fields in a binfo node BINFO. We only keep
5436 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5438 static void
5439 free_lang_data_in_binfo (tree binfo)
5441 unsigned i;
5442 tree t;
5444 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5446 BINFO_VIRTUALS (binfo) = NULL_TREE;
5447 BINFO_BASE_ACCESSES (binfo) = NULL;
5448 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5449 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5450 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5452 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5453 free_lang_data_in_binfo (t);
5457 /* Reset all language specific information still present in TYPE. */
5459 static void
5460 free_lang_data_in_type (tree type, class free_lang_data_d *fld)
5462 gcc_assert (TYPE_P (type));
5464 /* Give the FE a chance to remove its own data first. */
5465 lang_hooks.free_lang_data (type);
5467 TREE_LANG_FLAG_0 (type) = 0;
5468 TREE_LANG_FLAG_1 (type) = 0;
5469 TREE_LANG_FLAG_2 (type) = 0;
5470 TREE_LANG_FLAG_3 (type) = 0;
5471 TREE_LANG_FLAG_4 (type) = 0;
5472 TREE_LANG_FLAG_5 (type) = 0;
5473 TREE_LANG_FLAG_6 (type) = 0;
5475 TYPE_NEEDS_CONSTRUCTING (type) = 0;
5477 /* Purge non-marked variants from the variants chain, so that they
5478 don't reappear in the IL after free_lang_data. */
5479 while (TYPE_NEXT_VARIANT (type)
5480 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
5482 tree t = TYPE_NEXT_VARIANT (type);
5483 TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
5484 /* Turn the removed types into distinct types. */
5485 TYPE_MAIN_VARIANT (t) = t;
5486 TYPE_NEXT_VARIANT (t) = NULL_TREE;
5489 if (TREE_CODE (type) == FUNCTION_TYPE)
5491 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5492 /* Remove the const and volatile qualifiers from arguments. The
5493 C++ front end removes them, but the C front end does not,
5494 leading to false ODR violation errors when merging two
5495 instances of the same function signature compiled by
5496 different front ends. */
5497 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5499 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5500 tree arg_type = TREE_VALUE (p);
5502 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5504 int quals = TYPE_QUALS (arg_type)
5505 & ~TYPE_QUAL_CONST
5506 & ~TYPE_QUAL_VOLATILE;
5507 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5508 if (!fld->pset.add (TREE_VALUE (p)))
5509 free_lang_data_in_type (TREE_VALUE (p), fld);
5511 /* C++ FE uses TREE_PURPOSE to store initial values. */
5512 TREE_PURPOSE (p) = NULL;
5515 else if (TREE_CODE (type) == METHOD_TYPE)
5517 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5518 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5520 /* C++ FE uses TREE_PURPOSE to store initial values. */
5521 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5522 TREE_PURPOSE (p) = NULL;
5525 else if (RECORD_OR_UNION_TYPE_P (type))
5527 /* Remove members that are not FIELD_DECLs from the field list
5528 of an aggregate. These occur in C++. */
5529 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5530 if (TREE_CODE (member) == FIELD_DECL)
5531 prev = &DECL_CHAIN (member);
5532 else
5533 *prev = DECL_CHAIN (member);
5535 TYPE_VFIELD (type) = NULL_TREE;
5537 if (TYPE_BINFO (type))
5539 free_lang_data_in_binfo (TYPE_BINFO (type));
5540 /* We need to preserve the link to the bases and the virtual table for all
5541 polymorphic types to keep the devirtualization machinery working. */
5542 if (!BINFO_VTABLE (TYPE_BINFO (type)))
5543 TYPE_BINFO (type) = NULL;
5546 else if (INTEGRAL_TYPE_P (type)
5547 || SCALAR_FLOAT_TYPE_P (type)
5548 || FIXED_POINT_TYPE_P (type))
5550 if (TREE_CODE (type) == ENUMERAL_TYPE)
5552 /* Type values are used only for C++ ODR checking. Drop them
5553 for all type variants and non-ODR types.
5554 For ODR types the data is freed in free_odr_warning_data. */
5555 if (TYPE_MAIN_VARIANT (type) != type
5556 || !type_with_linkage_p (type))
5557 TYPE_VALUES (type) = NULL;
5558 else
5559 /* Simplify representation by recording only values rather
5560 than const decls. */
5561 for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
5562 if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
5563 TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
5565 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5566 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5569 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5571 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5572 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5574 if (TYPE_CONTEXT (type)
5575 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5577 tree ctx = TYPE_CONTEXT (type);
5580 ctx = BLOCK_SUPERCONTEXT (ctx);
5582 while (ctx && TREE_CODE (ctx) == BLOCK);
5583 TYPE_CONTEXT (type) = ctx;
5586 TYPE_STUB_DECL (type) = NULL;
5587 TYPE_NAME (type) = fld_simplified_type_name (type);
5591 /* Return true if DECL may need an assembler name to be set. */
5593 static inline bool
5594 need_assembler_name_p (tree decl)
5596 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5597 Rule merging. This makes type_odr_p return true on those types during
5598 LTO and, by comparing the mangled names, we can say which types are intended
5599 to be equivalent across compilation units.
5601 We do not store names of type_in_anonymous_namespace_p.
5603 Record, union and enumeration types have linkage that allows us
5604 to check type_in_anonymous_namespace_p. We do not mangle compound types
5605 that can always be compared structurally.
5607 Similarly for builtin types, we compare properties of their main variant.
5608 A special case is integer types, where mangling does make a difference
5609 between char/signed char/unsigned char etc. Storing the name for these makes
5610 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5611 See cp/mangle.c:write_builtin_type for details. */
5613 if (TREE_CODE (decl) == TYPE_DECL)
5615 if (DECL_NAME (decl)
5616 && decl == TYPE_NAME (TREE_TYPE (decl))
5617 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5618 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5619 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
5620 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
5621 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
5622 && (type_with_linkage_p (TREE_TYPE (decl))
5623 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5624 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5625 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5626 return false;
5628 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5629 if (!VAR_OR_FUNCTION_DECL_P (decl))
5630 return false;
5632 /* If DECL already has its assembler name set, it does not need a
5633 new one. */
5634 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5635 || DECL_ASSEMBLER_NAME_SET_P (decl))
5636 return false;
5638 /* Abstract decls do not need an assembler name. */
5639 if (DECL_ABSTRACT_P (decl))
5640 return false;
5642 /* For VAR_DECLs, only static, public and external symbols need an
5643 assembler name. */
5644 if (VAR_P (decl)
5645 && !TREE_STATIC (decl)
5646 && !TREE_PUBLIC (decl)
5647 && !DECL_EXTERNAL (decl))
5648 return false;
5650 if (TREE_CODE (decl) == FUNCTION_DECL)
5652 /* Do not set assembler name on builtins. Allow RTL expansion to
5653 decide whether to expand inline or via a regular call. */
5654 if (fndecl_built_in_p (decl)
5655 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5656 return false;
5658 /* Functions represented in the callgraph need an assembler name. */
5659 if (cgraph_node::get (decl) != NULL)
5660 return true;
5662 /* Unused and not public functions don't need an assembler name. */
5663 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5664 return false;
5667 return true;
5671 /* Reset all language specific information still present in symbol
5672 DECL. */
5674 static void
5675 free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
5677 gcc_assert (DECL_P (decl));
5679 /* Give the FE a chance to remove its own data first. */
5680 lang_hooks.free_lang_data (decl);
5682 TREE_LANG_FLAG_0 (decl) = 0;
5683 TREE_LANG_FLAG_1 (decl) = 0;
5684 TREE_LANG_FLAG_2 (decl) = 0;
5685 TREE_LANG_FLAG_3 (decl) = 0;
5686 TREE_LANG_FLAG_4 (decl) = 0;
5687 TREE_LANG_FLAG_5 (decl) = 0;
5688 TREE_LANG_FLAG_6 (decl) = 0;
5690 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5691 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5692 if (TREE_CODE (decl) == FIELD_DECL)
5694 DECL_FCONTEXT (decl) = NULL;
5695 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5696 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5697 DECL_QUALIFIER (decl) = NULL_TREE;
5700 if (TREE_CODE (decl) == FUNCTION_DECL)
5702 struct cgraph_node *node;
5703 /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5704 the address may be taken in another unit, so this flag has no practical
5705 use for the middle-end.
5707 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5708 for public objects that indeed cannot be addressed, but that is not
5709 the case. Set the flag to true so we do not get merge failures for
5710 e.g. virtual tables between units that take their address and
5711 units that don't. */
5712 if (TREE_PUBLIC (decl))
5713 TREE_ADDRESSABLE (decl) = true;
5714 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5715 if (!(node = cgraph_node::get (decl))
5716 || (!node->definition && !node->clones))
5718 if (node)
5719 node->release_body ();
5720 else
5722 release_function_body (decl);
5723 DECL_ARGUMENTS (decl) = NULL;
5724 DECL_RESULT (decl) = NULL;
5725 DECL_INITIAL (decl) = error_mark_node;
5728 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5730 tree t;
5732 /* If DECL has a gimple body, then the context for its
5733 arguments must be DECL. Otherwise, it doesn't really
5734 matter, as we will not be emitting any code for DECL. In
5735 general, there may be other instances of DECL created by
5736 the front end and since PARM_DECLs are generally shared,
5737 their DECL_CONTEXT changes as the replicas of DECL are
5738 created. The only time where DECL_CONTEXT is important
5739 is for the FUNCTION_DECLs that have a gimple body (since
5740 the PARM_DECL will be used in the function's body). */
5741 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5742 DECL_CONTEXT (t) = decl;
5743 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5744 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5745 = target_option_default_node;
5746 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5747 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5748 = optimization_default_node;
5751 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5752 At this point, it is not needed anymore. */
5753 DECL_SAVED_TREE (decl) = NULL_TREE;
5755 /* Clear the abstract origin if it refers to a method.
5756 Otherwise dwarf2out.c will ICE as we splice functions out of
5757 TYPE_FIELDS and thus the origin will not be output
5758 correctly. */
5759 if (DECL_ABSTRACT_ORIGIN (decl)
5760 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5761 && RECORD_OR_UNION_TYPE_P
5762 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5763 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5765 DECL_VINDEX (decl) = NULL_TREE;
5767 else if (VAR_P (decl))
5769 /* See the comment above for why we set the flag for functions. */
5770 if (TREE_PUBLIC (decl))
5771 TREE_ADDRESSABLE (decl) = true;
5772 if ((DECL_EXTERNAL (decl)
5773 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5774 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5775 DECL_INITIAL (decl) = NULL_TREE;
5777 else if (TREE_CODE (decl) == TYPE_DECL)
5779 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5780 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5781 TREE_PUBLIC (decl) = 0;
5782 TREE_PRIVATE (decl) = 0;
5783 DECL_ARTIFICIAL (decl) = 0;
5784 TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5785 DECL_INITIAL (decl) = NULL_TREE;
5786 DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5787 DECL_MODE (decl) = VOIDmode;
5788 SET_DECL_ALIGN (decl, 0);
5789 /* TREE_TYPE is cleared at WPA time in free_odr_warning_data. */
5791 else if (TREE_CODE (decl) == FIELD_DECL)
5793 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5794 DECL_INITIAL (decl) = NULL_TREE;
5796 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5797 && DECL_INITIAL (decl)
5798 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5800 /* Strip builtins from the translation-unit BLOCK. We still have targets
5801 without builtin_decl_explicit support and also builtins are shared
5802 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5803 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5804 while (*nextp)
5806 tree var = *nextp;
5807 if (fndecl_built_in_p (var))
5808 *nextp = TREE_CHAIN (var);
5809 else
5810 nextp = &TREE_CHAIN (var);
5813 /* We need to keep field decls associated with their trees. Otherwise tree
5814 merging may merge some fields and keep others disjoint, which in turn will
5815 not do well with TREE_CHAIN pointers linking them.
5817 Also do not drop containing types for virtual methods and tables because
5818 these are needed by devirtualization.
5819 C++ destructors are special because C++ frontends sometimes produce a
5820 virtual destructor as an alias of a non-virtual destructor. In
5821 devirtualization code we always walk through aliases and we need the
5822 context to be preserved too. See PR89335. */
5823 if (TREE_CODE (decl) != FIELD_DECL
5824 && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5825 || (!DECL_VIRTUAL_P (decl)
5826 && (TREE_CODE (decl) != FUNCTION_DECL
5827 || !DECL_CXX_DESTRUCTOR_P (decl)))))
5828 DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
5832 /* Operand callback helper for free_lang_data_in_node. *TP is the
5833 subtree operand being considered. */
5835 static tree
5836 find_decls_types_r (tree *tp, int *ws, void *data)
5838 tree t = *tp;
5839 class free_lang_data_d *fld = (class free_lang_data_d *) data;
5841 if (TREE_CODE (t) == TREE_LIST)
5842 return NULL_TREE;
5844 /* Language specific nodes will be removed, so there is no need
5845 to gather anything under them. */
5846 if (is_lang_specific (t))
5848 *ws = 0;
5849 return NULL_TREE;
5852 if (DECL_P (t))
5854 /* Note that walk_tree does not traverse every possible field in
5855 decls, so we have to do our own traversals here. */
5856 add_tree_to_fld_list (t, fld);
5858 fld_worklist_push (DECL_NAME (t), fld);
5859 fld_worklist_push (DECL_CONTEXT (t), fld);
5860 fld_worklist_push (DECL_SIZE (t), fld);
5861 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5863 /* We are going to remove everything under DECL_INITIAL for
5864 TYPE_DECLs. No point walking them. */
5865 if (TREE_CODE (t) != TYPE_DECL)
5866 fld_worklist_push (DECL_INITIAL (t), fld);
5868 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5869 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5871 if (TREE_CODE (t) == FUNCTION_DECL)
5873 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5874 fld_worklist_push (DECL_RESULT (t), fld);
5876 else if (TREE_CODE (t) == FIELD_DECL)
5878 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5879 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5880 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5881 fld_worklist_push (DECL_FCONTEXT (t), fld);
5884 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5885 && DECL_HAS_VALUE_EXPR_P (t))
5886 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5888 if (TREE_CODE (t) != FIELD_DECL
5889 && TREE_CODE (t) != TYPE_DECL)
5890 fld_worklist_push (TREE_CHAIN (t), fld);
5891 *ws = 0;
5893 else if (TYPE_P (t))
5895 /* Note that walk_tree does not traverse every possible field in
5896 types, so we have to do our own traversals here. */
5897 add_tree_to_fld_list (t, fld);
5899 if (!RECORD_OR_UNION_TYPE_P (t))
5900 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5901 fld_worklist_push (TYPE_SIZE (t), fld);
5902 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5903 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5904 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5905 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5906 fld_worklist_push (TYPE_NAME (t), fld);
5907 /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5908 lists, we may look types up in these lists and use them while
5909 optimizing the function body. Thus we need to free lang data
5910 in them. */
5911 if (TREE_CODE (t) == POINTER_TYPE)
5912 fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
5913 if (TREE_CODE (t) == REFERENCE_TYPE)
5914 fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
5915 if (!POINTER_TYPE_P (t))
5916 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5917 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5918 if (!RECORD_OR_UNION_TYPE_P (t))
5919 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5920 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5921 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5922 do not, and do not want to, reach unused variants this way. */
5923 if (TYPE_CONTEXT (t))
5925 tree ctx = TYPE_CONTEXT (t);
5926 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5927 So push that instead. */
5928 while (ctx && TREE_CODE (ctx) == BLOCK)
5929 ctx = BLOCK_SUPERCONTEXT (ctx);
5930 fld_worklist_push (ctx, fld);
5932 fld_worklist_push (TYPE_CANONICAL (t), fld);
5934 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5936 unsigned i;
5937 tree tem;
5938 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5939 fld_worklist_push (TREE_TYPE (tem), fld);
5940 fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
5941 fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
5943 if (RECORD_OR_UNION_TYPE_P (t))
5945 tree tem;
5946 /* Push all TYPE_FIELDS - there can be interleaved interesting
5947 and non-interesting things. */
5948 tem = TYPE_FIELDS (t);
5949 while (tem)
5951 if (TREE_CODE (tem) == FIELD_DECL)
5952 fld_worklist_push (tem, fld);
5953 tem = TREE_CHAIN (tem);
5956 if (FUNC_OR_METHOD_TYPE_P (t))
5957 fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
5959 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5960 *ws = 0;
5962 else if (TREE_CODE (t) == BLOCK)
5964 for (tree *tem = &BLOCK_VARS (t); *tem; )
5966 if (TREE_CODE (*tem) != LABEL_DECL
5967 && (TREE_CODE (*tem) != VAR_DECL
5968 || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
5970 gcc_assert (TREE_CODE (*tem) != RESULT_DECL
5971 && TREE_CODE (*tem) != PARM_DECL);
5972 *tem = TREE_CHAIN (*tem);
5974 else
5976 fld_worklist_push (*tem, fld);
5977 tem = &TREE_CHAIN (*tem);
5980 for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5981 fld_worklist_push (tem, fld);
5982 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5985 if (TREE_CODE (t) != IDENTIFIER_NODE
5986 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5987 fld_worklist_push (TREE_TYPE (t), fld);
5989 return NULL_TREE;
5993 /* Find decls and types in T. */
5995 static void
5996 find_decls_types (tree t, class free_lang_data_d *fld)
5998 while (1)
6000 if (!fld->pset.contains (t))
6001 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6002 if (fld->worklist.is_empty ())
6003 break;
6004 t = fld->worklist.pop ();
6008 /* Translate all the types in LIST with the corresponding runtime
6009 types. */
6011 static tree
6012 get_eh_types_for_runtime (tree list)
6014 tree head, prev;
6016 if (list == NULL_TREE)
6017 return NULL_TREE;
6019 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6020 prev = head;
6021 list = TREE_CHAIN (list);
6022 while (list)
6024 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6025 TREE_CHAIN (prev) = n;
6026 prev = TREE_CHAIN (prev);
6027 list = TREE_CHAIN (list);
6030 return head;
6034 /* Find decls and types referenced in EH region R and store them in
6035 FLD->DECLS and FLD->TYPES. */
6037 static void
6038 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6040 switch (r->type)
6042 case ERT_CLEANUP:
6043 break;
6045 case ERT_TRY:
6047 eh_catch c;
6049 /* The types referenced in each catch must first be changed to the
6050 EH types used at runtime. This removes references to FE types
6051 in the region. */
6052 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6054 c->type_list = get_eh_types_for_runtime (c->type_list);
6055 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6058 break;
6060 case ERT_ALLOWED_EXCEPTIONS:
6061 r->u.allowed.type_list
6062 = get_eh_types_for_runtime (r->u.allowed.type_list);
6063 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6064 break;
6066 case ERT_MUST_NOT_THROW:
6067 walk_tree (&r->u.must_not_throw.failure_decl,
6068 find_decls_types_r, fld, &fld->pset);
6069 break;
6074 /* Find decls and types referenced in cgraph node N and store them in
6075 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6076 look for *every* kind of DECL and TYPE node reachable from N,
6077 including those embedded inside types and decls (i.e., TYPE_DECLs,
6078 NAMESPACE_DECLs, etc.). */
6080 static void
6081 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6083 basic_block bb;
6084 struct function *fn;
6085 unsigned ix;
6086 tree t;
6088 find_decls_types (n->decl, fld);
6090 if (!gimple_has_body_p (n->decl))
6091 return;
6093 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6095 fn = DECL_STRUCT_FUNCTION (n->decl);
6097 /* Traverse locals. */
6098 FOR_EACH_LOCAL_DECL (fn, ix, t)
6099 find_decls_types (t, fld);
6101 /* Traverse EH regions in FN. */
6103 eh_region r;
6104 FOR_ALL_EH_REGION_FN (r, fn)
6105 find_decls_types_in_eh_region (r, fld);
6108 /* Traverse every statement in FN. */
6109 FOR_EACH_BB_FN (bb, fn)
6111 gphi_iterator psi;
6112 gimple_stmt_iterator si;
6113 unsigned i;
6115 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6117 gphi *phi = psi.phi ();
6119 for (i = 0; i < gimple_phi_num_args (phi); i++)
6121 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6122 find_decls_types (*arg_p, fld);
6126 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6128 gimple *stmt = gsi_stmt (si);
6130 if (is_gimple_call (stmt))
6131 find_decls_types (gimple_call_fntype (stmt), fld);
6133 for (i = 0; i < gimple_num_ops (stmt); i++)
6135 tree arg = gimple_op (stmt, i);
6136 find_decls_types (arg, fld);
6137 /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
6138 which we need for asm stmts. */
6139 if (arg
6140 && TREE_CODE (arg) == TREE_LIST
6141 && TREE_PURPOSE (arg)
6142 && gimple_code (stmt) == GIMPLE_ASM)
6143 find_decls_types (TREE_PURPOSE (arg), fld);
6150 /* Find decls and types referenced in varpool node N and store them in
6151 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6152 look for *every* kind of DECL and TYPE node reachable from N,
6153 including those embedded inside types and decls (i.e., TYPE_DECLs,
6154 NAMESPACE_DECLs, etc). */
6156 static void
6157 find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
6159 find_decls_types (v->decl, fld);
6162 /* If T needs an assembler name, have one created for it. */
6164 void
6165 assign_assembler_name_if_needed (tree t)
6167 if (need_assembler_name_p (t))
6169 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6170 diagnostics that use input_location to show locus
6171 information. The problem here is that, at this point,
6172 input_location is generally anchored to the end of the file
6173 (since the parser is long gone), so we don't have a good
6174 position to pin it to.
6176 To alleviate this problem, this uses the location of T's
6177 declaration. Examples of this are
6178 testsuite/g++.dg/template/cond2.C and
6179 testsuite/g++.dg/template/pr35240.C. */
6180 location_t saved_location = input_location;
6181 input_location = DECL_SOURCE_LOCATION (t);
6183 decl_assembler_name (t);
6185 input_location = saved_location;
6190 /* Free language specific information for every operand and expression
6191 in every node of the call graph. This process operates in three stages:
6193 1- Every callgraph node and varpool node is traversed looking for
6194 decls and types embedded in them. This is a more exhaustive
6195 search than that done by find_referenced_vars, because it will
6196 also collect individual fields, decls embedded in types, etc.
6198 2- All the decls found are sent to free_lang_data_in_decl.
6200 3- All the types found are sent to free_lang_data_in_type.
6202 The ordering between decls and types is important because
6203 free_lang_data_in_decl sets assembler names, which includes
6204 mangling. So types cannot be freed up until assembler names have
6205 been set up. */
6207 static void
6208 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6210 struct cgraph_node *n;
6211 varpool_node *v;
6212 tree t;
6213 unsigned i;
6214 alias_pair *p;
6216 /* Find decls and types in the body of every function in the callgraph. */
6217 FOR_EACH_FUNCTION (n)
6218 find_decls_types_in_node (n, fld);
6220 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6221 find_decls_types (p->decl, fld);
6223 /* Find decls and types in every varpool symbol. */
6224 FOR_EACH_VARIABLE (v)
6225 find_decls_types_in_var (v, fld);
6227 /* Set the assembler name on every decl found. We need to do this
6228 now because free_lang_data_in_decl will invalidate data needed
6229 for mangling; doing it later would break mangling of interdependent decls. */
6230 FOR_EACH_VEC_ELT (fld->decls, i, t)
6231 assign_assembler_name_if_needed (t);
6233 /* Traverse every decl found freeing its language data. */
6234 FOR_EACH_VEC_ELT (fld->decls, i, t)
6235 free_lang_data_in_decl (t, fld);
6237 /* Traverse every type found freeing its language data. */
6238 FOR_EACH_VEC_ELT (fld->types, i, t)
6239 free_lang_data_in_type (t, fld);
6243 /* Free resources that are used by the FE but are not needed once it is done. */
6245 static unsigned
6246 free_lang_data (void)
6248 unsigned i;
6249 class free_lang_data_d fld;
6251 /* If we are the LTO frontend we have freed lang-specific data already. */
6252 if (in_lto_p
6253 || (!flag_generate_lto && !flag_generate_offload))
6255 /* Rebuild type inheritance graph even when not doing LTO to get
6256 consistent profile data. */
6257 rebuild_type_inheritance_graph ();
6258 return 0;
6261 fld_incomplete_types = new hash_map<tree, tree>;
6262 fld_simplified_types = new hash_map<tree, tree>;
6264 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
6265 if (vec_safe_is_empty (all_translation_units))
6266 build_translation_unit_decl (NULL_TREE);
6268 /* Allocate and assign alias sets to the standard integer types
6269 while the slots are still set up the way the frontends generated them. */
6270 for (i = 0; i < itk_none; ++i)
6271 if (integer_types[i])
6272 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6274 /* Traverse the IL resetting language specific information for
6275 operands, expressions, etc. */
6276 free_lang_data_in_cgraph (&fld);
6278 /* Create gimple variants for common types. */
6279 for (unsigned i = 0;
6280 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6281 ++i)
6282 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6284 /* Reset some langhooks. Do not reset types_compatible_p, it may
6285 still be used indirectly via the get_alias_set langhook. */
6286 lang_hooks.dwarf_name = lhd_dwarf_name;
6287 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6288 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6289 lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6290 lang_hooks.print_xnode = lhd_print_tree_nothing;
6291 lang_hooks.print_decl = lhd_print_tree_nothing;
6292 lang_hooks.print_type = lhd_print_tree_nothing;
6293 lang_hooks.print_identifier = lhd_print_tree_nothing;
6295 lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6297 if (flag_checking)
6299 int i;
6300 tree t;
6302 FOR_EACH_VEC_ELT (fld.types, i, t)
6303 verify_type (t);
6306 /* We do not want the default decl_assembler_name implementation;
6307 rather, once we have fixed everything, we want a wrapper around it
6308 asserting that all non-local symbols already got their assembler
6309 name and only produce assembler names for local symbols. Or rather
6310 make sure we never call decl_assembler_name on local symbols and
6311 devise a separate, middle-end private scheme for it. */
6313 /* Reset diagnostic machinery. */
6314 tree_diagnostics_defaults (global_dc);
6316 rebuild_type_inheritance_graph ();
6318 delete fld_incomplete_types;
6319 delete fld_simplified_types;
6321 return 0;
6325 namespace {
6327 const pass_data pass_data_ipa_free_lang_data =
6329 SIMPLE_IPA_PASS, /* type */
6330 "*free_lang_data", /* name */
6331 OPTGROUP_NONE, /* optinfo_flags */
6332 TV_IPA_FREE_LANG_DATA, /* tv_id */
6333 0, /* properties_required */
6334 0, /* properties_provided */
6335 0, /* properties_destroyed */
6336 0, /* todo_flags_start */
6337 0, /* todo_flags_finish */
6340 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6342 public:
6343 pass_ipa_free_lang_data (gcc::context *ctxt)
6344 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6347 /* opt_pass methods: */
6348 virtual unsigned int execute (function *) { return free_lang_data (); }
6350 }; // class pass_ipa_free_lang_data
6352 } // anon namespace
6354 simple_ipa_opt_pass *
6355 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6357 return new pass_ipa_free_lang_data (ctxt);
6360 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6361 of the various TYPE_QUAL values. */
6363 static void
6364 set_type_quals (tree type, int type_quals)
6366 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6367 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6368 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6369 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6370 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6373 /* Returns true iff CAND and BASE have equivalent language-specific
6374 qualifiers. */
6376 bool
6377 check_lang_type (const_tree cand, const_tree base)
6379 if (lang_hooks.types.type_hash_eq == NULL)
6380 return true;
6381 /* type_hash_eq currently only applies to these types. */
6382 if (TREE_CODE (cand) != FUNCTION_TYPE
6383 && TREE_CODE (cand) != METHOD_TYPE)
6384 return true;
6385 return lang_hooks.types.type_hash_eq (cand, base);
6388 /* This function checks to see if TYPE matches the size of one of the built-in
6389 atomic types, and returns that core atomic type. */
6391 static tree
6392 find_atomic_core_type (const_tree type)
6394 tree base_atomic_type;
6396 /* Only handle complete types. */
6397 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6398 return NULL_TREE;
6400 switch (tree_to_uhwi (TYPE_SIZE (type)))
6402 case 8:
6403 base_atomic_type = atomicQI_type_node;
6404 break;
6406 case 16:
6407 base_atomic_type = atomicHI_type_node;
6408 break;
6410 case 32:
6411 base_atomic_type = atomicSI_type_node;
6412 break;
6414 case 64:
6415 base_atomic_type = atomicDI_type_node;
6416 break;
6418 case 128:
6419 base_atomic_type = atomicTI_type_node;
6420 break;
6422 default:
6423 base_atomic_type = NULL_TREE;
6426 return base_atomic_type;
6429 /* Returns true iff unqualified CAND and BASE are equivalent. */
6431 bool
6432 check_base_type (const_tree cand, const_tree base)
6434 if (TYPE_NAME (cand) != TYPE_NAME (base)
6435 /* Apparently this is needed for Objective-C. */
6436 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6437 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6438 TYPE_ATTRIBUTES (base)))
6439 return false;
6440 /* Check alignment. */
6441 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6442 return true;
6443 /* Atomic types increase minimal alignment. We must do so as well
6444 or we get duplicated canonical types. See PR88686. */
6445 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6447 /* See if this object can map to a basic atomic type. */
6448 tree atomic_type = find_atomic_core_type (cand);
6449 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6450 return true;
6452 return false;
6455 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6457 bool
6458 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6460 return (TYPE_QUALS (cand) == type_quals
6461 && check_base_type (cand, base)
6462 && check_lang_type (cand, base));
6465 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6467 static bool
6468 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6470 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6471 && TYPE_NAME (cand) == TYPE_NAME (base)
6472 /* Apparently this is needed for Objective-C. */
6473 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6474 /* Check alignment. */
6475 && TYPE_ALIGN (cand) == align
6476 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6477 TYPE_ATTRIBUTES (base))
6478 && check_lang_type (cand, base));
6481 /* Return a version of the TYPE, qualified as indicated by the
6482 TYPE_QUALS, if one exists. If no qualified version exists yet,
6483 return NULL_TREE. */
6485 tree
6486 get_qualified_type (tree type, int type_quals)
6488 if (TYPE_QUALS (type) == type_quals)
6489 return type;
6491 tree mv = TYPE_MAIN_VARIANT (type);
6492 if (check_qualified_type (mv, type, type_quals))
6493 return mv;
6495 /* Search the chain of variants to see if there is already one there just
6496 like the one we need to have. If so, use that existing one. We must
6497 preserve the TYPE_NAME, since there is code that depends on this. */
6498 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6499 if (check_qualified_type (*tp, type, type_quals))
6501 /* Put the found variant at the head of the variant list so
6502 frequently searched variants get found faster. The C++ FE
6503 benefits greatly from this. */
6504 tree t = *tp;
6505 *tp = TYPE_NEXT_VARIANT (t);
6506 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6507 TYPE_NEXT_VARIANT (mv) = t;
6508 return t;
6511 return NULL_TREE;
6514 /* Like get_qualified_type, but creates the type if it does not
6515 exist. This function never returns NULL_TREE. */
6517 tree
6518 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6520 tree t;
6522 /* See if we already have the appropriate qualified variant. */
6523 t = get_qualified_type (type, type_quals);
6525 /* If not, build it. */
6526 if (!t)
6528 t = build_variant_type_copy (type PASS_MEM_STAT);
6529 set_type_quals (t, type_quals);
6531 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6533 /* See if this object can map to a basic atomic type. */
6534 tree atomic_type = find_atomic_core_type (type);
6535 if (atomic_type)
6537 /* Ensure the alignment of this type is compatible with
6538 the required alignment of the atomic type. */
6539 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6540 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6544 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6545 /* Propagate structural equality. */
6546 SET_TYPE_STRUCTURAL_EQUALITY (t);
6547 else if (TYPE_CANONICAL (type) != type)
6548 /* Build the underlying canonical type, since it is different
6549 from TYPE. */
6551 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6552 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6554 else
6555 /* T is its own canonical type. */
6556 TYPE_CANONICAL (t) = t;
6560 return t;
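/* Illustrative sketch (editor's example, not part of the compiler proper;
   the helper name is hypothetical): requesting a qualified variant and
   observing that it is cached on the main variant's variant chain.  */
static void
example_build_const_variant (void)
{
  /* Build (or fetch) the "const int" variant of integer_type_node.  */
  tree cint = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);

  /* The qualifier is reflected in the type flags ...  */
  gcc_checking_assert (TYPE_READONLY (cint));

  /* ... the variant shares the unqualified main variant ...  */
  gcc_checking_assert (TYPE_MAIN_VARIANT (cint) == integer_type_node);

  /* ... and a second request reuses the cached node.  */
  gcc_checking_assert (build_qualified_type (integer_type_node,
					     TYPE_QUAL_CONST) == cint);
}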
6563 /* Create a variant of TYPE with alignment ALIGN (in bits). */
6565 tree
6566 build_aligned_type (tree type, unsigned int align)
6568 tree t;
6570 if (TYPE_PACKED (type)
6571 || TYPE_ALIGN (type) == align)
6572 return type;
6574 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6575 if (check_aligned_type (t, type, align))
6576 return t;
6578 t = build_variant_type_copy (type);
6579 SET_TYPE_ALIGN (t, align);
6580 TYPE_USER_ALIGN (t) = 1;
6582 return t;
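/* Illustrative sketch (editor's example; the helper is hypothetical):
   requesting an over-aligned variant.  ALIGN is in bits, matching
   TYPE_ALIGN, and this assumes plain "int" is not already 128-bit
   aligned on the target.  */
static void
example_build_aligned_variant (void)
{
  tree aligned_int = build_aligned_type (integer_type_node, 128);
  gcc_checking_assert (TYPE_ALIGN (aligned_int) == 128
		       && TYPE_USER_ALIGN (aligned_int));
}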
6585 /* Create a new distinct copy of TYPE. The new type is made its own
6586 MAIN_VARIANT. If TYPE requires structural equality checks, the
6587 resulting type requires structural equality checks; otherwise, its
6588 TYPE_CANONICAL points to itself. */
6590 tree
6591 build_distinct_type_copy (tree type MEM_STAT_DECL)
6593 tree t = copy_node (type PASS_MEM_STAT);
6595 TYPE_POINTER_TO (t) = 0;
6596 TYPE_REFERENCE_TO (t) = 0;
6598 /* Set the canonical type either to a new equivalence class, or
6599 propagate the need for structural equality checks. */
6600 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6601 SET_TYPE_STRUCTURAL_EQUALITY (t);
6602 else
6603 TYPE_CANONICAL (t) = t;
6605 /* Make it its own variant. */
6606 TYPE_MAIN_VARIANT (t) = t;
6607 TYPE_NEXT_VARIANT (t) = 0;
6609 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6610 whose TREE_TYPE is not t. This can also happen in the Ada
6611 frontend when using subtypes. */
6613 return t;
6616 /* Create a new variant of TYPE, equivalent but distinct. This is so
6617 the caller can modify it. TYPE_CANONICAL for the return type will
6618 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6619 are considered equal by the language itself (or that both types
6620 require structural equality checks). */
6622 tree
6623 build_variant_type_copy (tree type MEM_STAT_DECL)
6625 tree t, m = TYPE_MAIN_VARIANT (type);
6627 t = build_distinct_type_copy (type PASS_MEM_STAT);
6629 /* Since we're building a variant, assume that it is a non-semantic
6630 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6631 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6632 /* Type variants have no alias set defined. */
6633 TYPE_ALIAS_SET (t) = -1;
6635 /* Add the new type to the chain of variants of TYPE. */
6636 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6637 TYPE_NEXT_VARIANT (m) = t;
6638 TYPE_MAIN_VARIANT (t) = m;
6640 return t;
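/* Illustrative sketch (editor's example; the helper is hypothetical)
   contrasting the two kinds of copy: a distinct copy becomes its own
   main variant (and, normally, its own canonical type), while a variant
   copy stays on the original's variant chain and keeps its canonical
   type.  */
static void
example_type_copies (void)
{
  tree distinct = build_distinct_type_copy (integer_type_node);
  gcc_checking_assert (TYPE_MAIN_VARIANT (distinct) == distinct);

  tree variant = build_variant_type_copy (integer_type_node);
  gcc_checking_assert (TYPE_MAIN_VARIANT (variant) == integer_type_node);
  gcc_checking_assert (TYPE_CANONICAL (variant)
		       == TYPE_CANONICAL (integer_type_node));
}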
6643 /* Return true if the from trees in both tree maps are equal. */
6646 tree_map_base_eq (const void *va, const void *vb)
6648 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6649 *const b = (const struct tree_map_base *) vb;
6650 return (a->from == b->from);
6653 /* Hash a from tree in a tree_map_base. */
6655 unsigned int
6656 tree_map_base_hash (const void *item)
6658 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6661 /* Return true if this tree map structure is marked for garbage collection
6662 purposes. We simply return true if the from tree is marked, so that this
6663 structure goes away when the from tree goes away. */
6666 tree_map_base_marked_p (const void *p)
6668 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6671 /* Hash a from tree in a tree_map. */
6673 unsigned int
6674 tree_map_hash (const void *item)
6676 return (((const struct tree_map *) item)->hash);
6679 /* Hash a from tree in a tree_decl_map. */
6681 unsigned int
6682 tree_decl_map_hash (const void *item)
6684 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6687 /* Return the initialization priority for DECL. */
6689 priority_type
6690 decl_init_priority_lookup (tree decl)
6692 symtab_node *snode = symtab_node::get (decl);
6694 if (!snode)
6695 return DEFAULT_INIT_PRIORITY;
6696 return
6697 snode->get_init_priority ();
6700 /* Return the finalization priority for DECL. */
6702 priority_type
6703 decl_fini_priority_lookup (tree decl)
6705 cgraph_node *node = cgraph_node::get (decl);
6707 if (!node)
6708 return DEFAULT_INIT_PRIORITY;
6709 return
6710 node->get_fini_priority ();
6713 /* Set the initialization priority for DECL to PRIORITY. */
6715 void
6716 decl_init_priority_insert (tree decl, priority_type priority)
6718 struct symtab_node *snode;
6720 if (priority == DEFAULT_INIT_PRIORITY)
6722 snode = symtab_node::get (decl);
6723 if (!snode)
6724 return;
6726 else if (VAR_P (decl))
6727 snode = varpool_node::get_create (decl);
6728 else
6729 snode = cgraph_node::get_create (decl);
6730 snode->set_init_priority (priority);
6733 /* Set the finalization priority for DECL to PRIORITY. */
6735 void
6736 decl_fini_priority_insert (tree decl, priority_type priority)
6738 struct cgraph_node *node;
6740 if (priority == DEFAULT_INIT_PRIORITY)
6742 node = cgraph_node::get (decl);
6743 if (!node)
6744 return;
6746 else
6747 node = cgraph_node::get_create (decl);
6748 node->set_fini_priority (priority);
6751 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6753 static void
6754 print_debug_expr_statistics (void)
6756 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6757 (long) debug_expr_for_decl->size (),
6758 (long) debug_expr_for_decl->elements (),
6759 debug_expr_for_decl->collisions ());
6762 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6764 static void
6765 print_value_expr_statistics (void)
6767 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6768 (long) value_expr_for_decl->size (),
6769 (long) value_expr_for_decl->elements (),
6770 value_expr_for_decl->collisions ());
6773 /* Lookup a debug expression for FROM, and return it if we find one. */
6775 tree
6776 decl_debug_expr_lookup (tree from)
6778 struct tree_decl_map *h, in;
6779 in.base.from = from;
6781 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6782 if (h)
6783 return h->to;
6784 return NULL_TREE;
6787 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6789 void
6790 decl_debug_expr_insert (tree from, tree to)
6792 struct tree_decl_map *h;
6794 h = ggc_alloc<tree_decl_map> ();
6795 h->base.from = from;
6796 h->to = to;
6797 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6800 /* Lookup a value expression for FROM, and return it if we find one. */
6802 tree
6803 decl_value_expr_lookup (tree from)
6805 struct tree_decl_map *h, in;
6806 in.base.from = from;
6808 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6809 if (h)
6810 return h->to;
6811 return NULL_TREE;
6814 /* Insert a mapping FROM->TO in the value expression hashtable. */
6816 void
6817 decl_value_expr_insert (tree from, tree to)
6819 struct tree_decl_map *h;
6821 h = ggc_alloc<tree_decl_map> ();
6822 h->base.from = from;
6823 h->to = to;
6824 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
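/* Illustrative sketch (editor's example; the helper and its arguments
   are hypothetical): recording that VAR should be replaced by EXPR
   wherever it is referenced.  Callers also set DECL_HAS_VALUE_EXPR_P
   so the mapping is actually consulted.  */
static void
example_record_value_expr (tree var, tree expr)
{
  decl_value_expr_insert (var, expr);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  gcc_checking_assert (decl_value_expr_lookup (var) == expr);
}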
6827 /* Lookup a vector of debug arguments for FROM, and return it if we
6828 find one. */
6830 vec<tree, va_gc> **
6831 decl_debug_args_lookup (tree from)
6833 struct tree_vec_map *h, in;
6835 if (!DECL_HAS_DEBUG_ARGS_P (from))
6836 return NULL;
6837 gcc_checking_assert (debug_args_for_decl != NULL);
6838 in.base.from = from;
6839 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6840 if (h)
6841 return &h->to;
6842 return NULL;
6845 /* Insert a mapping FROM->empty vector of debug arguments in the value
6846 expression hashtable. */
6848 vec<tree, va_gc> **
6849 decl_debug_args_insert (tree from)
6851 struct tree_vec_map *h;
6852 tree_vec_map **loc;
6854 if (DECL_HAS_DEBUG_ARGS_P (from))
6855 return decl_debug_args_lookup (from);
6856 if (debug_args_for_decl == NULL)
6857 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6858 h = ggc_alloc<tree_vec_map> ();
6859 h->base.from = from;
6860 h->to = NULL;
6861 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6862 *loc = h;
6863 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6864 return &h->to;
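/* Illustrative sketch (editor's example; the helper and its arguments
   are hypothetical): attaching debug arguments to FNDECL.  The vector
   is created lazily on the first insertion, and entries are
   conventionally pushed in (origin, value) pairs.  */
static void
example_add_debug_arg (tree fndecl, tree origin, tree value)
{
  vec<tree, va_gc> **debug_args = decl_debug_args_insert (fndecl);
  vec_safe_push (*debug_args, origin);
  vec_safe_push (*debug_args, value);
}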
6867 /* Hashing of types so that we don't make duplicates.
6868 The entry point is `type_hash_canon'. */
6870 /* Generate the default hash code for TYPE. This is designed for
6871 speed, rather than maximum entropy. */
6873 hashval_t
6874 type_hash_canon_hash (tree type)
6876 inchash::hash hstate;
6878 hstate.add_int (TREE_CODE (type));
6880 if (TREE_TYPE (type))
6881 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6883 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6884 /* Just the identifier is adequate to distinguish. */
6885 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6887 switch (TREE_CODE (type))
6889 case METHOD_TYPE:
6890 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6891 /* FALLTHROUGH. */
6892 case FUNCTION_TYPE:
6893 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6894 if (TREE_VALUE (t) != error_mark_node)
6895 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6896 break;
6898 case OFFSET_TYPE:
6899 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6900 break;
6902 case ARRAY_TYPE:
6904 if (TYPE_DOMAIN (type))
6905 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6906 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6908 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6909 hstate.add_object (typeless);
6912 break;
6914 case INTEGER_TYPE:
6916 tree t = TYPE_MAX_VALUE (type);
6917 if (!t)
6918 t = TYPE_MIN_VALUE (type);
6919 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6920 hstate.add_object (TREE_INT_CST_ELT (t, i));
6921 break;
6924 case REAL_TYPE:
6925 case FIXED_POINT_TYPE:
6927 unsigned prec = TYPE_PRECISION (type);
6928 hstate.add_object (prec);
6929 break;
6932 case VECTOR_TYPE:
6933 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6934 break;
6936 default:
6937 break;
6940 return hstate.end ();
6943 /* These are the Hashtable callback functions. */
6945 /* Returns true iff the types are equivalent. */
6947 bool
6948 type_cache_hasher::equal (type_hash *a, type_hash *b)
6950 /* First test the things that are the same for all types. */
6951 if (a->hash != b->hash
6952 || TREE_CODE (a->type) != TREE_CODE (b->type)
6953 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6954 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6955 TYPE_ATTRIBUTES (b->type))
6956 || (TREE_CODE (a->type) != COMPLEX_TYPE
6957 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6958 return 0;
6960 /* Be careful about comparing arrays before and after the element type
6961 has been completed; don't compare TYPE_ALIGN unless both types are
6962 complete. */
6963 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6964 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6965 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6966 return 0;
6968 switch (TREE_CODE (a->type))
6970 case VOID_TYPE:
6971 case COMPLEX_TYPE:
6972 case POINTER_TYPE:
6973 case REFERENCE_TYPE:
6974 case NULLPTR_TYPE:
6975 return 1;
6977 case VECTOR_TYPE:
6978 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6979 TYPE_VECTOR_SUBPARTS (b->type));
6981 case ENUMERAL_TYPE:
6982 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6983 && !(TYPE_VALUES (a->type)
6984 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6985 && TYPE_VALUES (b->type)
6986 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6987 && type_list_equal (TYPE_VALUES (a->type),
6988 TYPE_VALUES (b->type))))
6989 return 0;
6991 /* fall through */
6993 case INTEGER_TYPE:
6994 case REAL_TYPE:
6995 case BOOLEAN_TYPE:
6996 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6997 return false;
6998 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6999 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7000 TYPE_MAX_VALUE (b->type)))
7001 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7002 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7003 TYPE_MIN_VALUE (b->type))));
7005 case FIXED_POINT_TYPE:
7006 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7008 case OFFSET_TYPE:
7009 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7011 case METHOD_TYPE:
7012 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7013 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7014 || (TYPE_ARG_TYPES (a->type)
7015 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7016 && TYPE_ARG_TYPES (b->type)
7017 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7018 && type_list_equal (TYPE_ARG_TYPES (a->type),
7019 TYPE_ARG_TYPES (b->type)))))
7020 break;
7021 return 0;
7022 case ARRAY_TYPE:
7023 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7024 where the flag should be inherited from the element type
7025 and can change after ARRAY_TYPEs are created; on non-aggregates
7026 compare it and hash it; scalars will never have that flag set
7027 and we need to differentiate between arrays created by different
7028 front-ends or middle-end created arrays. */
7029 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
7030 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
7031 || (TYPE_TYPELESS_STORAGE (a->type)
7032 == TYPE_TYPELESS_STORAGE (b->type))));
7034 case RECORD_TYPE:
7035 case UNION_TYPE:
7036 case QUAL_UNION_TYPE:
7037 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7038 || (TYPE_FIELDS (a->type)
7039 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7040 && TYPE_FIELDS (b->type)
7041 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7042 && type_list_equal (TYPE_FIELDS (a->type),
7043 TYPE_FIELDS (b->type))));
7045 case FUNCTION_TYPE:
7046 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7047 || (TYPE_ARG_TYPES (a->type)
7048 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7049 && TYPE_ARG_TYPES (b->type)
7050 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7051 && type_list_equal (TYPE_ARG_TYPES (a->type),
7052 TYPE_ARG_TYPES (b->type))))
7053 break;
7054 return 0;
7056 default:
7057 return 0;
7060 if (lang_hooks.types.type_hash_eq != NULL)
7061 return lang_hooks.types.type_hash_eq (a->type, b->type);
7063 return 1;
7066 /* Given TYPE, and HASHCODE its hash code, return the canonical
7067 object for an identical type if one already exists.
7068 Otherwise, return TYPE, and record it as the canonical object.
7070 To use this function, first create a type of the sort you want.
7071 Then compute its hash code from the fields of the type that
7072 make it different from other similar types.
7073 Then call this function and use the value. */
7075 tree
7076 type_hash_canon (unsigned int hashcode, tree type)
7078 type_hash in;
7079 type_hash **loc;
7081 /* The hash table only contains main variants, so ensure that's what we're
7082 being passed. */
7083 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7085 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7086 must call that routine before comparing TYPE_ALIGNs. */
7087 layout_type (type);
7089 in.hash = hashcode;
7090 in.type = type;
7092 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7093 if (*loc)
7095 tree t1 = ((type_hash *) *loc)->type;
7096 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
7097 && t1 != type);
7098 if (TYPE_UID (type) + 1 == next_type_uid)
7099 --next_type_uid;
7100 /* Free also min/max values and the cache for integer
7101 types. This can't be done in free_node, as LTO frees
7102 those on its own. */
7103 if (TREE_CODE (type) == INTEGER_TYPE)
7105 if (TYPE_MIN_VALUE (type)
7106 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
7108 /* Zero is always in TYPE_CACHED_VALUES. */
7109 if (! TYPE_UNSIGNED (type))
7110 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
7111 ggc_free (TYPE_MIN_VALUE (type));
7113 if (TYPE_MAX_VALUE (type)
7114 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
7116 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
7117 ggc_free (TYPE_MAX_VALUE (type));
7119 if (TYPE_CACHED_VALUES_P (type))
7120 ggc_free (TYPE_CACHED_VALUES (type));
7122 free_node (type);
7123 return t1;
7125 else
7127 struct type_hash *h;
7129 h = ggc_alloc<type_hash> ();
7130 h->hash = hashcode;
7131 h->type = type;
7132 *loc = h;
7134 return type;
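/* Illustrative sketch (editor's example; the helper is hypothetical)
   of the idiom the type constructors in this file use: build a
   candidate main-variant type, hash it, and let type_hash_canon either
   return an existing identical node (freeing the candidate) or record
   the candidate as the canonical one.  */
static tree
example_canonicalize_type (tree candidate)
{
  hashval_t hash = type_hash_canon_hash (candidate);
  return type_hash_canon (hash, candidate);
}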
7138 static void
7139 print_type_hash_statistics (void)
7141 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7142 (long) type_hash_table->size (),
7143 (long) type_hash_table->elements (),
7144 type_hash_table->collisions ());
7147 /* Given two lists of types
7148 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7149 return 1 if the lists contain the same types in the same order.
7150 Also, the TREE_PURPOSEs must match. */
7152 bool
7153 type_list_equal (const_tree l1, const_tree l2)
7155 const_tree t1, t2;
7157 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7158 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7159 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7160 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7161 && (TREE_TYPE (TREE_PURPOSE (t1))
7162 == TREE_TYPE (TREE_PURPOSE (t2))))))
7163 return false;
7165 return t1 == t2;
7168 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7169 given by FNTYPE. If the argument list accepts variable arguments,
7170 then this function counts only the ordinary arguments. */
7173 type_num_arguments (const_tree fntype)
7175 int i = 0;
7177 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7178 /* If the function does not take a variable number of arguments,
7179 the last element in the list will have type `void'. */
7180 if (VOID_TYPE_P (TREE_VALUE (t)))
7181 break;
7182 else
7183 ++i;
7185 return i;
7188 /* Return the type of the function FNTYPE's argument ARGNO if known.
7189 For vararg functions where ARGNO refers to one of the variadic
7190 arguments return null. Otherwise, return a void_type_node for
7191 out-of-bounds ARGNO. */
7193 tree
7194 type_argument_type (const_tree fntype, unsigned argno)
7196 /* Treat zero the same as an out-of-bounds argument number. */
7197 if (!argno)
7198 return void_type_node;
7200 function_args_iterator iter;
7202 tree argtype;
7203 unsigned i = 1;
7204 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7206 /* A vararg function's argument list ends in a null, while
7207 an ordinary function's argument list ends with void. Return
7208 null if ARGNO refers to a vararg argument, void_type_node if
7209 it's out of bounds, and the formal argument type otherwise. */
7210 if (!argtype)
7211 break;
7213 if (i == argno || VOID_TYPE_P (argtype))
7214 return argtype;
7216 ++i;
7219 return NULL_TREE;
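/* Illustrative sketch (editor's example; the helper is hypothetical):
   querying a variadic function type such as "int (int, double, ...)".
   Only the two fixed arguments are counted, and argument numbers past
   the fixed part yield NULL_TREE.  */
static void
example_query_function_type (void)
{
  tree fntype = build_varargs_function_type_list (integer_type_node,
						  integer_type_node,
						  double_type_node,
						  NULL_TREE);
  gcc_checking_assert (type_num_arguments (fntype) == 2);
  gcc_checking_assert (type_argument_type (fntype, 1) == integer_type_node);
  gcc_checking_assert (type_argument_type (fntype, 2) == double_type_node);
  gcc_checking_assert (type_argument_type (fntype, 3) == NULL_TREE);
}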
7222 /* Nonzero if integer constants T1 and T2
7223 represent the same constant value. */
7226 tree_int_cst_equal (const_tree t1, const_tree t2)
7228 if (t1 == t2)
7229 return 1;
7231 if (t1 == 0 || t2 == 0)
7232 return 0;
7234 STRIP_ANY_LOCATION_WRAPPER (t1);
7235 STRIP_ANY_LOCATION_WRAPPER (t2);
7237 if (TREE_CODE (t1) == INTEGER_CST
7238 && TREE_CODE (t2) == INTEGER_CST
7239 && wi::to_widest (t1) == wi::to_widest (t2))
7240 return 1;
7242 return 0;
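/* Illustrative sketch (editor's example; the helper is hypothetical):
   the comparison is by numerical value, so equal values of different
   integer types still compare equal.  */
static void
example_int_cst_equal (void)
{
  tree a = build_int_cst (integer_type_node, 42);
  tree b = build_int_cst (long_integer_type_node, 42);
  gcc_checking_assert (tree_int_cst_equal (a, b));
}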
7245 /* Return true if T is an INTEGER_CST whose numerical value (extended
7246 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7248 bool
7249 tree_fits_shwi_p (const_tree t)
7251 return (t != NULL_TREE
7252 && TREE_CODE (t) == INTEGER_CST
7253 && wi::fits_shwi_p (wi::to_widest (t)));
7256 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7257 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7259 bool
7260 tree_fits_poly_int64_p (const_tree t)
7262 if (t == NULL_TREE)
7263 return false;
7264 if (POLY_INT_CST_P (t))
7266 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7267 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7268 return false;
7269 return true;
7271 return (TREE_CODE (t) == INTEGER_CST
7272 && wi::fits_shwi_p (wi::to_widest (t)));
7275 /* Return true if T is an INTEGER_CST whose numerical value (extended
7276 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7278 bool
7279 tree_fits_uhwi_p (const_tree t)
7281 return (t != NULL_TREE
7282 && TREE_CODE (t) == INTEGER_CST
7283 && wi::fits_uhwi_p (wi::to_widest (t)));
7286 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7287 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7289 bool
7290 tree_fits_poly_uint64_p (const_tree t)
7292 if (t == NULL_TREE)
7293 return false;
7294 if (POLY_INT_CST_P (t))
7296 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7297 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7298 return false;
7299 return true;
7301 return (TREE_CODE (t) == INTEGER_CST
7302 && wi::fits_uhwi_p (wi::to_widest (t)));
7305 /* T is an INTEGER_CST whose numerical value (extended according to
7306 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7307 HOST_WIDE_INT. */
7309 HOST_WIDE_INT
7310 tree_to_shwi (const_tree t)
7312 gcc_assert (tree_fits_shwi_p (t));
7313 return TREE_INT_CST_LOW (t);
7316 /* T is an INTEGER_CST whose numerical value (extended according to
7317 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7318 HOST_WIDE_INT. */
7320 unsigned HOST_WIDE_INT
7321 tree_to_uhwi (const_tree t)
7323 gcc_assert (tree_fits_uhwi_p (t));
7324 return TREE_INT_CST_LOW (t);
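/* Illustrative sketch (editor's example; the helper and FALLBACK are
   hypothetical) of the usual fits/to pattern: only convert a size to a
   host integer after checking that it is an INTEGER_CST in range, so
   variable-length or huge sizes take the fallback path.  */
static unsigned HOST_WIDE_INT
example_type_size_in_bytes (tree type, unsigned HOST_WIDE_INT fallback)
{
  tree size = TYPE_SIZE_UNIT (type);
  if (size && tree_fits_uhwi_p (size))
    return tree_to_uhwi (size);
  return fallback;
}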
7327 /* Return the most significant (sign) bit of T. */
7330 tree_int_cst_sign_bit (const_tree t)
7332 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7334 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7337 /* Return an indication of the sign of the integer constant T.
7338 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7339 Note that -1 will never be returned if T's type is unsigned. */
7342 tree_int_cst_sgn (const_tree t)
7344 if (wi::to_wide (t) == 0)
7345 return 0;
7346 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7347 return 1;
7348 else if (wi::neg_p (wi::to_wide (t)))
7349 return -1;
7350 else
7351 return 1;
7354 /* Return the minimum number of bits needed to represent VALUE in a
7355 signed or unsigned type; SGN says which. */
7357 unsigned int
7358 tree_int_cst_min_precision (tree value, signop sgn)
7360 /* If the value is negative, compute its negative minus 1. The latter
7361 adjustment is because the absolute value of the largest negative value
7362 is one larger than the largest positive value. This is equivalent to
7363 a bit-wise negation, so use that operation instead. */
7365 if (tree_int_cst_sgn (value) < 0)
7366 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7368 /* Return the number of bits needed, taking into account the fact
7369 that we need one more bit for a signed than unsigned type.
7370 If value is 0 or -1, the minimum precision is 1 no matter
7371 whether SGN is SIGNED or UNSIGNED. */
7373 if (integer_zerop (value))
7374 return 1;
7375 else
7376 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
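/* Worked example (editor's note; the helper is hypothetical): for the
   constant 5, tree_floor_log2 returns 2, so the minimum precision is
   3 bits unsigned and 4 bits signed; 0 and -1 need just 1 bit either
   way.  */
static void
example_min_precision (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  gcc_checking_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3);
  gcc_checking_assert (tree_int_cst_min_precision (five, SIGNED) == 4);
}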
7379 /* Return truthvalue of whether T1 is the same tree structure as T2.
7380 Return 1 if they are the same.
7381 Return 0 if they are understandably different.
7382 Return -1 if either contains tree structure not understood by
7383 this function. */
7386 simple_cst_equal (const_tree t1, const_tree t2)
7388 enum tree_code code1, code2;
7389 int cmp;
7390 int i;
7392 if (t1 == t2)
7393 return 1;
7394 if (t1 == 0 || t2 == 0)
7395 return 0;
7397 /* For location wrappers to be the same, they must be at the same
7398 source location (and wrap the same thing). */
7399 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7401 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7402 return 0;
7403 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7406 code1 = TREE_CODE (t1);
7407 code2 = TREE_CODE (t2);
7409 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7411 if (CONVERT_EXPR_CODE_P (code2)
7412 || code2 == NON_LVALUE_EXPR)
7413 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7414 else
7415 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7418 else if (CONVERT_EXPR_CODE_P (code2)
7419 || code2 == NON_LVALUE_EXPR)
7420 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7422 if (code1 != code2)
7423 return 0;
7425 switch (code1)
7427 case INTEGER_CST:
7428 return wi::to_widest (t1) == wi::to_widest (t2);
7430 case REAL_CST:
7431 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7433 case FIXED_CST:
7434 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7436 case STRING_CST:
7437 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7438 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7439 TREE_STRING_LENGTH (t1)));
7441 case CONSTRUCTOR:
7443 unsigned HOST_WIDE_INT idx;
7444 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7445 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7447 if (vec_safe_length (v1) != vec_safe_length (v2))
7448 return false;
7450 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7451 /* ??? Should we also handle fields here? */
7452 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7453 return false;
7454 return true;
7457 case SAVE_EXPR:
7458 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7460 case CALL_EXPR:
7461 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7462 if (cmp <= 0)
7463 return cmp;
7464 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7465 return 0;
7467 const_tree arg1, arg2;
7468 const_call_expr_arg_iterator iter1, iter2;
7469 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7470 arg2 = first_const_call_expr_arg (t2, &iter2);
7471 arg1 && arg2;
7472 arg1 = next_const_call_expr_arg (&iter1),
7473 arg2 = next_const_call_expr_arg (&iter2))
7475 cmp = simple_cst_equal (arg1, arg2);
7476 if (cmp <= 0)
7477 return cmp;
7479 return arg1 == arg2;
7482 case TARGET_EXPR:
7483 /* Special case: if either target is an unallocated VAR_DECL,
7484 it means that it's going to be unified with whatever the
7485 TARGET_EXPR is really supposed to initialize, so treat it
7486 as being equivalent to anything. */
7487 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7488 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7489 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7490 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7491 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7492 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7493 cmp = 1;
7494 else
7495 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7497 if (cmp <= 0)
7498 return cmp;
7500 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7502 case WITH_CLEANUP_EXPR:
7503 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7504 if (cmp <= 0)
7505 return cmp;
7507 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7509 case COMPONENT_REF:
7510 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7511 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7513 return 0;
7515 case VAR_DECL:
7516 case PARM_DECL:
7517 case CONST_DECL:
7518 case FUNCTION_DECL:
7519 return 0;
7521 default:
7522 if (POLY_INT_CST_P (t1))
7523 /* A false return means maybe_ne rather than known_ne. */
7524 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7525 TYPE_SIGN (TREE_TYPE (t1))),
7526 poly_widest_int::from (poly_int_cst_value (t2),
7527 TYPE_SIGN (TREE_TYPE (t2))));
7528 break;
7531 /* This general rule works for most tree codes. All exceptions should be
7532 handled above. If this is a language-specific tree code, we can't
7533 trust what might be in the operand, so say we don't know
7534 the situation. */
7535 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7536 return -1;
7538 switch (TREE_CODE_CLASS (code1))
7540 case tcc_unary:
7541 case tcc_binary:
7542 case tcc_comparison:
7543 case tcc_expression:
7544 case tcc_reference:
7545 case tcc_statement:
7546 cmp = 1;
7547 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7549 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7550 if (cmp <= 0)
7551 return cmp;
7554 return cmp;
7556 default:
7557 return -1;
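/* Illustrative sketch (editor's example; the helper is hypothetical)
   of the tri-state result: identical constants yield 1, a clear
   mismatch yields 0, and unknown (e.g. language-specific) structure
   yields -1.  */
static void
example_simple_cst_equal (void)
{
  tree seven = build_int_cst (integer_type_node, 7);
  tree eight = build_int_cst (integer_type_node, 8);
  gcc_checking_assert (simple_cst_equal (seven, seven) == 1);
  gcc_checking_assert (simple_cst_equal (seven, eight) == 0);
}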
7561 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7562 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7563 than U, respectively. */
7566 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7568 if (tree_int_cst_sgn (t) < 0)
7569 return -1;
7570 else if (!tree_fits_uhwi_p (t))
7571 return 1;
7572 else if (TREE_INT_CST_LOW (t) == u)
7573 return 0;
7574 else if (TREE_INT_CST_LOW (t) < u)
7575 return -1;
7576 else
7577 return 1;
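/* Illustrative sketch (editor's example; the helper is hypothetical):
   bounding a constant against a host integer without converting it
   first, which works even when the constant does not fit in a
   HOST_WIDE_INT.  */
static bool
example_size_at_most (tree size, unsigned HOST_WIDE_INT limit)
{
  return (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, limit) <= 0);
}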
7580 /* Return true if SIZE represents a constant size that is in bounds of
7581 what the middle-end and the backend accept (covering not more than
7582 half of the address-space).
7583 When PERR is non-null, set *PERR on failure to the description of
7584 why SIZE is not valid. */
7586 bool
7587 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7589 if (POLY_INT_CST_P (size))
7591 if (TREE_OVERFLOW (size))
7592 return false;
7593 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7594 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7595 return false;
7596 return true;
7599 cst_size_error error;
7600 if (!perr)
7601 perr = &error;
7603 if (TREE_CODE (size) != INTEGER_CST)
7605 *perr = cst_size_not_constant;
7606 return false;
7609 if (TREE_OVERFLOW_P (size))
7611 *perr = cst_size_overflow;
7612 return false;
7615 if (tree_int_cst_sgn (size) < 0)
7617 *perr = cst_size_negative;
7618 return false;
7620 if (!tree_fits_uhwi_p (size)
7621 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7622 < wi::to_widest (size) * 2))
7624 *perr = cst_size_too_big;
7625 return false;
7628 return true;
7631 /* Return the precision of the type, or for a complex or vector type the
7632 precision of the type of its elements. */
7634 unsigned int
7635 element_precision (const_tree type)
7637 if (!TYPE_P (type))
7638 type = TREE_TYPE (type);
7639 enum tree_code code = TREE_CODE (type);
7640 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7641 type = TREE_TYPE (type);
7643 return TYPE_PRECISION (type);
7646 /* Return true if CODE represents an associative tree code. Otherwise
7647 return false. */
7648 bool
7649 associative_tree_code (enum tree_code code)
7651 switch (code)
7653 case BIT_IOR_EXPR:
7654 case BIT_AND_EXPR:
7655 case BIT_XOR_EXPR:
7656 case PLUS_EXPR:
7657 case MULT_EXPR:
7658 case MIN_EXPR:
7659 case MAX_EXPR:
7660 return true;
7662 default:
7663 break;
7665 return false;
7668 /* Return true if CODE represents a commutative tree code. Otherwise
7669 return false. */
7670 bool
7671 commutative_tree_code (enum tree_code code)
7673 switch (code)
7675 case PLUS_EXPR:
7676 case MULT_EXPR:
7677 case MULT_HIGHPART_EXPR:
7678 case MIN_EXPR:
7679 case MAX_EXPR:
7680 case BIT_IOR_EXPR:
7681 case BIT_XOR_EXPR:
7682 case BIT_AND_EXPR:
7683 case NE_EXPR:
7684 case EQ_EXPR:
7685 case UNORDERED_EXPR:
7686 case ORDERED_EXPR:
7687 case UNEQ_EXPR:
7688 case LTGT_EXPR:
7689 case TRUTH_AND_EXPR:
7690 case TRUTH_XOR_EXPR:
7691 case TRUTH_OR_EXPR:
7692 case WIDEN_MULT_EXPR:
7693 case VEC_WIDEN_MULT_HI_EXPR:
7694 case VEC_WIDEN_MULT_LO_EXPR:
7695 case VEC_WIDEN_MULT_EVEN_EXPR:
7696 case VEC_WIDEN_MULT_ODD_EXPR:
7697 return true;
7699 default:
7700 break;
7702 return false;
7705 /* Return true if CODE represents a ternary tree code for which the
7706 first two operands are commutative. Otherwise return false. */
7707 bool
7708 commutative_ternary_tree_code (enum tree_code code)
7710 switch (code)
7712 case WIDEN_MULT_PLUS_EXPR:
7713 case WIDEN_MULT_MINUS_EXPR:
7714 case DOT_PROD_EXPR:
7715 return true;
7717 default:
7718 break;
7720 return false;
7723 /* Returns true if CODE can overflow. */
7725 bool
7726 operation_can_overflow (enum tree_code code)
7728 switch (code)
7730 case PLUS_EXPR:
7731 case MINUS_EXPR:
7732 case MULT_EXPR:
7733 case LSHIFT_EXPR:
7734 /* Can overflow in various ways. */
7735 return true;
7736 case TRUNC_DIV_EXPR:
7737 case EXACT_DIV_EXPR:
7738 case FLOOR_DIV_EXPR:
7739 case CEIL_DIV_EXPR:
7740 /* For INT_MIN / -1. */
7741 return true;
7742 case NEGATE_EXPR:
7743 case ABS_EXPR:
7744 /* For -INT_MIN. */
7745 return true;
7746 default:
7747 /* These operators cannot overflow. */
7748 return false;
7752 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7753 -ftrapv doesn't generate trapping insns for CODE. */
7755 bool
7756 operation_no_trapping_overflow (tree type, enum tree_code code)
7758 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7760 /* We don't generate instructions that trap on overflow for complex or vector
7761 types. */
7762 if (!INTEGRAL_TYPE_P (type))
7763 return true;
7765 if (!TYPE_OVERFLOW_TRAPS (type))
7766 return true;
7768 switch (code)
7770 case PLUS_EXPR:
7771 case MINUS_EXPR:
7772 case MULT_EXPR:
7773 case NEGATE_EXPR:
7774 case ABS_EXPR:
7775 /* These operators can overflow, and -ftrapv generates trapping code for
7776 these. */
7777 return false;
7778 case TRUNC_DIV_EXPR:
7779 case EXACT_DIV_EXPR:
7780 case FLOOR_DIV_EXPR:
7781 case CEIL_DIV_EXPR:
7782 case LSHIFT_EXPR:
7783 /* These operators can overflow, but -ftrapv does not generate trapping
7784 code for these. */
7785 return true;
7786 default:
7787 /* These operators cannot overflow. */
7788 return true;
7792 namespace inchash
7795 /* Generate a hash value for an expression. This can be used iteratively
7796 by passing a previous result as the HSTATE argument.
7798 This function is intended to produce the same hash for expressions which
7799 would compare equal using operand_equal_p. */
7800 void
7801 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7803 int i;
7804 enum tree_code code;
7805 enum tree_code_class tclass;
7807 if (t == NULL_TREE || t == error_mark_node)
7809 hstate.merge_hash (0);
7810 return;
7813 STRIP_ANY_LOCATION_WRAPPER (t);
7815 if (!(flags & OEP_ADDRESS_OF))
7816 STRIP_NOPS (t);
7818 code = TREE_CODE (t);
7820 switch (code)
7822 /* Alas, constants aren't shared, so we can't rely on pointer
7823 identity. */
7824 case VOID_CST:
7825 hstate.merge_hash (0);
7826 return;
7827 case INTEGER_CST:
7828 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7829 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7830 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7831 return;
7832 case REAL_CST:
7834 unsigned int val2;
7835 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7836 val2 = rvc_zero;
7837 else
7838 val2 = real_hash (TREE_REAL_CST_PTR (t));
7839 hstate.merge_hash (val2);
7840 return;
7842 case FIXED_CST:
7844 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7845 hstate.merge_hash (val2);
7846 return;
7848 case STRING_CST:
7849 hstate.add ((const void *) TREE_STRING_POINTER (t),
7850 TREE_STRING_LENGTH (t));
7851 return;
7852 case COMPLEX_CST:
7853 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7854 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7855 return;
7856 case VECTOR_CST:
7858 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7859 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7860 unsigned int count = vector_cst_encoded_nelts (t);
7861 for (unsigned int i = 0; i < count; ++i)
7862 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7863 return;
7865 case SSA_NAME:
7866 /* We can just compare by pointer. */
7867 hstate.add_hwi (SSA_NAME_VERSION (t));
7868 return;
7869 case PLACEHOLDER_EXPR:
7870 /* The node itself doesn't matter. */
7871 return;
7872 case BLOCK:
7873 case OMP_CLAUSE:
7874 /* Ignore. */
7875 return;
7876 case TREE_LIST:
7877 /* A list of expressions, for a CALL_EXPR or as the elements of a
7878 VECTOR_CST. */
7879 for (; t; t = TREE_CHAIN (t))
7880 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7881 return;
7882 case CONSTRUCTOR:
7884 unsigned HOST_WIDE_INT idx;
7885 tree field, value;
7886 flags &= ~OEP_ADDRESS_OF;
7887 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7889 inchash::add_expr (field, hstate, flags);
7890 inchash::add_expr (value, hstate, flags);
7892 return;
7894 case STATEMENT_LIST:
7896 tree_stmt_iterator i;
7897 for (i = tsi_start (CONST_CAST_TREE (t));
7898 !tsi_end_p (i); tsi_next (&i))
7899 inchash::add_expr (tsi_stmt (i), hstate, flags);
7900 return;
7902 case TREE_VEC:
7903 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7904 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7905 return;
7906 case IDENTIFIER_NODE:
7907 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
7908 return;
7909 case FUNCTION_DECL:
7910 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7911 Otherwise nodes that compare equal according to operand_equal_p might
7912 get different hash codes. However, don't do this for machine specific
7913 or front end builtins, since the function code is overloaded in those
7914 cases. */
7915 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7916 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7918 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7919 code = TREE_CODE (t);
7921 /* FALL THROUGH */
7922 default:
7923 if (POLY_INT_CST_P (t))
7925 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7926 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
7927 return;
7929 tclass = TREE_CODE_CLASS (code);
7931 if (tclass == tcc_declaration)
7933 /* DECL's have a unique ID */
7934 hstate.add_hwi (DECL_UID (t));
7936 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7938 /* For comparisons that can be swapped, use the lower
7939 tree code. */
7940 enum tree_code ccode = swap_tree_comparison (code);
7941 if (code < ccode)
7942 ccode = code;
7943 hstate.add_object (ccode);
7944 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7945 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7947 else if (CONVERT_EXPR_CODE_P (code))
7949 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7950 operand_equal_p. */
7951 enum tree_code ccode = NOP_EXPR;
7952 hstate.add_object (ccode);
7954 /* Don't hash the type; that can lead to having nodes which
7955 compare equal according to operand_equal_p, but which
7956 have different hash codes. Make sure to include signedness
7957 in the hash computation. */
7958 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7959 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7961 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7962 else if (code == MEM_REF
7963 && (flags & OEP_ADDRESS_OF) != 0
7964 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7965 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7966 && integer_zerop (TREE_OPERAND (t, 1)))
7967 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7968 hstate, flags);
7969 /* Don't ICE on FE specific trees, or their arguments etc.
7970 during operand_equal_p hash verification. */
7971 else if (!IS_EXPR_CODE_CLASS (tclass))
7972 gcc_assert (flags & OEP_HASH_CHECK);
7973 else
7975 unsigned int sflags = flags;
7977 hstate.add_object (code);
7979 switch (code)
7981 case ADDR_EXPR:
7982 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7983 flags |= OEP_ADDRESS_OF;
7984 sflags = flags;
7985 break;
7987 case INDIRECT_REF:
7988 case MEM_REF:
7989 case TARGET_MEM_REF:
7990 flags &= ~OEP_ADDRESS_OF;
7991 sflags = flags;
7992 break;
7994 case ARRAY_REF:
7995 case ARRAY_RANGE_REF:
7996 case COMPONENT_REF:
7997 case BIT_FIELD_REF:
7998 sflags &= ~OEP_ADDRESS_OF;
7999 break;
8001 case COND_EXPR:
8002 flags &= ~OEP_ADDRESS_OF;
8003 break;
8005 case WIDEN_MULT_PLUS_EXPR:
8006 case WIDEN_MULT_MINUS_EXPR:
8008 /* The multiplication operands are commutative. */
8009 inchash::hash one, two;
8010 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8011 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8012 hstate.add_commutative (one, two);
8013 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
8014 return;
8017 case CALL_EXPR:
8018 if (CALL_EXPR_FN (t) == NULL_TREE)
8019 hstate.add_int (CALL_EXPR_IFN (t));
8020 break;
8022 case TARGET_EXPR:
8023 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
8024 Usually different TARGET_EXPRs should just use
8025 different temporaries in their slots. */
8026 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
8027 return;
8029 default:
8030 break;
8033 /* Don't hash the type; that can lead to having nodes which
8034 compare equal according to operand_equal_p, but which
8035 have different hash codes. */
8036 if (code == NON_LVALUE_EXPR)
8038 /* Make sure to include signedness in the hash computation. */
8039 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
8040 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
8043 else if (commutative_tree_code (code))
8045 /* It's a commutative expression. We want to hash it the same
8046 however it appears. We do this by first hashing both operands
8047 and then rehashing based on the order of their independent
8048 hashes. */
8049 inchash::hash one, two;
8050 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8051 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8052 hstate.add_commutative (one, two);
8054 else
8055 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
8056 inchash::add_expr (TREE_OPERAND (t, i), hstate,
8057 i == 0 ? flags : sflags);
8059 return;
8065 /* Constructors for pointer, array and function types.
8066 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8067 constructed by language-dependent code, not here.) */
8069 /* Construct, lay out and return the type of pointers to TO_TYPE with
8070 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8071 reference all of memory. If such a type has already been
8072 constructed, reuse it. */
8074 tree
8075 build_pointer_type_for_mode (tree to_type, machine_mode mode,
8076 bool can_alias_all)
8078 tree t;
8079 bool could_alias = can_alias_all;
8081 if (to_type == error_mark_node)
8082 return error_mark_node;
8084 /* If the pointed-to type has the may_alias attribute set, force
8085 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8086 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8087 can_alias_all = true;
8089 /* In some cases, languages will have things that aren't a POINTER_TYPE
8090 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
8091 In that case, return that type without regard to the rest of our
8092 operands.
8094 ??? This is a kludge, but consistent with the way this function has
8095 always operated and there doesn't seem to be a good way to avoid this
8096 at the moment. */
8097 if (TYPE_POINTER_TO (to_type) != 0
8098 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
8099 return TYPE_POINTER_TO (to_type);
8101 /* First, if we already have a type for pointers to TO_TYPE and it's
8102 the proper mode, use it. */
8103 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
8104 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8105 return t;
8107 t = make_node (POINTER_TYPE);
8109 TREE_TYPE (t) = to_type;
8110 SET_TYPE_MODE (t, mode);
8111 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8112 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
8113 TYPE_POINTER_TO (to_type) = t;
8115 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8116 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8117 SET_TYPE_STRUCTURAL_EQUALITY (t);
8118 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8119 TYPE_CANONICAL (t)
8120 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
8121 mode, false);
8123 /* Lay out the type. This function has many callers that are concerned
8124 with expression-construction, and this simplifies them all. */
8125 layout_type (t);
8127 return t;
8130 /* By default build pointers in ptr_mode. */
8132 tree
8133 build_pointer_type (tree to_type)
8135 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8136 : TYPE_ADDR_SPACE (to_type);
8137 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8138 return build_pointer_type_for_mode (to_type, pointer_mode, false);
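/* Usage sketch (illustrative, not in the original source): a front end
   that needs the type "int *" in the default address space would write

       tree int_ptr_type = build_pointer_type (integer_type_node);

   The call reuses any POINTER_TYPE with the right mode already chained
   on TYPE_POINTER_TO (integer_type_node), so repeated requests return
   the same node.  */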
8141 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8143 tree
8144 build_reference_type_for_mode (tree to_type, machine_mode mode,
8145 bool can_alias_all)
8147 tree t;
8148 bool could_alias = can_alias_all;
8150 if (to_type == error_mark_node)
8151 return error_mark_node;
8153 /* If the pointed-to type has the may_alias attribute set, force
8154 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8155 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8156 can_alias_all = true;
8158 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8159 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8160 In that case, return that type without regard to the rest of our
8161 operands.
8163 ??? This is a kludge, but consistent with the way this function has
8164 always operated and there doesn't seem to be a good way to avoid this
8165 at the moment. */
8166 if (TYPE_REFERENCE_TO (to_type) != 0
8167 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8168 return TYPE_REFERENCE_TO (to_type);
8170 /* First, if we already have a type for references to TO_TYPE and it's
8171 the proper mode, use it. */
8172 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8173 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8174 return t;
8176 t = make_node (REFERENCE_TYPE);
8178 TREE_TYPE (t) = to_type;
8179 SET_TYPE_MODE (t, mode);
8180 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8181 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8182 TYPE_REFERENCE_TO (to_type) = t;
8184 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8185 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8186 SET_TYPE_STRUCTURAL_EQUALITY (t);
8187 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8188 TYPE_CANONICAL (t)
8189 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8190 mode, false);
8192 layout_type (t);
8194 return t;
8198 /* Build the node for the type of references-to-TO_TYPE by default
8199 in ptr_mode. */
8201 tree
8202 build_reference_type (tree to_type)
8204 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8205 : TYPE_ADDR_SPACE (to_type);
8206 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8207 return build_reference_type_for_mode (to_type, pointer_mode, false);
8210 #define MAX_INT_CACHED_PREC \
8211 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8212 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8214 /* Builds a signed or unsigned integer type of precision PRECISION.
8215 Used for C bitfields whose precision does not match that of
8216 built-in target types. */
8217 tree
8218 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8219 int unsignedp)
8221 tree itype, ret;
8223 if (unsignedp)
8224 unsignedp = MAX_INT_CACHED_PREC + 1;
8226 if (precision <= MAX_INT_CACHED_PREC)
8228 itype = nonstandard_integer_type_cache[precision + unsignedp];
8229 if (itype)
8230 return itype;
8233 itype = make_node (INTEGER_TYPE);
8234 TYPE_PRECISION (itype) = precision;
8236 if (unsignedp)
8237 fixup_unsigned_type (itype);
8238 else
8239 fixup_signed_type (itype);
8241 inchash::hash hstate;
8242 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8243 ret = type_hash_canon (hstate.end (), itype);
8244 if (precision <= MAX_INT_CACHED_PREC)
8245 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8247 return ret;
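/* Usage sketch (illustrative only): a 24-bit unsigned type for a C
   bit-field that no standard integer type covers can be obtained with

       tree uint24_type = build_nonstandard_integer_type (24, 1);

   Precisions up to MAX_INT_CACHED_PREC are answered from
   nonstandard_integer_type_cache on subsequent calls.  */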
8250 #define MAX_BOOL_CACHED_PREC \
8251 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8252 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8254 /* Builds a boolean type of precision PRECISION.
8255 Used for boolean vectors to choose proper vector element size. */
8256 tree
8257 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8259 tree type;
8261 if (precision <= MAX_BOOL_CACHED_PREC)
8263 type = nonstandard_boolean_type_cache[precision];
8264 if (type)
8265 return type;
8268 type = make_node (BOOLEAN_TYPE);
8269 TYPE_PRECISION (type) = precision;
8270 fixup_signed_type (type);
8272 if (precision <= MAX_BOOL_CACHED_PREC)
8273 nonstandard_boolean_type_cache[precision] = type;
8275 return type;
8278 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8279 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8280 is true, reuse such a type that has already been constructed. */
8282 static tree
8283 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8285 tree itype = make_node (INTEGER_TYPE);
8287 TREE_TYPE (itype) = type;
8289 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8290 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8292 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8293 SET_TYPE_MODE (itype, TYPE_MODE (type));
8294 TYPE_SIZE (itype) = TYPE_SIZE (type);
8295 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8296 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8297 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8298 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8300 if (!shared)
8301 return itype;
8303 if ((TYPE_MIN_VALUE (itype)
8304 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8305 || (TYPE_MAX_VALUE (itype)
8306 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8308 /* Since we cannot reliably merge this type, we need to compare it using
8309 structural equality checks. */
8310 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8311 return itype;
8314 hashval_t hash = type_hash_canon_hash (itype);
8315 itype = type_hash_canon (hash, itype);
8317 return itype;
8320 /* Wrapper around build_range_type_1 with SHARED set to true. */
8322 tree
8323 build_range_type (tree type, tree lowval, tree highval)
8325 return build_range_type_1 (type, lowval, highval, true);
8328 /* Wrapper around build_range_type_1 with SHARED set to false. */
8330 tree
8331 build_nonshared_range_type (tree type, tree lowval, tree highval)
8333 return build_range_type_1 (type, lowval, highval, false);
8336 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8337 MAXVAL should be the maximum value in the domain
8338 (one less than the length of the array).
8340 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8341 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8342 The limit exists because the result is a signed type and we don't handle
8343 sizes that use more than one HOST_WIDE_INT. */
8345 tree
8346 build_index_type (tree maxval)
8348 return build_range_type (sizetype, size_zero_node, maxval);
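/* Usage sketch (illustrative only): the TYPE_DOMAIN for a ten-element
   array, i.e. the range 0 .. 9 in sizetype, is built as

       tree domain = build_index_type (size_int (9));

   which is shorthand for
   build_range_type (sizetype, size_zero_node, size_int (9)).  */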
8351 /* Return true if the debug information for TYPE, a subtype, should be emitted
8352 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8353 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8354 debug info and doesn't reflect the source code. */
8356 bool
8357 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8359 tree base_type = TREE_TYPE (type), low, high;
8361 /* Subrange types have a base type which is an integral type. */
8362 if (!INTEGRAL_TYPE_P (base_type))
8363 return false;
8365 /* Get the real bounds of the subtype. */
8366 if (lang_hooks.types.get_subrange_bounds)
8367 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8368 else
8370 low = TYPE_MIN_VALUE (type);
8371 high = TYPE_MAX_VALUE (type);
8374 /* If the type and its base type have the same representation and the same
8375 name, then the type is not a subrange but a copy of the base type. */
8376 if ((TREE_CODE (base_type) == INTEGER_TYPE
8377 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8378 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8379 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8380 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8381 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8382 return false;
8384 if (lowval)
8385 *lowval = low;
8386 if (highval)
8387 *highval = high;
8388 return true;
8391 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8392 and number of elements specified by the range of values of INDEX_TYPE.
8393 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8394 If SHARED is true, reuse such a type that has already been constructed. */
8396 static tree
8397 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8398 bool shared)
8400 tree t;
8402 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8404 error ("arrays of functions are not meaningful");
8405 elt_type = integer_type_node;
8408 t = make_node (ARRAY_TYPE);
8409 TREE_TYPE (t) = elt_type;
8410 TYPE_DOMAIN (t) = index_type;
8411 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8412 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8413 layout_type (t);
8415 /* If the element type is incomplete at this point we get marked for
8416 structural equality. Do not record these types in the canonical
8417 type hashtable. */
8418 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8419 return t;
8421 if (shared)
8423 hashval_t hash = type_hash_canon_hash (t);
8424 t = type_hash_canon (hash, t);
8427 if (TYPE_CANONICAL (t) == t)
8429 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8430 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8431 || in_lto_p)
8432 SET_TYPE_STRUCTURAL_EQUALITY (t);
8433 else if (TYPE_CANONICAL (elt_type) != elt_type
8434 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8435 TYPE_CANONICAL (t)
8436 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8437 index_type
8438 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8439 typeless_storage, shared);
8442 return t;
8445 /* Wrapper around build_array_type_1 with SHARED set to true. */
8447 tree
8448 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8450 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
8453 /* Wrapper around build_array_type_1 with SHARED set to false. */
8455 tree
8456 build_nonshared_array_type (tree elt_type, tree index_type)
8458 return build_array_type_1 (elt_type, index_type, false, false);
8461 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8462 sizetype. */
8464 tree
8465 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8467 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8470 /* Recursively examines the array elements of TYPE, until a non-array
8471 element type is found. */
8473 tree
8474 strip_array_types (tree type)
8476 while (TREE_CODE (type) == ARRAY_TYPE)
8477 type = TREE_TYPE (type);
8479 return type;
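/* Usage sketch (illustrative only): combining the helpers above, an
   "int[4][8]" type and the recovery of its element type would look like

       tree inner = build_array_type_nelts (integer_type_node, 8);
       tree outer = build_array_type_nelts (inner, 4);
       tree elt = strip_array_types (outer);

   after which ELT is integer_type_node again.  */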
8482 /* Computes the canonical argument types from the argument type list
8483 ARGTYPES.
8485 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8486 on entry to this function, or if any of the ARGTYPES are
8487 structural.
8489 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8490 true on entry to this function, or if any of the ARGTYPES are
8491 non-canonical.
8493 Returns a canonical argument list, which may be ARGTYPES when the
8494 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8495 true) or would not differ from ARGTYPES. */
8497 static tree
8498 maybe_canonicalize_argtypes (tree argtypes,
8499 bool *any_structural_p,
8500 bool *any_noncanonical_p)
8502 tree arg;
8503 bool any_noncanonical_argtypes_p = false;
8505 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8507 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8508 /* Fail gracefully by stating that the type is structural. */
8509 *any_structural_p = true;
8510 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8511 *any_structural_p = true;
8512 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8513 || TREE_PURPOSE (arg))
8514 /* If the argument has a default argument, we consider it
8515 non-canonical even though the type itself is canonical.
8516 That way, different variants of function and method types
8517 with default arguments will all point to the variant with
8518 no defaults as their canonical type. */
8519 any_noncanonical_argtypes_p = true;
8522 if (*any_structural_p)
8523 return argtypes;
8525 if (any_noncanonical_argtypes_p)
8527 /* Build the canonical list of argument types. */
8528 tree canon_argtypes = NULL_TREE;
8529 bool is_void = false;
8531 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8533 if (arg == void_list_node)
8534 is_void = true;
8535 else
8536 canon_argtypes = tree_cons (NULL_TREE,
8537 TYPE_CANONICAL (TREE_VALUE (arg)),
8538 canon_argtypes);
8541 canon_argtypes = nreverse (canon_argtypes);
8542 if (is_void)
8543 canon_argtypes = chainon (canon_argtypes, void_list_node);
8545 /* There is a non-canonical type. */
8546 *any_noncanonical_p = true;
8547 return canon_argtypes;
8550 /* The canonical argument types are the same as ARGTYPES. */
8551 return argtypes;
8554 /* Construct, lay out and return
8555 the type of functions returning type VALUE_TYPE
8556 given arguments of types ARG_TYPES.
8557 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8558 are data type nodes for the arguments of the function.
8559 If such a type has already been constructed, reuse it. */
8561 tree
8562 build_function_type (tree value_type, tree arg_types)
8564 tree t;
8565 inchash::hash hstate;
8566 bool any_structural_p, any_noncanonical_p;
8567 tree canon_argtypes;
8569 gcc_assert (arg_types != error_mark_node);
8571 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8573 error ("function return type cannot be function");
8574 value_type = integer_type_node;
8577 /* Make a node of the sort we want. */
8578 t = make_node (FUNCTION_TYPE);
8579 TREE_TYPE (t) = value_type;
8580 TYPE_ARG_TYPES (t) = arg_types;
8582 /* If we already have such a type, use the old one. */
8583 hashval_t hash = type_hash_canon_hash (t);
8584 t = type_hash_canon (hash, t);
8586 /* Set up the canonical type. */
8587 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8588 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8589 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8590 &any_structural_p,
8591 &any_noncanonical_p);
8592 if (any_structural_p)
8593 SET_TYPE_STRUCTURAL_EQUALITY (t);
8594 else if (any_noncanonical_p)
8595 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8596 canon_argtypes);
8598 if (!COMPLETE_TYPE_P (t))
8599 layout_type (t);
8600 return t;
8603 /* Build a function type. The RETURN_TYPE is the type returned by the
8604 function. If VAARGS is set, no void_type_node is appended to the
8605 list. ARGP must always be terminated by a NULL_TREE. */
8607 static tree
8608 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8610 tree t, args, last;
8612 t = va_arg (argp, tree);
8613 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8614 args = tree_cons (NULL_TREE, t, args);
8616 if (vaargs)
8618 last = args;
8619 if (args != NULL_TREE)
8620 args = nreverse (args);
8621 gcc_assert (last != void_list_node);
8623 else if (args == NULL_TREE)
8624 args = void_list_node;
8625 else
8627 last = args;
8628 args = nreverse (args);
8629 TREE_CHAIN (last) = void_list_node;
8631 args = build_function_type (return_type, args);
8633 return args;
8636 /* Build a function type. The RETURN_TYPE is the type returned by the
8637 function. If additional arguments are provided, they are
8638 additional argument types. The list of argument types must always
8639 be terminated by NULL_TREE. */
8641 tree
8642 build_function_type_list (tree return_type, ...)
8644 tree args;
8645 va_list p;
8647 va_start (p, return_type);
8648 args = build_function_type_list_1 (false, return_type, p);
8649 va_end (p);
8650 return args;
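/* Usage sketch (illustrative only): the type of a function taking an int
   and a double and returning void is built with

       tree fntype
         = build_function_type_list (void_type_node, integer_type_node,
                                     double_type_node, NULL_TREE);

   The trailing NULL_TREE terminates the argument list; because this is
   the non-varargs entry point, void_list_node is appended so the result
   is a prototyped function type.  */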
8653 /* Build a variable argument function type. The RETURN_TYPE is the
8654 type returned by the function. If additional arguments are provided,
8655 they are additional argument types. The list of argument types must
8656 always be terminated by NULL_TREE. */
8658 tree
8659 build_varargs_function_type_list (tree return_type, ...)
8661 tree args;
8662 va_list p;
8664 va_start (p, return_type);
8665 args = build_function_type_list_1 (true, return_type, p);
8666 va_end (p);
8668 return args;
8671 /* Build a function type. RETURN_TYPE is the type returned by the
8672 function; VAARGS indicates whether the function takes varargs. The
8673 function takes N named arguments, the types of which are provided in
8674 ARG_TYPES. */
8676 static tree
8677 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8678 tree *arg_types)
8680 int i;
8681 tree t = vaargs ? NULL_TREE : void_list_node;
8683 for (i = n - 1; i >= 0; i--)
8684 t = tree_cons (NULL_TREE, arg_types[i], t);
8686 return build_function_type (return_type, t);
8689 /* Build a function type. RETURN_TYPE is the type returned by the
8690 function. The function takes N named arguments, the types of which
8691 are provided in ARG_TYPES. */
8693 tree
8694 build_function_type_array (tree return_type, int n, tree *arg_types)
8696 return build_function_type_array_1 (false, return_type, n, arg_types);
8699 /* Build a variable argument function type. RETURN_TYPE is the type
8700 returned by the function. The function takes N named arguments, the
8701 types of which are provided in ARG_TYPES. */
8703 tree
8704 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8706 return build_function_type_array_1 (true, return_type, n, arg_types);
8709 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8710 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8711 for the method. An implicit additional parameter (of type
8712 pointer-to-BASETYPE) is added to the ARGTYPES. */
8714 tree
8715 build_method_type_directly (tree basetype,
8716 tree rettype,
8717 tree argtypes)
8719 tree t;
8720 tree ptype;
8721 bool any_structural_p, any_noncanonical_p;
8722 tree canon_argtypes;
8724 /* Make a node of the sort we want. */
8725 t = make_node (METHOD_TYPE);
8727 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8728 TREE_TYPE (t) = rettype;
8729 ptype = build_pointer_type (basetype);
8731 /* The actual arglist for this function includes a "hidden" argument
8732 which is "this". Put it into the list of argument types. */
8733 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8734 TYPE_ARG_TYPES (t) = argtypes;
8736 /* If we already have such a type, use the old one. */
8737 hashval_t hash = type_hash_canon_hash (t);
8738 t = type_hash_canon (hash, t);
8740 /* Set up the canonical type. */
8741 any_structural_p
8742 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8743 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8744 any_noncanonical_p
8745 = (TYPE_CANONICAL (basetype) != basetype
8746 || TYPE_CANONICAL (rettype) != rettype);
8747 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8748 &any_structural_p,
8749 &any_noncanonical_p);
8750 if (any_structural_p)
8751 SET_TYPE_STRUCTURAL_EQUALITY (t);
8752 else if (any_noncanonical_p)
8753 TYPE_CANONICAL (t)
8754 = build_method_type_directly (TYPE_CANONICAL (basetype),
8755 TYPE_CANONICAL (rettype),
8756 canon_argtypes);
8757 if (!COMPLETE_TYPE_P (t))
8758 layout_type (t);
8760 return t;
8763 /* Construct, lay out and return the type of methods belonging to class
8764 BASETYPE and whose arguments and values are described by TYPE.
8765 If that type exists already, reuse it.
8766 TYPE must be a FUNCTION_TYPE node. */
8768 tree
8769 build_method_type (tree basetype, tree type)
8771 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8773 return build_method_type_directly (basetype,
8774 TREE_TYPE (type),
8775 TYPE_ARG_TYPES (type));
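/* Usage sketch (illustrative only): given a class type CLASS_TYPE and a
   FUNCTION_TYPE FNTYPE describing "void (int)", both supplied by the
   front end, the corresponding METHOD_TYPE, whose argument list gains
   the implicit "this" pointer, is obtained with

       tree mtype = build_method_type (class_type, fntype);

   CLASS_TYPE and FNTYPE are hypothetical names used only for this
   example.  */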
8778 /* Construct, lay out and return the type of offsets to a value
8779 of type TYPE, within an object of type BASETYPE.
8780 If a suitable offset type exists already, reuse it. */
8782 tree
8783 build_offset_type (tree basetype, tree type)
8785 tree t;
8787 /* Make a node of the sort we want. */
8788 t = make_node (OFFSET_TYPE);
8790 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8791 TREE_TYPE (t) = type;
8793 /* If we already have such a type, use the old one. */
8794 hashval_t hash = type_hash_canon_hash (t);
8795 t = type_hash_canon (hash, t);
8797 if (!COMPLETE_TYPE_P (t))
8798 layout_type (t);
8800 if (TYPE_CANONICAL (t) == t)
8802 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8803 || TYPE_STRUCTURAL_EQUALITY_P (type))
8804 SET_TYPE_STRUCTURAL_EQUALITY (t);
8805 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8806 || TYPE_CANONICAL (type) != type)
8807 TYPE_CANONICAL (t)
8808 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8809 TYPE_CANONICAL (type));
8812 return t;
8815 /* Create a complex type whose components are COMPONENT_TYPE.
8817 If NAMED is true, the type is given a TYPE_NAME. We do not always
8818 do so because this creates a DECL node and thus makes the DECL_UIDs
8819 dependent on the type canonicalization hashtable, which is GC-ed,
8820 so the DECL_UIDs would not be stable wrt garbage collection. */
8822 tree
8823 build_complex_type (tree component_type, bool named)
8825 gcc_assert (INTEGRAL_TYPE_P (component_type)
8826 || SCALAR_FLOAT_TYPE_P (component_type)
8827 || FIXED_POINT_TYPE_P (component_type));
8829 /* Make a node of the sort we want. */
8830 tree probe = make_node (COMPLEX_TYPE);
8832 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8834 /* If we already have such a type, use the old one. */
8835 hashval_t hash = type_hash_canon_hash (probe);
8836 tree t = type_hash_canon (hash, probe);
8838 if (t == probe)
8840 /* We created a new type. The hash insertion will have laid
8841 out the type. We need to check the canonicalization and
8842 maybe set the name. */
8843 gcc_checking_assert (COMPLETE_TYPE_P (t)
8844 && !TYPE_NAME (t)
8845 && TYPE_CANONICAL (t) == t);
8847 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8848 SET_TYPE_STRUCTURAL_EQUALITY (t);
8849 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8850 TYPE_CANONICAL (t)
8851 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8853 /* We need to create a name, since complex is a fundamental type. */
8854 if (named)
8856 const char *name = NULL;
8858 if (TREE_TYPE (t) == char_type_node)
8859 name = "complex char";
8860 else if (TREE_TYPE (t) == signed_char_type_node)
8861 name = "complex signed char";
8862 else if (TREE_TYPE (t) == unsigned_char_type_node)
8863 name = "complex unsigned char";
8864 else if (TREE_TYPE (t) == short_integer_type_node)
8865 name = "complex short int";
8866 else if (TREE_TYPE (t) == short_unsigned_type_node)
8867 name = "complex short unsigned int";
8868 else if (TREE_TYPE (t) == integer_type_node)
8869 name = "complex int";
8870 else if (TREE_TYPE (t) == unsigned_type_node)
8871 name = "complex unsigned int";
8872 else if (TREE_TYPE (t) == long_integer_type_node)
8873 name = "complex long int";
8874 else if (TREE_TYPE (t) == long_unsigned_type_node)
8875 name = "complex long unsigned int";
8876 else if (TREE_TYPE (t) == long_long_integer_type_node)
8877 name = "complex long long int";
8878 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8879 name = "complex long long unsigned int";
8881 if (name != NULL)
8882 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8883 get_identifier (name), t);
8887 return build_qualified_type (t, TYPE_QUALS (component_type));
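/* Usage sketch (illustrative only): the node for "_Complex double" (or
   a reuse of the existing one) comes from

       tree cd = build_complex_type (double_type_node, true);

   while passing false for NAMED builds an anonymous complex variant and
   avoids creating a TYPE_DECL.  */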
8890 /* If TYPE is a real or complex floating-point type and the target
8891 does not directly support arithmetic on TYPE then return the wider
8892 type to be used for arithmetic on TYPE. Otherwise, return
8893 NULL_TREE. */
8895 tree
8896 excess_precision_type (tree type)
8898 /* The target can give two different responses to the question of
8899 which excess precision mode it would like depending on whether we
8900 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8902 enum excess_precision_type requested_type
8903 = (flag_excess_precision == EXCESS_PRECISION_FAST
8904 ? EXCESS_PRECISION_TYPE_FAST
8905 : EXCESS_PRECISION_TYPE_STANDARD);
8907 enum flt_eval_method target_flt_eval_method
8908 = targetm.c.excess_precision (requested_type);
8910 /* The target should not ask for unpredictable float evaluation (though
8911 it might advertise that implicitly the evaluation is unpredictable,
8912 but we don't care about that here, it will have been reported
8913 elsewhere). If it does ask for unpredictable evaluation, we have
8914 nothing to do here. */
8915 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8917 /* Nothing to do. The target has asked for all types we know about
8918 to be computed with their native precision and range. */
8919 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8920 return NULL_TREE;
8922 /* The target will promote this type in a target-dependent way, so excess
8923 precision ought to leave it alone. */
8924 if (targetm.promoted_type (type) != NULL_TREE)
8925 return NULL_TREE;
8927 machine_mode float16_type_mode = (float16_type_node
8928 ? TYPE_MODE (float16_type_node)
8929 : VOIDmode);
8930 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8931 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8933 switch (TREE_CODE (type))
8935 case REAL_TYPE:
8937 machine_mode type_mode = TYPE_MODE (type);
8938 switch (target_flt_eval_method)
8940 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8941 if (type_mode == float16_type_mode)
8942 return float_type_node;
8943 break;
8944 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8945 if (type_mode == float16_type_mode
8946 || type_mode == float_type_mode)
8947 return double_type_node;
8948 break;
8949 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8950 if (type_mode == float16_type_mode
8951 || type_mode == float_type_mode
8952 || type_mode == double_type_mode)
8953 return long_double_type_node;
8954 break;
8955 default:
8956 gcc_unreachable ();
8958 break;
8960 case COMPLEX_TYPE:
8962 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8963 return NULL_TREE;
8964 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8965 switch (target_flt_eval_method)
8967 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8968 if (type_mode == float16_type_mode)
8969 return complex_float_type_node;
8970 break;
8971 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8972 if (type_mode == float16_type_mode
8973 || type_mode == float_type_mode)
8974 return complex_double_type_node;
8975 break;
8976 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8977 if (type_mode == float16_type_mode
8978 || type_mode == float_type_mode
8979 || type_mode == double_type_mode)
8980 return complex_long_double_type_node;
8981 break;
8982 default:
8983 gcc_unreachable ();
8985 break;
8987 default:
8988 break;
8991 return NULL_TREE;
8994 /* Return OP, stripped of any conversions to wider types as much as is safe.
8995 Converting the value back to OP's type makes a value equivalent to OP.
8997 If FOR_TYPE is nonzero, we return a value which, if converted to
8998 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
9000 OP must have integer, real or enumeral type. Pointers are not allowed!
9002 There are some cases where the obvious value we could return
9003 would regenerate to OP if converted to OP's type,
9004 but would not extend like OP to wider types.
9005 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
9006 For example, if OP is (unsigned short)(signed char)-1,
9007 we avoid returning (signed char)-1 if FOR_TYPE is int,
9008 even though extending that to an unsigned short would regenerate OP,
9009 since the result of extending (signed char)-1 to (int)
9010 is different from (int) OP. */
9012 tree
9013 get_unwidened (tree op, tree for_type)
9015 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
9016 tree type = TREE_TYPE (op);
9017 unsigned final_prec
9018 = TYPE_PRECISION (for_type != 0 ? for_type : type);
9019 int uns
9020 = (for_type != 0 && for_type != type
9021 && final_prec > TYPE_PRECISION (type)
9022 && TYPE_UNSIGNED (type));
9023 tree win = op;
9025 while (CONVERT_EXPR_P (op))
9027 int bitschange;
9029 /* TYPE_PRECISION on vector types has different meaning
9030 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
9031 so avoid them here. */
9032 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
9033 break;
9035 bitschange = TYPE_PRECISION (TREE_TYPE (op))
9036 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
9038 /* Truncations are many-one so cannot be removed.
9039 Unless we are later going to truncate down even farther. */
9040 if (bitschange < 0
9041 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
9042 break;
9044 /* See what's inside this conversion. If we decide to strip it,
9045 we will set WIN. */
9046 op = TREE_OPERAND (op, 0);
9048 /* If we have not stripped any zero-extensions (uns is 0),
9049 we can strip any kind of extension.
9050 If we have previously stripped a zero-extension,
9051 only zero-extensions can safely be stripped.
9052 Any extension can be stripped if the bits it would produce
9053 are all going to be discarded later by truncating to FOR_TYPE. */
9055 if (bitschange > 0)
9057 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
9058 win = op;
9059 /* TYPE_UNSIGNED says whether this is a zero-extension.
9060 Let's avoid computing it if it does not affect WIN
9061 and if UNS will not be needed again. */
9062 if ((uns
9063 || CONVERT_EXPR_P (op))
9064 && TYPE_UNSIGNED (TREE_TYPE (op)))
9066 uns = 1;
9067 win = op;
9072 /* If we finally reach a constant, see if it fits in something smaller
9073 and in that case convert it. */
9074 if (TREE_CODE (win) == INTEGER_CST)
9076 tree wtype = TREE_TYPE (win);
9077 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
9078 if (for_type)
9079 prec = MAX (prec, final_prec);
9080 if (prec < TYPE_PRECISION (wtype))
9082 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
9083 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
9084 win = fold_convert (t, win);
9088 return win;
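/* Usage sketch (illustrative only): when folding arithmetic performed
   in a wider type, a caller can ask for the unwidened form of an
   operand with

       tree narrow = get_unwidened (op, result_type);

   where OP and RESULT_TYPE stand for whatever trees the caller is
   working with; if nothing can be stripped safely, OP itself is
   returned.  */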
9091 /* Return OP or a simpler expression for a narrower value
9092 which can be sign-extended or zero-extended to give back OP.
9093 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
9094 or 0 if the value should be sign-extended. */
9096 tree
9097 get_narrower (tree op, int *unsignedp_ptr)
9099 int uns = 0;
9100 int first = 1;
9101 tree win = op;
9102 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
9104 while (TREE_CODE (op) == NOP_EXPR)
9106 int bitschange
9107 = (TYPE_PRECISION (TREE_TYPE (op))
9108 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
9110 /* Truncations are many-one so cannot be removed. */
9111 if (bitschange < 0)
9112 break;
9114 /* See what's inside this conversion. If we decide to strip it,
9115 we will set WIN. */
9117 if (bitschange > 0)
9119 op = TREE_OPERAND (op, 0);
9120 /* An extension: the outermost one can be stripped,
9121 but remember whether it is zero or sign extension. */
9122 if (first)
9123 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9124 /* Otherwise, if a sign extension has been stripped,
9125 only sign extensions can now be stripped;
9126 if a zero extension has been stripped, only zero-extensions. */
9127 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9128 break;
9129 first = 0;
9131 else /* bitschange == 0 */
9133 /* A change in nominal type can always be stripped, but we must
9134 preserve the unsignedness. */
9135 if (first)
9136 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9137 first = 0;
9138 op = TREE_OPERAND (op, 0);
9139 /* Keep trying to narrow, but don't assign op to win if it
9140 would turn an integral type into something else. */
9141 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9142 continue;
9145 win = op;
9148 if (TREE_CODE (op) == COMPONENT_REF
9149 /* Since type_for_size always gives an integer type. */
9150 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9151 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9152 /* Ensure field is laid out already. */
9153 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9154 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9156 unsigned HOST_WIDE_INT innerprec
9157 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9158 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9159 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9160 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9162 /* We can get this structure field in a narrower type that fits it,
9163 but the resulting extension to its nominal type (a fullword type)
9164 must satisfy the same conditions as for other extensions.
9166 Do this only for fields that are aligned (not bit-fields),
9167 because when bit-field insns are used there is no
9168 advantage in doing this. */
9170 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9171 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9172 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9173 && type != 0)
9175 if (first)
9176 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9177 win = fold_convert (type, op);
9181 *unsignedp_ptr = uns;
9182 return win;
9185 /* Return true if integer constant C has a value that is permissible
9186 for TYPE, an integral type. */
9188 bool
9189 int_fits_type_p (const_tree c, const_tree type)
9191 tree type_low_bound, type_high_bound;
9192 bool ok_for_low_bound, ok_for_high_bound;
9193 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9195 /* Non-standard boolean types can have arbitrary precision but various
9196 transformations assume that they can only take values 0 and +/-1. */
9197 if (TREE_CODE (type) == BOOLEAN_TYPE)
9198 return wi::fits_to_boolean_p (wi::to_wide (c), type);
9200 retry:
9201 type_low_bound = TYPE_MIN_VALUE (type);
9202 type_high_bound = TYPE_MAX_VALUE (type);
9204 /* If at least one bound of the type is a constant integer, we can check
9205 ourselves and maybe make a decision. If no such decision is possible, but
9206 this type is a subtype, try checking against that. Otherwise, use
9207 fits_to_tree_p, which checks against the precision.
9209 Compute the status for each possibly constant bound, and return if we see
9210 one does not match. Use ok_for_xxx_bound for this purpose: it is set to
9211 true when the constant is known to fit the corresponding bound, and left
9212 false when that bound is not a constant. */
9214 /* Check if c >= type_low_bound. */
9215 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9217 if (tree_int_cst_lt (c, type_low_bound))
9218 return false;
9219 ok_for_low_bound = true;
9221 else
9222 ok_for_low_bound = false;
9224 /* Check if c <= type_high_bound. */
9225 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9227 if (tree_int_cst_lt (type_high_bound, c))
9228 return false;
9229 ok_for_high_bound = true;
9231 else
9232 ok_for_high_bound = false;
9234 /* If the constant fits both bounds, the result is known. */
9235 if (ok_for_low_bound && ok_for_high_bound)
9236 return true;
9238 /* Perform some generic filtering which may allow making a decision
9239 even if the bounds are not constant. First, negative integers
9240 never fit in unsigned types. */
9241 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9242 return false;
9244 /* Second, narrower types always fit in wider ones. */
9245 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9246 return true;
9248 /* Third, unsigned integers with top bit set never fit signed types. */
9249 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9251 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9252 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9254 /* When a tree_cst is converted to a wide-int, the precision
9255 is taken from the type. However, if the precision of the
9256 mode underneath the type is smaller than that, it is
9257 possible that the value will not fit. The test below
9258 fails if any bit is set between the sign bit of the
9259 underlying mode and the top bit of the type. */
9260 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9261 return false;
9263 else if (wi::neg_p (wi::to_wide (c)))
9264 return false;
9267 /* If we haven't been able to decide at this point, there is nothing more we
9268 can check ourselves here. Look at the base type if we have one and it
9269 has the same precision. */
9270 if (TREE_CODE (type) == INTEGER_TYPE
9271 && TREE_TYPE (type) != 0
9272 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9274 type = TREE_TYPE (type);
9275 goto retry;
9278 /* Or to fits_to_tree_p, if nothing else. */
9279 return wi::fits_to_tree_p (wi::to_wide (c), type);
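/* Usage sketch (illustrative only): before narrowing a constant, a
   caller typically guards the conversion as in

       if (TREE_CODE (val) == INTEGER_CST
           && int_fits_type_p (val, short_integer_type_node))
         val = fold_convert (short_integer_type_node, val);

   with VAL standing for some tree the caller already has.  */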
9282 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9283 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9284 represented (assuming two's-complement arithmetic) within the bit
9285 precision of the type are returned instead. */
9287 void
9288 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9290 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9291 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9292 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9293 else
9295 if (TYPE_UNSIGNED (type))
9296 mpz_set_ui (min, 0);
9297 else
9299 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9300 wi::to_mpz (mn, min, SIGNED);
9304 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9305 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9306 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9307 else
9309 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9310 wi::to_mpz (mn, max, TYPE_SIGN (type));
9314 /* Return true if VAR is an automatic variable. */
9316 bool
9317 auto_var_p (const_tree var)
9319 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9320 || TREE_CODE (var) == PARM_DECL)
9321 && ! TREE_STATIC (var))
9322 || TREE_CODE (var) == RESULT_DECL);
9325 /* Return true if VAR is an automatic variable defined in function FN. */
9327 bool
9328 auto_var_in_fn_p (const_tree var, const_tree fn)
9330 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9331 && (auto_var_p (var)
9332 || TREE_CODE (var) == LABEL_DECL));
9335 /* Subprogram of the following function. Called by walk_tree.
9337 Return *TP if it is an automatic variable or parameter of the
9338 function passed in as DATA. */
9340 static tree
9341 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9343 tree fn = (tree) data;
9345 if (TYPE_P (*tp))
9346 *walk_subtrees = 0;
9348 else if (DECL_P (*tp)
9349 && auto_var_in_fn_p (*tp, fn))
9350 return *tp;
9352 return NULL_TREE;
9355 /* Returns true if T is, contains, or refers to a type with variable
9356 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9357 arguments, but not the return type. If FN is nonzero, only return
9358 true if a modifier of the type or position of FN is a variable or
9359 parameter inside FN.
9361 This concept is more general than that of C99 'variably modified types':
9362 in C99, a struct type is never variably modified because a VLA may not
9363 appear as a structure member. However, in GNU C, code like:
9365 struct S { int i[f()]; };
9367 is valid, and other languages may define similar constructs. */
9369 bool
9370 variably_modified_type_p (tree type, tree fn)
9372 tree t;
9374 /* Test if T is either variable (if FN is zero) or an expression containing
9375 a variable in FN. If TYPE isn't gimplified, return true also if
9376 gimplify_one_sizepos would gimplify the expression into a local
9377 variable. */
9378 #define RETURN_TRUE_IF_VAR(T) \
9379 do { tree _t = (T); \
9380 if (_t != NULL_TREE \
9381 && _t != error_mark_node \
9382 && !CONSTANT_CLASS_P (_t) \
9383 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9384 && (!fn \
9385 || (!TYPE_SIZES_GIMPLIFIED (type) \
9386 && (TREE_CODE (_t) != VAR_DECL \
9387 && !CONTAINS_PLACEHOLDER_P (_t))) \
9388 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9389 return true; } while (0)
9391 if (type == error_mark_node)
9392 return false;
9394 /* If TYPE itself has variable size, it is variably modified. */
9395 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9396 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9398 switch (TREE_CODE (type))
9400 case POINTER_TYPE:
9401 case REFERENCE_TYPE:
9402 case VECTOR_TYPE:
9403 /* Ada can have pointer types referring to themselves indirectly. */
9404 if (TREE_VISITED (type))
9405 return false;
9406 TREE_VISITED (type) = true;
9407 if (variably_modified_type_p (TREE_TYPE (type), fn))
9409 TREE_VISITED (type) = false;
9410 return true;
9412 TREE_VISITED (type) = false;
9413 break;
9415 case FUNCTION_TYPE:
9416 case METHOD_TYPE:
9417 /* If TYPE is a function type, it is variably modified if the
9418 return type is variably modified. */
9419 if (variably_modified_type_p (TREE_TYPE (type), fn))
9420 return true;
9421 break;
9423 case INTEGER_TYPE:
9424 case REAL_TYPE:
9425 case FIXED_POINT_TYPE:
9426 case ENUMERAL_TYPE:
9427 case BOOLEAN_TYPE:
9428 /* Scalar types are variably modified if their end points
9429 aren't constant. */
9430 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9431 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9432 break;
9434 case RECORD_TYPE:
9435 case UNION_TYPE:
9436 case QUAL_UNION_TYPE:
9437 /* We can't see if any of the fields are variably-modified by the
9438 definition we normally use, since that would produce infinite
9439 recursion via pointers. */
9440 /* This is variably modified if some field's type is. */
9441 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9442 if (TREE_CODE (t) == FIELD_DECL)
9444 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9445 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9446 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9448 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9449 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9451 break;
9453 case ARRAY_TYPE:
9454 /* Do not call ourselves to avoid infinite recursion. This is
9455 variably modified if the element type is. */
9456 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9457 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9458 break;
9460 default:
9461 break;
9464 /* The current language may have other cases to check, but in general,
9465 all other types are not variably modified. */
9466 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9468 #undef RETURN_TRUE_IF_VAR
9471 /* Given a DECL or TYPE, return the scope in which it was declared, or
9472 NULL_TREE if there is no containing scope. */
9474 tree
9475 get_containing_scope (const_tree t)
9477 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9480 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9482 const_tree
9483 get_ultimate_context (const_tree decl)
9485 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9487 if (TREE_CODE (decl) == BLOCK)
9488 decl = BLOCK_SUPERCONTEXT (decl);
9489 else
9490 decl = get_containing_scope (decl);
9492 return decl;
9495 /* Return the innermost context enclosing DECL that is
9496 a FUNCTION_DECL, or zero if none. */
9498 tree
9499 decl_function_context (const_tree decl)
9501 tree context;
9503 if (TREE_CODE (decl) == ERROR_MARK)
9504 return 0;
9506 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9507 where we look up the function at runtime. Such functions always take
9508 a first argument of type 'pointer to real context'.
9510 C++ should really be fixed to use DECL_CONTEXT for the real context,
9511 and use something else for the "virtual context". */
9512 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9513 context
9514 = TYPE_MAIN_VARIANT
9515 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9516 else
9517 context = DECL_CONTEXT (decl);
9519 while (context && TREE_CODE (context) != FUNCTION_DECL)
9521 if (TREE_CODE (context) == BLOCK)
9522 context = BLOCK_SUPERCONTEXT (context);
9523 else
9524 context = get_containing_scope (context);
9527 return context;
9530 /* Return the innermost context enclosing DECL that is
9531 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9532 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9534 tree
9535 decl_type_context (const_tree decl)
9537 tree context = DECL_CONTEXT (decl);
9539 while (context)
9540 switch (TREE_CODE (context))
9542 case NAMESPACE_DECL:
9543 case TRANSLATION_UNIT_DECL:
9544 return NULL_TREE;
9546 case RECORD_TYPE:
9547 case UNION_TYPE:
9548 case QUAL_UNION_TYPE:
9549 return context;
9551 case TYPE_DECL:
9552 case FUNCTION_DECL:
9553 context = DECL_CONTEXT (context);
9554 break;
9556 case BLOCK:
9557 context = BLOCK_SUPERCONTEXT (context);
9558 break;
9560 default:
9561 gcc_unreachable ();
9564 return NULL_TREE;
9567 /* CALL is a CALL_EXPR. Return the declaration for the function
9568 called, or NULL_TREE if the called function cannot be
9569 determined. */
9571 tree
9572 get_callee_fndecl (const_tree call)
9574 tree addr;
9576 if (call == error_mark_node)
9577 return error_mark_node;
9579 /* It's invalid to call this function with anything but a
9580 CALL_EXPR. */
9581 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9583 /* The first operand to the CALL is the address of the function
9584 called. */
9585 addr = CALL_EXPR_FN (call);
9587 /* If there is no function, return early. */
9588 if (addr == NULL_TREE)
9589 return NULL_TREE;
9591 STRIP_NOPS (addr);
9593 /* If this is a readonly function pointer, extract its initial value. */
9594 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9595 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9596 && DECL_INITIAL (addr))
9597 addr = DECL_INITIAL (addr);
9599 /* If the address is just `&f' for some function `f', then we know
9600 that `f' is being called. */
9601 if (TREE_CODE (addr) == ADDR_EXPR
9602 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9603 return TREE_OPERAND (addr, 0);
9605 /* We couldn't figure out what was being called. */
9606 return NULL_TREE;
9609 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9610 return the associated function code, otherwise return CFN_LAST. */
9612 combined_fn
9613 get_call_combined_fn (const_tree call)
9615 /* It's invalid to call this function with anything but a CALL_EXPR. */
9616 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9618 if (!CALL_EXPR_FN (call))
9619 return as_combined_fn (CALL_EXPR_IFN (call));
9621 tree fndecl = get_callee_fndecl (call);
9622 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9623 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9625 return CFN_LAST;
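/* Usage sketch (illustrative only): a pass that wants to treat sqrt
   calls uniformly, whether they arrive as the builtin or as the
   internal function, can test a CALL_EXPR with

       if (get_call_combined_fn (call) == CFN_SQRT)
         handle_sqrt (call);

   where handle_sqrt is a hypothetical helper; get_callee_fndecl is the
   right query when an actual FUNCTION_DECL is needed instead.  */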
9628 /* Comparator of indices based on tree_node_counts. */
9630 static int
9631 tree_nodes_cmp (const void *p1, const void *p2)
9633 const unsigned *n1 = (const unsigned *)p1;
9634 const unsigned *n2 = (const unsigned *)p2;
9636 return tree_node_counts[*n1] - tree_node_counts[*n2];
9639 /* Comparator of indices based on tree_code_counts. */
9641 static int
9642 tree_codes_cmp (const void *p1, const void *p2)
9644 const unsigned *n1 = (const unsigned *)p1;
9645 const unsigned *n2 = (const unsigned *)p2;
9647 return tree_code_counts[*n1] - tree_code_counts[*n2];
9650 #define TREE_MEM_USAGE_SPACES 40
9652 /* Print debugging information about tree nodes generated during the compile,
9653 and any language-specific information. */
9655 void
9656 dump_tree_statistics (void)
9658 if (GATHER_STATISTICS)
9660 uint64_t total_nodes, total_bytes;
9661 fprintf (stderr, "\nKind Nodes Bytes\n");
9662 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9663 total_nodes = total_bytes = 0;
9666 auto_vec<unsigned> indices (all_kinds);
9667 for (unsigned i = 0; i < all_kinds; i++)
9668 indices.quick_push (i);
9669 indices.qsort (tree_nodes_cmp);
9671 for (unsigned i = 0; i < (int) all_kinds; i++)
9673 unsigned j = indices[i];
9674 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9675 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9676 SIZE_AMOUNT (tree_node_sizes[j]));
9677 total_nodes += tree_node_counts[j];
9678 total_bytes += tree_node_sizes[j];
9680 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9681 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9682 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9683 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9687 fprintf (stderr, "Code Nodes\n");
9688 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9690 auto_vec<unsigned> indices (MAX_TREE_CODES);
9691 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9692 indices.quick_push (i);
9693 indices.qsort (tree_codes_cmp);
9695 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9697 unsigned j = indices[i];
9698 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9699 get_tree_code_name ((enum tree_code) j),
9700 SIZE_AMOUNT (tree_code_counts[j]));
9702 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9703 fprintf (stderr, "\n");
9704 ssanames_print_statistics ();
9705 fprintf (stderr, "\n");
9706 phinodes_print_statistics ();
9707 fprintf (stderr, "\n");
9710 else
9711 fprintf (stderr, "(No per-node statistics)\n");
9713 print_type_hash_statistics ();
9714 print_debug_expr_statistics ();
9715 print_value_expr_statistics ();
9716 lang_hooks.print_statistics ();
9719 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9721 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9723 unsigned
9724 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9726 /* This relies on the raw feedback's top 4 bits being zero. */
9727 #define FEEDBACK(X) ((X) * 0x04c11db7)
9728 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9729 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9730 static const unsigned syndromes[16] =
9732 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9733 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9734 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9735 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9737 #undef FEEDBACK
9738 #undef SYNDROME
9740 value <<= (32 - bytes * 8);
9741 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9743 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9745 chksum = (chksum << 4) ^ feedback;
9748 return chksum;
9751 /* Generate a crc32 of a string. */
9753 unsigned
9754 crc32_string (unsigned chksum, const char *string)
9757 chksum = crc32_byte (chksum, *string);
9758 while (*string++);
9759 return chksum;
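/* Usage sketch (illustrative only): the checksum helpers are chained by
   feeding the running value back in, e.g.

       unsigned chk = crc32_string (0, "some_symbol");
       chk = crc32_unsigned_n (chk, 42, 4);

   crc32_string is used below by get_file_function_name to mix a global
   object name into its generated identifiers.  */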
9762 /* P is a string that will be used in a symbol. Mask out any characters
9763 that are not valid in that context. */
9765 void
9766 clean_symbol_name (char *p)
9768 for (; *p; p++)
9769 if (! (ISALNUM (*p)
9770 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9771 || *p == '$'
9772 #endif
9773 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9774 || *p == '.'
9775 #endif
9777 *p = '_';
9780 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
9782 /* Create a unique anonymous identifier. The identifier is still a
9783 valid assembly label. */
9785 tree
9786 make_anon_name ()
9788 const char *fmt =
9789 #if !defined (NO_DOT_IN_LABEL)
9791 #elif !defined (NO_DOLLAR_IN_LABEL)
9793 #else
9795 #endif
9796 "_anon_%d";
9798 char buf[24];
9799 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9800 gcc_checking_assert (len < int (sizeof (buf)));
9802 tree id = get_identifier_with_length (buf, len);
9803 IDENTIFIER_ANON_P (id) = true;
9805 return id;
9808 /* Generate a name for a special-purpose function.
9809 The generated name may need to be unique across the whole link.
9810 Changes to this function may also require corresponding changes to
9811 xstrdup_mask_random.
9812 TYPE is some string to identify the purpose of this function to the
9813 linker or collect2; it must start with an uppercase letter,
9814 one of:
9815 I - for constructors
9816 D - for destructors
9817 N - for C++ anonymous namespaces
9818 F - for DWARF unwind frame information. */
9820 tree
9821 get_file_function_name (const char *type)
9823 char *buf;
9824 const char *p;
9825 char *q;
9827 /* If we already have a name we know to be unique, just use that. */
9828 if (first_global_object_name)
9829 p = q = ASTRDUP (first_global_object_name);
9830 /* If the target is handling the constructors/destructors, they
9831 will be local to this file and the name is only necessary for
9832 debugging purposes.
9833 We also assign sub_I and sub_D suffixes to constructors called from
9834 the global static constructors. These are always local. */
9835 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9836 || (strncmp (type, "sub_", 4) == 0
9837 && (type[4] == 'I' || type[4] == 'D')))
9839 const char *file = main_input_filename;
9840 if (! file)
9841 file = LOCATION_FILE (input_location);
9842 /* Just use the file's basename, because the full pathname
9843 might be quite long. */
9844 p = q = ASTRDUP (lbasename (file));
9846 else
9848 /* Otherwise, the name must be unique across the entire link.
9849 We don't have anything that we know to be unique to this translation
9850 unit, so use what we do have and throw in some randomness. */
9851 unsigned len;
9852 const char *name = weak_global_object_name;
9853 const char *file = main_input_filename;
9855 if (! name)
9856 name = "";
9857 if (! file)
9858 file = LOCATION_FILE (input_location);
9860 len = strlen (file);
9861 q = (char *) alloca (9 + 19 + len + 1);
9862 memcpy (q, file, len + 1);
9864 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9865 crc32_string (0, name), get_random_seed (false));
9867 p = q;
9870 clean_symbol_name (q);
9871 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9872 + strlen (type));
9874 /* Set up the name of the file-level functions we may need.
9875 Use a global object (which is already required to be unique over
9876 the program) rather than the file name (which imposes extra
9877 constraints). */
9878 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9880 return get_identifier (buf);
9883 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9885 /* Complain that the tree code of NODE does not match the expected 0
9886 terminated list of trailing codes. The trailing code list can be
9887 empty, for a more vague error message. FILE, LINE, and FUNCTION
9888 are of the caller. */
9890 void
9891 tree_check_failed (const_tree node, const char *file,
9892 int line, const char *function, ...)
9894 va_list args;
9895 const char *buffer;
9896 unsigned length = 0;
9897 enum tree_code code;
9899 va_start (args, function);
9900 while ((code = (enum tree_code) va_arg (args, int)))
9901 length += 4 + strlen (get_tree_code_name (code));
9902 va_end (args);
9903 if (length)
9905 char *tmp;
9906 va_start (args, function);
9907 length += strlen ("expected ");
9908 buffer = tmp = (char *) alloca (length);
9909 length = 0;
9910 while ((code = (enum tree_code) va_arg (args, int)))
9912 const char *prefix = length ? " or " : "expected ";
9914 strcpy (tmp + length, prefix);
9915 length += strlen (prefix);
9916 strcpy (tmp + length, get_tree_code_name (code));
9917 length += strlen (get_tree_code_name (code));
9919 va_end (args);
9921 else
9922 buffer = "unexpected node";
9924 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9925 buffer, get_tree_code_name (TREE_CODE (node)),
9926 function, trim_filename (file), line);
9929 /* Complain that the tree code of NODE does match the expected 0
9930 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9931 the caller. */
9933 void
9934 tree_not_check_failed (const_tree node, const char *file,
9935 int line, const char *function, ...)
9937 va_list args;
9938 char *buffer;
9939 unsigned length = 0;
9940 enum tree_code code;
9942 va_start (args, function);
9943 while ((code = (enum tree_code) va_arg (args, int)))
9944 length += 4 + strlen (get_tree_code_name (code));
9945 va_end (args);
9946 va_start (args, function);
9947 buffer = (char *) alloca (length);
9948 length = 0;
9949 while ((code = (enum tree_code) va_arg (args, int)))
9951 if (length)
9953 strcpy (buffer + length, " or ");
9954 length += 4;
9956 strcpy (buffer + length, get_tree_code_name (code));
9957 length += strlen (get_tree_code_name (code));
9959 va_end (args);
9961 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9962 buffer, get_tree_code_name (TREE_CODE (node)),
9963 function, trim_filename (file), line);
9966 /* Similar to tree_check_failed, except that we check for a class of tree
9967 code, given in CL. */
9969 void
9970 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9971 const char *file, int line, const char *function)
9973 internal_error
9974 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9975 TREE_CODE_CLASS_STRING (cl),
9976 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9977 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9980 /* Similar to tree_check_failed, except that instead of specifying a
9981 dozen codes, use the knowledge that they're all sequential. */
9983 void
9984 tree_range_check_failed (const_tree node, const char *file, int line,
9985 const char *function, enum tree_code c1,
9986 enum tree_code c2)
9988 char *buffer;
9989 unsigned length = 0;
9990 unsigned int c;
9992 for (c = c1; c <= c2; ++c)
9993 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9995 length += strlen ("expected ");
9996 buffer = (char *) alloca (length);
9997 length = 0;
9999 for (c = c1; c <= c2; ++c)
10001 const char *prefix = length ? " or " : "expected ";
10003 strcpy (buffer + length, prefix);
10004 length += strlen (prefix);
10005 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
10006 length += strlen (get_tree_code_name ((enum tree_code) c));
10009 internal_error ("tree check: %s, have %s in %s, at %s:%d",
10010 buffer, get_tree_code_name (TREE_CODE (node)),
10011 function, trim_filename (file), line);
10015 /* Similar to tree_check_failed, except that we check that a tree does
10016 not belong to the specified class of tree code, given in CL. */
10018 void
10019 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
10020 const char *file, int line, const char *function)
10022 internal_error
10023 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
10024 TREE_CODE_CLASS_STRING (cl),
10025 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
10026 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
10030 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
10032 void
10033 omp_clause_check_failed (const_tree node, const char *file, int line,
10034 const char *function, enum omp_clause_code code)
10036 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
10037 "in %s, at %s:%d",
10038 omp_clause_code_name[code],
10039 get_tree_code_name (TREE_CODE (node)),
10040 function, trim_filename (file), line);
10044 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
10046 void
10047 omp_clause_range_check_failed (const_tree node, const char *file, int line,
10048 const char *function, enum omp_clause_code c1,
10049 enum omp_clause_code c2)
10051 char *buffer;
10052 unsigned length = 0;
10053 unsigned int c;
10055 for (c = c1; c <= c2; ++c)
10056 length += 4 + strlen (omp_clause_code_name[c]);
10058 length += strlen ("expected ");
10059 buffer = (char *) alloca (length);
10060 length = 0;
10062 for (c = c1; c <= c2; ++c)
10064 const char *prefix = length ? " or " : "expected ";
10066 strcpy (buffer + length, prefix);
10067 length += strlen (prefix);
10068 strcpy (buffer + length, omp_clause_code_name[c]);
10069 length += strlen (omp_clause_code_name[c]);
10072 internal_error ("tree check: %s, have %s in %s, at %s:%d",
10073 buffer, omp_clause_code_name[TREE_CODE (node)],
10074 function, trim_filename (file), line);
10078 #undef DEFTREESTRUCT
10079 #define DEFTREESTRUCT(VAL, NAME) NAME,
10081 static const char *ts_enum_names[] = {
10082 #include "treestruct.def"
10084 #undef DEFTREESTRUCT
10086 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
10088 /* Similar to tree_class_check_failed, except that we check
10089 whether CODE contains the tree structure identified by EN. */
10091 void
10092 tree_contains_struct_check_failed (const_tree node,
10093 const enum tree_node_structure_enum en,
10094 const char *file, int line,
10095 const char *function)
10097 internal_error
10098 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
10099 TS_ENUM_NAME (en),
10100 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
10104 /* Similar to above, except that the check is for the bounds of a
10105 TREE_INT_CST's (dynamically sized) array of elements. */
10107 void
10108 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
10109 const char *function)
10111 internal_error
10112 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
10113 "at %s:%d",
10114 idx + 1, len, function, trim_filename (file), line);
10117 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
10118 (dynamically sized) vector. */
10120 void
10121 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
10122 const char *function)
10124 internal_error
10125 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
10126 idx + 1, len, function, trim_filename (file), line);
10129 /* Similar to above, except that the check is for the bounds of the operand
10130 vector of an expression node EXP. */
10132 void
10133 tree_operand_check_failed (int idx, const_tree exp, const char *file,
10134 int line, const char *function)
10136 enum tree_code code = TREE_CODE (exp);
10137 internal_error
10138 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
10139 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
10140 function, trim_filename (file), line);
10143 /* Similar to above, except that the check is for the number of
10144 operands of an OMP_CLAUSE node. */
10146 void
10147 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
10148 int line, const char *function)
10150 internal_error
10151 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
10152 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
10153 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
10154 trim_filename (file), line);
10156 #endif /* ENABLE_TREE_CHECKING */
10158 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
10159 and mapped to the machine mode MODE. Initialize its fields and build
10160 the information necessary for debugging output. */
10162 static tree
10163 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
10165 tree t;
10166 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
10168 t = make_node (VECTOR_TYPE);
10169 TREE_TYPE (t) = mv_innertype;
10170 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
10171 SET_TYPE_MODE (t, mode);
10173 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
10174 SET_TYPE_STRUCTURAL_EQUALITY (t);
10175 else if ((TYPE_CANONICAL (mv_innertype) != innertype
10176 || mode != VOIDmode)
10177 && !VECTOR_BOOLEAN_TYPE_P (t))
10178 TYPE_CANONICAL (t)
10179 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
10181 layout_type (t);
10183 hashval_t hash = type_hash_canon_hash (t);
10184 t = type_hash_canon (hash, t);
10186 /* We have built a main variant, based on the main variant of the
10187 inner type. Use it to build the variant we return. */
10188 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10189 && TREE_TYPE (t) != innertype)
10190 return build_type_attribute_qual_variant (t,
10191 TYPE_ATTRIBUTES (innertype),
10192 TYPE_QUALS (innertype));
10194 return t;
10197 static tree
10198 make_or_reuse_type (unsigned size, int unsignedp)
10200 int i;
10202 if (size == INT_TYPE_SIZE)
10203 return unsignedp ? unsigned_type_node : integer_type_node;
10204 if (size == CHAR_TYPE_SIZE)
10205 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10206 if (size == SHORT_TYPE_SIZE)
10207 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10208 if (size == LONG_TYPE_SIZE)
10209 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10210 if (size == LONG_LONG_TYPE_SIZE)
10211 return (unsignedp ? long_long_unsigned_type_node
10212 : long_long_integer_type_node);
10214 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10215 if (size == int_n_data[i].bitsize
10216 && int_n_enabled_p[i])
10217 return (unsignedp ? int_n_trees[i].unsigned_type
10218 : int_n_trees[i].signed_type);
10220 if (unsignedp)
10221 return make_unsigned_type (size);
10222 else
10223 return make_signed_type (size);
10226 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10228 static tree
10229 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10231 if (satp)
10233 if (size == SHORT_FRACT_TYPE_SIZE)
10234 return unsignedp ? sat_unsigned_short_fract_type_node
10235 : sat_short_fract_type_node;
10236 if (size == FRACT_TYPE_SIZE)
10237 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10238 if (size == LONG_FRACT_TYPE_SIZE)
10239 return unsignedp ? sat_unsigned_long_fract_type_node
10240 : sat_long_fract_type_node;
10241 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10242 return unsignedp ? sat_unsigned_long_long_fract_type_node
10243 : sat_long_long_fract_type_node;
10245 else
10247 if (size == SHORT_FRACT_TYPE_SIZE)
10248 return unsignedp ? unsigned_short_fract_type_node
10249 : short_fract_type_node;
10250 if (size == FRACT_TYPE_SIZE)
10251 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10252 if (size == LONG_FRACT_TYPE_SIZE)
10253 return unsignedp ? unsigned_long_fract_type_node
10254 : long_fract_type_node;
10255 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10256 return unsignedp ? unsigned_long_long_fract_type_node
10257 : long_long_fract_type_node;
10260 return make_fract_type (size, unsignedp, satp);
10263 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10265 static tree
10266 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10268 if (satp)
10270 if (size == SHORT_ACCUM_TYPE_SIZE)
10271 return unsignedp ? sat_unsigned_short_accum_type_node
10272 : sat_short_accum_type_node;
10273 if (size == ACCUM_TYPE_SIZE)
10274 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10275 if (size == LONG_ACCUM_TYPE_SIZE)
10276 return unsignedp ? sat_unsigned_long_accum_type_node
10277 : sat_long_accum_type_node;
10278 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10279 return unsignedp ? sat_unsigned_long_long_accum_type_node
10280 : sat_long_long_accum_type_node;
10282 else
10284 if (size == SHORT_ACCUM_TYPE_SIZE)
10285 return unsignedp ? unsigned_short_accum_type_node
10286 : short_accum_type_node;
10287 if (size == ACCUM_TYPE_SIZE)
10288 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10289 if (size == LONG_ACCUM_TYPE_SIZE)
10290 return unsignedp ? unsigned_long_accum_type_node
10291 : long_accum_type_node;
10292 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10293 return unsignedp ? unsigned_long_long_accum_type_node
10294 : long_long_accum_type_node;
10297 return make_accum_type (size, unsignedp, satp);
10301 /* Create an atomic variant node for TYPE. This routine is called
10302 during initialization of data types to create the 5 basic atomic
10303 types. The generic build_variant_type function requires these to
10304 already be set up in order to function properly, so cannot be
10305 called from there. If ALIGN is non-zero, then ensure alignment is
10306 overridden to this value. */
10308 static tree
10309 build_atomic_base (tree type, unsigned int align)
10311 tree t;
10313 /* Make sure it's not already registered. */
10314 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10315 return t;
10317 t = build_variant_type_copy (type);
10318 set_type_quals (t, TYPE_QUAL_ATOMIC);
10320 if (align)
10321 SET_TYPE_ALIGN (t, align);
10323 return t;
10326 /* Information about the _FloatN and _FloatNx types. This must be in
10327 the same order as the corresponding TI_* enum values. */
10328 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10330 { 16, false },
10331 { 32, false },
10332 { 64, false },
10333 { 128, false },
10334 { 32, true },
10335 { 64, true },
10336 { 128, true },
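/* Editorial note (not part of the original source): reading the table with
   its comment, each entry is { N, extended }, so in TI_* order the rows
   correspond to _Float16, _Float32, _Float64 and _Float128 (extended ==
   false) followed by _Float32x, _Float64x and _Float128x (extended ==
   true).  */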
10340 /* Create nodes for all integer types (and error_mark_node) using the sizes
10341 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10343 void
10344 build_common_tree_nodes (bool signed_char)
10346 int i;
10348 error_mark_node = make_node (ERROR_MARK);
10349 TREE_TYPE (error_mark_node) = error_mark_node;
10351 initialize_sizetypes ();
10353 /* Define both `signed char' and `unsigned char'. */
10354 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10355 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10356 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10357 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10359 /* Define `char', which is like either `signed char' or `unsigned char'
10360 but not the same as either. */
10361 char_type_node
10362 = (signed_char
10363 ? make_signed_type (CHAR_TYPE_SIZE)
10364 : make_unsigned_type (CHAR_TYPE_SIZE));
10365 TYPE_STRING_FLAG (char_type_node) = 1;
10367 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10368 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10369 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10370 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10371 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10372 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10373 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10374 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10376 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10378 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10379 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10381 if (int_n_enabled_p[i])
10383 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10384 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10388 /* Define a boolean type. This type only represents boolean values but
10389 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10390 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10391 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10392 TYPE_PRECISION (boolean_type_node) = 1;
10393 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10395 /* Define what type to use for size_t. */
10396 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10397 size_type_node = unsigned_type_node;
10398 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10399 size_type_node = long_unsigned_type_node;
10400 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10401 size_type_node = long_long_unsigned_type_node;
10402 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10403 size_type_node = short_unsigned_type_node;
10404 else
10406 int i;
10408 size_type_node = NULL_TREE;
10409 for (i = 0; i < NUM_INT_N_ENTS; i++)
10410 if (int_n_enabled_p[i])
10412 char name[50], altname[50];
10413 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10414 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10416 if (strcmp (name, SIZE_TYPE) == 0
10417 || strcmp (altname, SIZE_TYPE) == 0)
10419 size_type_node = int_n_trees[i].unsigned_type;
10422 if (size_type_node == NULL_TREE)
10423 gcc_unreachable ();
10426 /* Define what type to use for ptrdiff_t. */
10427 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10428 ptrdiff_type_node = integer_type_node;
10429 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10430 ptrdiff_type_node = long_integer_type_node;
10431 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10432 ptrdiff_type_node = long_long_integer_type_node;
10433 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10434 ptrdiff_type_node = short_integer_type_node;
10435 else
10437 ptrdiff_type_node = NULL_TREE;
10438 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10439 if (int_n_enabled_p[i])
10441 char name[50], altname[50];
10442 sprintf (name, "__int%d", int_n_data[i].bitsize);
10443 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10445 if (strcmp (name, PTRDIFF_TYPE) == 0
10446 || strcmp (altname, PTRDIFF_TYPE) == 0)
10447 ptrdiff_type_node = int_n_trees[i].signed_type;
10449 if (ptrdiff_type_node == NULL_TREE)
10450 gcc_unreachable ();
10453 /* Fill in the rest of the sized types. Reuse existing type nodes
10454 when possible. */
10455 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10456 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10457 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10458 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10459 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10461 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10462 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10463 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10464 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10465 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10467 /* Don't call build_qualified_type for atomics. That routine does
10468 special processing for atomics, and until they are initialized
10469 it's better not to make that call.
10471 Check to see if there is a target override for atomic types. */
10473 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10474 targetm.atomic_align_for_mode (QImode));
10475 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10476 targetm.atomic_align_for_mode (HImode));
10477 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10478 targetm.atomic_align_for_mode (SImode));
10479 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10480 targetm.atomic_align_for_mode (DImode));
10481 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10482 targetm.atomic_align_for_mode (TImode));
10484 access_public_node = get_identifier ("public");
10485 access_protected_node = get_identifier ("protected");
10486 access_private_node = get_identifier ("private");
10488 /* Define these next since types below may use them. */
10489 integer_zero_node = build_int_cst (integer_type_node, 0);
10490 integer_one_node = build_int_cst (integer_type_node, 1);
10491 integer_three_node = build_int_cst (integer_type_node, 3);
10492 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10494 size_zero_node = size_int (0);
10495 size_one_node = size_int (1);
10496 bitsize_zero_node = bitsize_int (0);
10497 bitsize_one_node = bitsize_int (1);
10498 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10500 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10501 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10503 void_type_node = make_node (VOID_TYPE);
10504 layout_type (void_type_node);
10506 /* We are not going to have real types in C with less than byte alignment,
10507 so we might as well not have any types that claim to have it. */
10508 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10509 TYPE_USER_ALIGN (void_type_node) = 0;
10511 void_node = make_node (VOID_CST);
10512 TREE_TYPE (void_node) = void_type_node;
10514 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10515 layout_type (TREE_TYPE (null_pointer_node));
10517 ptr_type_node = build_pointer_type (void_type_node);
10518 const_ptr_type_node
10519 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10520 for (unsigned i = 0;
10521 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10522 ++i)
10523 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10525 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10527 float_type_node = make_node (REAL_TYPE);
10528 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10529 layout_type (float_type_node);
10531 double_type_node = make_node (REAL_TYPE);
10532 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10533 layout_type (double_type_node);
10535 long_double_type_node = make_node (REAL_TYPE);
10536 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10537 layout_type (long_double_type_node);
10539 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10541 int n = floatn_nx_types[i].n;
10542 bool extended = floatn_nx_types[i].extended;
10543 scalar_float_mode mode;
10544 if (!targetm.floatn_mode (n, extended).exists (&mode))
10545 continue;
10546 int precision = GET_MODE_PRECISION (mode);
10547 /* Work around the rs6000 KFmode having precision 113 not
10548 128. */
10549 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10550 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10551 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10552 if (!extended)
10553 gcc_assert (min_precision == n);
10554 if (precision < min_precision)
10555 precision = min_precision;
10556 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10557 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10558 layout_type (FLOATN_NX_TYPE_NODE (i));
10559 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10562 float_ptr_type_node = build_pointer_type (float_type_node);
10563 double_ptr_type_node = build_pointer_type (double_type_node);
10564 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10565 integer_ptr_type_node = build_pointer_type (integer_type_node);
10567 /* Fixed size integer types. */
10568 uint16_type_node = make_or_reuse_type (16, 1);
10569 uint32_type_node = make_or_reuse_type (32, 1);
10570 uint64_type_node = make_or_reuse_type (64, 1);
10572 /* Decimal float types. */
10573 dfloat32_type_node = make_node (REAL_TYPE);
10574 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10575 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10576 layout_type (dfloat32_type_node);
10577 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10579 dfloat64_type_node = make_node (REAL_TYPE);
10580 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10581 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10582 layout_type (dfloat64_type_node);
10583 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10585 dfloat128_type_node = make_node (REAL_TYPE);
10586 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10587 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10588 layout_type (dfloat128_type_node);
10589 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10591 complex_integer_type_node = build_complex_type (integer_type_node, true);
10592 complex_float_type_node = build_complex_type (float_type_node, true);
10593 complex_double_type_node = build_complex_type (double_type_node, true);
10594 complex_long_double_type_node = build_complex_type (long_double_type_node,
10595 true);
10597 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10599 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10600 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10601 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10604 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10605 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10606 sat_ ## KIND ## _type_node = \
10607 make_sat_signed_ ## KIND ## _type (SIZE); \
10608 sat_unsigned_ ## KIND ## _type_node = \
10609 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10610 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10611 unsigned_ ## KIND ## _type_node = \
10612 make_unsigned_ ## KIND ## _type (SIZE);
10614 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10615 sat_ ## WIDTH ## KIND ## _type_node = \
10616 make_sat_signed_ ## KIND ## _type (SIZE); \
10617 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10618 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10619 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10620 unsigned_ ## WIDTH ## KIND ## _type_node = \
10621 make_unsigned_ ## KIND ## _type (SIZE);
10623 /* Make fixed-point type nodes based on four different widths. */
10624 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10625 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10626 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10627 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10628 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10630 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10631 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10632 NAME ## _type_node = \
10633 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10634 u ## NAME ## _type_node = \
10635 make_or_reuse_unsigned_ ## KIND ## _type \
10636 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10637 sat_ ## NAME ## _type_node = \
10638 make_or_reuse_sat_signed_ ## KIND ## _type \
10639 (GET_MODE_BITSIZE (MODE ## mode)); \
10640 sat_u ## NAME ## _type_node = \
10641 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10642 (GET_MODE_BITSIZE (U ## MODE ## mode));
10644 /* Fixed-point type and mode nodes. */
10645 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10646 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10647 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10648 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10649 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10650 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10651 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10652 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10653 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10654 MAKE_FIXED_MODE_NODE (accum, da, DA)
10655 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10658 tree t = targetm.build_builtin_va_list ();
10660 /* Many back-ends define record types without setting TYPE_NAME.
10661 If we copied the record type here, we'd keep the original
10662 record type without a name. This breaks name mangling. So,
10663 don't copy record types and let c_common_nodes_and_builtins()
10664 declare the type to be __builtin_va_list. */
10665 if (TREE_CODE (t) != RECORD_TYPE)
10666 t = build_variant_type_copy (t);
10668 va_list_type_node = t;
10671 /* SCEV analyzer global shared trees. */
10672 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10673 TREE_TYPE (chrec_dont_know) = void_type_node;
10674 chrec_known = make_node (SCEV_KNOWN);
10675 TREE_TYPE (chrec_known) = void_type_node;
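/* Editorial sketch (not part of the original source): a front end is
   expected to call this once, early in initialization, before creating its
   own types and builtins.  A C-family front end would do something like

     build_common_tree_nodes (flag_signed_char);

   where flag_signed_char is assumed here as the usual source of the
   SIGNED_CHAR argument.  */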
10678 /* Modify DECL for given flags.
10679 TM_PURE attribute is set only on types, so the function will modify
10680 DECL's type when ECF_TM_PURE is used. */
10682 void
10683 set_call_expr_flags (tree decl, int flags)
10685 if (flags & ECF_NOTHROW)
10686 TREE_NOTHROW (decl) = 1;
10687 if (flags & ECF_CONST)
10688 TREE_READONLY (decl) = 1;
10689 if (flags & ECF_PURE)
10690 DECL_PURE_P (decl) = 1;
10691 if (flags & ECF_LOOPING_CONST_OR_PURE)
10692 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10693 if (flags & ECF_NOVOPS)
10694 DECL_IS_NOVOPS (decl) = 1;
10695 if (flags & ECF_NORETURN)
10696 TREE_THIS_VOLATILE (decl) = 1;
10697 if (flags & ECF_MALLOC)
10698 DECL_IS_MALLOC (decl) = 1;
10699 if (flags & ECF_RETURNS_TWICE)
10700 DECL_IS_RETURNS_TWICE (decl) = 1;
10701 if (flags & ECF_LEAF)
10702 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10703 NULL, DECL_ATTRIBUTES (decl));
10704 if (flags & ECF_COLD)
10705 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10706 NULL, DECL_ATTRIBUTES (decl));
10707 if (flags & ECF_RET1)
10708 DECL_ATTRIBUTES (decl)
10709 = tree_cons (get_identifier ("fn spec"),
10710 build_tree_list (NULL_TREE, build_string (1, "1")),
10711 DECL_ATTRIBUTES (decl));
10712 if ((flags & ECF_TM_PURE) && flag_tm)
10713 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10714 /* Looping const or pure is implied by noreturn.
10715 There is currently no way to declare looping const or looping pure alone. */
10716 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10717 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
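/* Editorial sketch (not part of the original source): marking a
   hypothetical declaration DECL as a const, nothrow leaf function would
   look like

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   which, per the flag tests above, sets TREE_READONLY and TREE_NOTHROW on
   DECL and chains a "leaf" attribute onto DECL_ATTRIBUTES.  */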
10721 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10723 static void
10724 local_define_builtin (const char *name, tree type, enum built_in_function code,
10725 const char *library_name, int ecf_flags)
10727 tree decl;
10729 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10730 library_name, NULL_TREE);
10731 set_call_expr_flags (decl, ecf_flags);
10733 set_builtin_decl (code, decl, true);
10736 /* Call this function after instantiating all builtins that the language
10737 front end cares about. This will build the rest of the builtins
10738 and internal functions that are relied upon by the tree optimizers and
10739 the middle-end. */
10741 void
10742 build_common_builtin_nodes (void)
10744 tree tmp, ftype;
10745 int ecf_flags;
10747 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10748 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10750 ftype = build_function_type (void_type_node, void_list_node);
10751 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10752 local_define_builtin ("__builtin_unreachable", ftype,
10753 BUILT_IN_UNREACHABLE,
10754 "__builtin_unreachable",
10755 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10756 | ECF_CONST | ECF_COLD);
10757 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10758 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10759 "abort",
10760 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10763 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10764 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10766 ftype = build_function_type_list (ptr_type_node,
10767 ptr_type_node, const_ptr_type_node,
10768 size_type_node, NULL_TREE);
10770 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10771 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10772 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10773 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10774 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10775 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10778 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10780 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10781 const_ptr_type_node, size_type_node,
10782 NULL_TREE);
10783 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10784 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10787 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10789 ftype = build_function_type_list (ptr_type_node,
10790 ptr_type_node, integer_type_node,
10791 size_type_node, NULL_TREE);
10792 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10793 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10796 /* If we're checking the stack, `alloca' can throw. */
10797 const int alloca_flags
10798 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10800 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10802 ftype = build_function_type_list (ptr_type_node,
10803 size_type_node, NULL_TREE);
10804 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10805 "alloca", alloca_flags);
10808 ftype = build_function_type_list (ptr_type_node, size_type_node,
10809 size_type_node, NULL_TREE);
10810 local_define_builtin ("__builtin_alloca_with_align", ftype,
10811 BUILT_IN_ALLOCA_WITH_ALIGN,
10812 "__builtin_alloca_with_align",
10813 alloca_flags);
10815 ftype = build_function_type_list (ptr_type_node, size_type_node,
10816 size_type_node, size_type_node, NULL_TREE);
10817 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10818 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10819 "__builtin_alloca_with_align_and_max",
10820 alloca_flags);
10822 ftype = build_function_type_list (void_type_node,
10823 ptr_type_node, ptr_type_node,
10824 ptr_type_node, NULL_TREE);
10825 local_define_builtin ("__builtin_init_trampoline", ftype,
10826 BUILT_IN_INIT_TRAMPOLINE,
10827 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10828 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10829 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10830 "__builtin_init_heap_trampoline",
10831 ECF_NOTHROW | ECF_LEAF);
10832 local_define_builtin ("__builtin_init_descriptor", ftype,
10833 BUILT_IN_INIT_DESCRIPTOR,
10834 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10836 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10837 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10838 BUILT_IN_ADJUST_TRAMPOLINE,
10839 "__builtin_adjust_trampoline",
10840 ECF_CONST | ECF_NOTHROW);
10841 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10842 BUILT_IN_ADJUST_DESCRIPTOR,
10843 "__builtin_adjust_descriptor",
10844 ECF_CONST | ECF_NOTHROW);
10846 ftype = build_function_type_list (void_type_node,
10847 ptr_type_node, ptr_type_node, NULL_TREE);
10848 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10849 BUILT_IN_NONLOCAL_GOTO,
10850 "__builtin_nonlocal_goto",
10851 ECF_NORETURN | ECF_NOTHROW);
10853 ftype = build_function_type_list (void_type_node,
10854 ptr_type_node, ptr_type_node, NULL_TREE);
10855 local_define_builtin ("__builtin_setjmp_setup", ftype,
10856 BUILT_IN_SETJMP_SETUP,
10857 "__builtin_setjmp_setup", ECF_NOTHROW);
10859 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10860 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10861 BUILT_IN_SETJMP_RECEIVER,
10862 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10864 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10865 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10866 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10868 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10869 local_define_builtin ("__builtin_stack_restore", ftype,
10870 BUILT_IN_STACK_RESTORE,
10871 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10873 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10874 const_ptr_type_node, size_type_node,
10875 NULL_TREE);
10876 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10877 "__builtin_memcmp_eq",
10878 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10880 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10881 "__builtin_strncmp_eq",
10882 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10884 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10885 "__builtin_strcmp_eq",
10886 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10888 /* If there's a possibility that we might use the ARM EABI, build the
10889 alternate __cxa_end_cleanup node used to resume from C++. */
10890 if (targetm.arm_eabi_unwinder)
10892 ftype = build_function_type_list (void_type_node, NULL_TREE);
10893 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10894 BUILT_IN_CXA_END_CLEANUP,
10895 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10898 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10899 local_define_builtin ("__builtin_unwind_resume", ftype,
10900 BUILT_IN_UNWIND_RESUME,
10901 ((targetm_common.except_unwind_info (&global_options)
10902 == UI_SJLJ)
10903 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10904 ECF_NORETURN);
10906 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10908 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10909 NULL_TREE);
10910 local_define_builtin ("__builtin_return_address", ftype,
10911 BUILT_IN_RETURN_ADDRESS,
10912 "__builtin_return_address",
10913 ECF_NOTHROW);
10916 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10917 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10919 ftype = build_function_type_list (void_type_node, ptr_type_node,
10920 ptr_type_node, NULL_TREE);
10921 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10922 local_define_builtin ("__cyg_profile_func_enter", ftype,
10923 BUILT_IN_PROFILE_FUNC_ENTER,
10924 "__cyg_profile_func_enter", 0);
10925 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10926 local_define_builtin ("__cyg_profile_func_exit", ftype,
10927 BUILT_IN_PROFILE_FUNC_EXIT,
10928 "__cyg_profile_func_exit", 0);
10931 /* The exception object and filter values from the runtime. The argument
10932 must be zero before exception lowering, i.e. from the front end. After
10933 exception lowering, it will be the region number for the exception
10934 landing pad. These functions are PURE instead of CONST to prevent
10935 them from being hoisted past the exception edge that will initialize
10936 its value in the landing pad. */
10937 ftype = build_function_type_list (ptr_type_node,
10938 integer_type_node, NULL_TREE);
10939 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10940 /* Only use TM_PURE if we have TM language support. */
10941 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10942 ecf_flags |= ECF_TM_PURE;
10943 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10944 "__builtin_eh_pointer", ecf_flags);
10946 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10947 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10948 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10949 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10951 ftype = build_function_type_list (void_type_node,
10952 integer_type_node, integer_type_node,
10953 NULL_TREE);
10954 local_define_builtin ("__builtin_eh_copy_values", ftype,
10955 BUILT_IN_EH_COPY_VALUES,
10956 "__builtin_eh_copy_values", ECF_NOTHROW);
10958 /* Complex multiplication and division. These are handled as builtins
10959 rather than optabs because emit_library_call_value doesn't support
10960 complex. Further, we can do slightly better with folding these
10961 beasties if the real and imaginary parts of the arguments are separate. */
10963 int mode;
10965 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10967 char mode_name_buf[4], *q;
10968 const char *p;
10969 enum built_in_function mcode, dcode;
10970 tree type, inner_type;
10971 const char *prefix = "__";
10973 if (targetm.libfunc_gnu_prefix)
10974 prefix = "__gnu_";
10976 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10977 if (type == NULL)
10978 continue;
10979 inner_type = TREE_TYPE (type);
10981 ftype = build_function_type_list (type, inner_type, inner_type,
10982 inner_type, inner_type, NULL_TREE);
10984 mcode = ((enum built_in_function)
10985 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10986 dcode = ((enum built_in_function)
10987 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10989 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10990 *q = TOLOWER (*p);
10991 *q = '\0';
10993 /* For -ftrapping-math these should throw from a former
10994 -fnon-call-exception stmt. */
10995 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10996 NULL);
10997 local_define_builtin (built_in_names[mcode], ftype, mcode,
10998 built_in_names[mcode],
10999 ECF_CONST | ECF_LEAF);
11001 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
11002 NULL);
11003 local_define_builtin (built_in_names[dcode], ftype, dcode,
11004 built_in_names[dcode],
11005 ECF_CONST | ECF_LEAF);
11009 init_internal_fns ();
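/* Editorial note (not part of the original source): the complex
   multiply/divide loop above derives the library names from the lowercased
   mode name, e.g. for SCmode it registers "__mulsc3" and "__divsc3" (or
   "__gnu_mulsc3" and "__gnu_divsc3" when targetm.libfunc_gnu_prefix is
   set).  */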
11012 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
11013 better way.
11015 If we requested a pointer to a vector, build up the pointers that
11016 we stripped off while looking for the inner type. Similarly for
11017 return values from functions.
11019 The argument TYPE is the top of the chain, and BOTTOM is the
11020 new type which we will point to. */
11022 tree
11023 reconstruct_complex_type (tree type, tree bottom)
11025 tree inner, outer;
11027 if (TREE_CODE (type) == POINTER_TYPE)
11029 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11030 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
11031 TYPE_REF_CAN_ALIAS_ALL (type));
11033 else if (TREE_CODE (type) == REFERENCE_TYPE)
11035 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11036 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
11037 TYPE_REF_CAN_ALIAS_ALL (type));
11039 else if (TREE_CODE (type) == ARRAY_TYPE)
11041 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11042 outer = build_array_type (inner, TYPE_DOMAIN (type));
11044 else if (TREE_CODE (type) == FUNCTION_TYPE)
11046 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11047 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
11049 else if (TREE_CODE (type) == METHOD_TYPE)
11051 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11052 /* The build_method_type_directly() routine prepends 'this' to the argument
11053 list, so we must compensate by getting rid of it. */
11054 outer
11055 = build_method_type_directly
11056 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
11057 inner,
11058 TREE_CHAIN (TYPE_ARG_TYPES (type)));
11060 else if (TREE_CODE (type) == OFFSET_TYPE)
11062 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11063 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
11065 else
11066 return bottom;
11068 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
11069 TYPE_QUALS (type));
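/* Editorial sketch (not part of the original source): a worked example of
   the reconstruction.  If TYPE is "pointer to vector of float" and BOTTOM
   is float_type_node, the recursion strips the POINTER_TYPE, reaches the
   VECTOR_TYPE (which is none of the handled codes) and returns BOTTOM, so
   the result is a plain "pointer to float" carrying TYPE's qualifiers and
   attributes.  With a hypothetical vec_ptr_type:

     tree scalar_ptr = reconstruct_complex_type (vec_ptr_type,
                                                 float_type_node);
   */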
11072 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
11073 the inner type. */
11074 tree
11075 build_vector_type_for_mode (tree innertype, machine_mode mode)
11077 poly_int64 nunits;
11078 unsigned int bitsize;
11080 switch (GET_MODE_CLASS (mode))
11082 case MODE_VECTOR_BOOL:
11083 case MODE_VECTOR_INT:
11084 case MODE_VECTOR_FLOAT:
11085 case MODE_VECTOR_FRACT:
11086 case MODE_VECTOR_UFRACT:
11087 case MODE_VECTOR_ACCUM:
11088 case MODE_VECTOR_UACCUM:
11089 nunits = GET_MODE_NUNITS (mode);
11090 break;
11092 case MODE_INT:
11093 /* Check that there are no leftover bits. */
11094 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
11095 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
11096 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
11097 break;
11099 default:
11100 gcc_unreachable ();
11103 return make_vector_type (innertype, nunits, mode);
11106 /* Similarly, but takes the inner type and number of units, which must be
11107 a power of two. */
11109 tree
11110 build_vector_type (tree innertype, poly_int64 nunits)
11112 return make_vector_type (innertype, nunits, VOIDmode);
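/* Editorial sketch (not part of the original source): building a
   4-element vector of 32-bit ints.  Both forms below should yield the same
   main variant, assuming the target provides V4SImode:

     tree v4si_a = build_vector_type (intSI_type_node, 4);
     tree v4si_b = build_vector_type_for_mode (intSI_type_node, V4SImode);
   */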
11115 /* Build a truth vector type with the specified number of units and vector size. */
11117 tree
11118 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
11120 machine_mode mask_mode
11121 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
11123 poly_uint64 vsize;
11124 if (mask_mode == BLKmode)
11125 vsize = vector_size * BITS_PER_UNIT;
11126 else
11127 vsize = GET_MODE_BITSIZE (mask_mode);
11129 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
11131 tree bool_type = build_nonstandard_boolean_type (esize);
11133 return make_vector_type (bool_type, nunits, mask_mode);
11136 /* Returns a vector type corresponding to a comparison of VECTYPE. */
11138 tree
11139 build_same_sized_truth_vector_type (tree vectype)
11141 if (VECTOR_BOOLEAN_TYPE_P (vectype))
11142 return vectype;
11144 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
11146 if (known_eq (size, 0U))
11147 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
11149 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
11152 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
11154 tree
11155 build_opaque_vector_type (tree innertype, poly_int64 nunits)
11157 tree t = make_vector_type (innertype, nunits, VOIDmode);
11158 tree cand;
11159 /* We always build the non-opaque variant before the opaque one,
11160 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
11161 cand = TYPE_NEXT_VARIANT (t);
11162 if (cand
11163 && TYPE_VECTOR_OPAQUE (cand)
11164 && check_qualified_type (cand, t, TYPE_QUALS (t)))
11165 return cand;
11166 /* Otherwise build a variant type and make sure to queue it after
11167 the non-opaque type. */
11168 cand = build_distinct_type_copy (t);
11169 TYPE_VECTOR_OPAQUE (cand) = true;
11170 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
11171 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
11172 TYPE_NEXT_VARIANT (t) = cand;
11173 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
11174 return cand;
11177 /* Return the value of element I of VECTOR_CST T as a wide_int. */
11179 wide_int
11180 vector_cst_int_elt (const_tree t, unsigned int i)
11182 /* First handle elements that are directly encoded. */
11183 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11184 if (i < encoded_nelts)
11185 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
11187 /* Identify the pattern that contains element I and work out the index of
11188 the last encoded element for that pattern. */
11189 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11190 unsigned int pattern = i % npatterns;
11191 unsigned int count = i / npatterns;
11192 unsigned int final_i = encoded_nelts - npatterns + pattern;
11194 /* If there are no steps, the final encoded value is the right one. */
11195 if (!VECTOR_CST_STEPPED_P (t))
11196 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11198 /* Otherwise work out the value from the last two encoded elements. */
11199 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11200 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11201 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
11202 return wi::to_wide (v2) + (count - 2) * diff;
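/* Editorial note (not part of the original source): a worked example of the
   stepped-encoding arithmetic above.  For a VECTOR_CST encoded with one
   pattern and three elements per pattern, say { 1, 4, 7, ... }, the first
   three elements are returned directly and any later element I is computed
   from the last two encoded values as 7 + (I - 2) * (7 - 4), continuing the
   series 1, 4, 7, 10, 13, ...  */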
11205 /* Return the value of element I of VECTOR_CST T. */
11207 tree
11208 vector_cst_elt (const_tree t, unsigned int i)
11210 /* First handle elements that are directly encoded. */
11211 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11212 if (i < encoded_nelts)
11213 return VECTOR_CST_ENCODED_ELT (t, i);
11215 /* If there are no steps, the final encoded value is the right one. */
11216 if (!VECTOR_CST_STEPPED_P (t))
11218 /* Identify the pattern that contains element I and work out the index of
11219 the last encoded element for that pattern. */
11220 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11221 unsigned int pattern = i % npatterns;
11222 unsigned int final_i = encoded_nelts - npatterns + pattern;
11223 return VECTOR_CST_ENCODED_ELT (t, final_i);
11226 /* Otherwise work out the value from the last two encoded elements. */
11227 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11228 vector_cst_int_elt (t, i));
11231 /* Given an initializer INIT, return TRUE if INIT is zero or some
11232 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
11233 null, set *NONZERO if and only if INIT is known not to be all
11234 zeros. The combination of return value of false and *NONZERO
11235 false implies that INIT may but need not be all zeros. Other
11236 combinations indicate definitive answers. */
11238 bool
11239 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
11241 bool dummy;
11242 if (!nonzero)
11243 nonzero = &dummy;
11245 /* Conservatively clear NONZERO and set it only if INIT is definitely
11246 not all zero. */
11247 *nonzero = false;
11249 STRIP_NOPS (init);
11251 unsigned HOST_WIDE_INT off = 0;
11253 switch (TREE_CODE (init))
11255 case INTEGER_CST:
11256 if (integer_zerop (init))
11257 return true;
11259 *nonzero = true;
11260 return false;
11262 case REAL_CST:
11263 /* ??? Note that this is not correct for C4X float formats. There,
11264 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11265 negative exponent. */
11266 if (real_zerop (init)
11267 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
11268 return true;
11270 *nonzero = true;
11271 return false;
11273 case FIXED_CST:
11274 if (fixed_zerop (init))
11275 return true;
11277 *nonzero = true;
11278 return false;
11280 case COMPLEX_CST:
11281 if (integer_zerop (init)
11282 || (real_zerop (init)
11283 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11284 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11285 return true;
11287 *nonzero = true;
11288 return false;
11290 case VECTOR_CST:
11291 if (VECTOR_CST_NPATTERNS (init) == 1
11292 && VECTOR_CST_DUPLICATE_P (init)
11293 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11294 return true;
11296 *nonzero = true;
11297 return false;
11299 case CONSTRUCTOR:
11301 if (TREE_CLOBBER_P (init))
11302 return false;
11304 unsigned HOST_WIDE_INT idx;
11305 tree elt;
11307 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11308 if (!initializer_zerop (elt, nonzero))
11309 return false;
11311 return true;
11314 case MEM_REF:
11316 tree arg = TREE_OPERAND (init, 0);
11317 if (TREE_CODE (arg) != ADDR_EXPR)
11318 return false;
11319 tree offset = TREE_OPERAND (init, 1);
11320 if (TREE_CODE (offset) != INTEGER_CST
11321 || !tree_fits_uhwi_p (offset))
11322 return false;
11323 off = tree_to_uhwi (offset);
11324 if (INT_MAX < off)
11325 return false;
11326 arg = TREE_OPERAND (arg, 0);
11327 if (TREE_CODE (arg) != STRING_CST)
11328 return false;
11329 init = arg;
11331 /* Fall through. */
11333 case STRING_CST:
11335 gcc_assert (off <= INT_MAX);
11337 int i = off;
11338 int n = TREE_STRING_LENGTH (init);
11339 if (n <= i)
11340 return false;
11342 /* We need to loop through all elements to handle cases like
11343 "\0" and "\0foobar". */
11344 for (i = 0; i < n; ++i)
11345 if (TREE_STRING_POINTER (init)[i] != '\0')
11347 *nonzero = true;
11348 return false;
11351 return true;
11354 default:
11355 return false;
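/* Editorial sketch (not part of the original source): how the tristate
   result is meant to be consumed, following the comment above the function:

     bool nonzero;
     if (initializer_zerop (init, &nonzero))
       ...            -- INIT is definitely all zeros
     else if (nonzero)
       ...            -- INIT definitely contains a nonzero element
     else
       ...            -- unknown: INIT may or may not be all zeros
   */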
11359 /* Return true if EXPR is an initializer expression in which every element
11360 is a constant that is numerically equal to 0 or 1. The elements do not
11361 need to be equal to each other. */
11363 bool
11364 initializer_each_zero_or_onep (const_tree expr)
11366 STRIP_ANY_LOCATION_WRAPPER (expr);
11368 switch (TREE_CODE (expr))
11370 case INTEGER_CST:
11371 return integer_zerop (expr) || integer_onep (expr);
11373 case REAL_CST:
11374 return real_zerop (expr) || real_onep (expr);
11376 case VECTOR_CST:
11378 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11379 if (VECTOR_CST_STEPPED_P (expr)
11380 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11381 return false;
11383 for (unsigned int i = 0; i < nelts; ++i)
11385 tree elt = vector_cst_elt (expr, i);
11386 if (!initializer_each_zero_or_onep (elt))
11387 return false;
11390 return true;
11393 default:
11394 return false;
11398 /* Given an initializer INIT for a TYPE, return true if INIT is zero
11399 so that it can be replaced by value initialization. This function
11400 distinguishes between empty strings as initializers for arrays and
11401 for pointers (which make it return false). */
11403 bool
11404 type_initializer_zero_p (tree type, tree init)
11406 if (type == error_mark_node || init == error_mark_node)
11407 return false;
11409 STRIP_NOPS (init);
11411 if (POINTER_TYPE_P (type))
11412 return TREE_CODE (init) != STRING_CST && initializer_zerop (init);
11414 if (TREE_CODE (init) != CONSTRUCTOR)
11415 return initializer_zerop (init);
11417 if (TREE_CODE (type) == ARRAY_TYPE)
11419 tree elt_type = TREE_TYPE (type);
11420 elt_type = TYPE_MAIN_VARIANT (elt_type);
11421 if (elt_type == char_type_node)
11422 return initializer_zerop (init);
11424 tree elt_init;
11425 unsigned HOST_WIDE_INT i;
11426 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, elt_init)
11427 if (!type_initializer_zero_p (elt_type, elt_init))
11428 return false;
11429 return true;
11432 if (TREE_CODE (type) != RECORD_TYPE)
11433 return initializer_zerop (init);
11435 tree fld = TYPE_FIELDS (type);
11437 tree fld_init;
11438 unsigned HOST_WIDE_INT i;
11439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, fld_init)
11441 /* Advance to the next member, skipping over everything that
11442 cannot be initialized (including unnamed bit-fields). */
11443 while (TREE_CODE (fld) != FIELD_DECL
11444 || DECL_ARTIFICIAL (fld)
11445 || (DECL_BIT_FIELD (fld) && !DECL_NAME (fld)))
11447 fld = DECL_CHAIN (fld);
11448 if (!fld)
11449 return true;
11450 continue;
11453 tree fldtype = TREE_TYPE (fld);
11454 if (!type_initializer_zero_p (fldtype, fld_init))
11455 return false;
11457 fld = DECL_CHAIN (fld);
11458 if (!fld)
11459 break;
11462 return true;
11465 /* Check if vector VEC consists of all equal elements and
11466 that the number of elements corresponds to the type of VEC.
11467 The function returns the first element of the vector
11468 or NULL_TREE if the vector is not uniform. */
11469 tree
11470 uniform_vector_p (const_tree vec)
11472 tree first, t;
11473 unsigned HOST_WIDE_INT i, nelts;
11475 if (vec == NULL_TREE)
11476 return NULL_TREE;
11478 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11480 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11481 return TREE_OPERAND (vec, 0);
11483 else if (TREE_CODE (vec) == VECTOR_CST)
11485 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11486 return VECTOR_CST_ENCODED_ELT (vec, 0);
11487 return NULL_TREE;
11490 else if (TREE_CODE (vec) == CONSTRUCTOR
11491 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11493 first = error_mark_node;
11495 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11497 if (i == 0)
11499 first = t;
11500 continue;
11502 if (!operand_equal_p (first, t, 0))
11503 return NULL_TREE;
11505 if (i != nelts)
11506 return NULL_TREE;
11508 return first;
11511 return NULL_TREE;
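/* Editorial note (not part of the original source): for example, a
   VECTOR_CST such as { 2, 2, 2, 2 } (a single duplicated pattern) returns
   the INTEGER_CST 2, a VEC_DUPLICATE_EXPR returns its operand, and a
   CONSTRUCTOR or VECTOR_CST with differing elements, e.g. { 1, 2, 3, 4 },
   returns NULL_TREE.  */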
11514 /* If the argument is an INTEGER_CST, return it. If the argument is a vector
11515 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11516 return NULL_TREE.
11517 Look through location wrappers. */
11519 tree
11520 uniform_integer_cst_p (tree t)
11522 STRIP_ANY_LOCATION_WRAPPER (t);
11524 if (TREE_CODE (t) == INTEGER_CST)
11525 return t;
11527 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11529 t = uniform_vector_p (t);
11530 if (t && TREE_CODE (t) == INTEGER_CST)
11531 return t;
11534 return NULL_TREE;
11537 /* If VECTOR_CST T has a single nonzero element, return the index of that
11538 element, otherwise return -1. */
11540 int
11541 single_nonzero_element (const_tree t)
11543 unsigned HOST_WIDE_INT nelts;
11544 unsigned int repeat_nelts;
11545 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11546 repeat_nelts = nelts;
11547 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11549 nelts = vector_cst_encoded_nelts (t);
11550 repeat_nelts = VECTOR_CST_NPATTERNS (t);
11552 else
11553 return -1;
11555 int res = -1;
11556 for (unsigned int i = 0; i < nelts; ++i)
11558 tree elt = vector_cst_elt (t, i);
11559 if (!integer_zerop (elt) && !real_zerop (elt))
11561 if (res >= 0 || i >= repeat_nelts)
11562 return -1;
11563 res = i;
11566 return res;
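/* Editorial note (not part of the original source): e.g. for { 0, 0, 5, 0 }
   the function returns 2, while { 0, 3, 0, 3 } (more than one nonzero
   element) and { 0, 0, 0, 0 } (no nonzero element) both return -1.  */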
11569 /* Build an empty statement at location LOC. */
11571 tree
11572 build_empty_stmt (location_t loc)
11574 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11575 SET_EXPR_LOCATION (t, loc);
11576 return t;
11580 /* Build an OpenMP clause with code CODE. LOC is the location of the
11581 clause. */
11583 tree
11584 build_omp_clause (location_t loc, enum omp_clause_code code)
11586 tree t;
11587 int size, length;
11589 length = omp_clause_num_ops[code];
11590 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11592 record_node_allocation_statistics (OMP_CLAUSE, size);
11594 t = (tree) ggc_internal_alloc (size);
11595 memset (t, 0, size);
11596 TREE_SET_CODE (t, OMP_CLAUSE);
11597 OMP_CLAUSE_SET_CODE (t, code);
11598 OMP_CLAUSE_LOCATION (t) = loc;
11600 return t;
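/* Editorial sketch (not part of the original source): creating a clause and
   filling in its operand.  OMP_CLAUSE_DECL, LOC and VAR are assumed here
   purely for illustration:

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;
   */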
11603 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11604 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11605 Except for the CODE and operand count field, other storage for the
11606 object is initialized to zeros. */
11608 tree
11609 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11611 tree t;
11612 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11614 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11615 gcc_assert (len >= 1);
11617 record_node_allocation_statistics (code, length);
11619 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11621 TREE_SET_CODE (t, code);
11623 /* Can't use TREE_OPERAND to store the length because if checking is
11624 enabled, it will try to check the length before we store it. :-P */
11625 t->exp.operands[0] = build_int_cst (sizetype, len);
11627 return t;
11630 /* Helper function for build_call_* functions; build a CALL_EXPR with
11631 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11632 the argument slots. */
11634 static tree
11635 build_call_1 (tree return_type, tree fn, int nargs)
11637 tree t;
11639 t = build_vl_exp (CALL_EXPR, nargs + 3);
11640 TREE_TYPE (t) = return_type;
11641 CALL_EXPR_FN (t) = fn;
11642 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11644 return t;
11647 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11648 FN and a null static chain slot. NARGS is the number of call arguments
11649 which are specified as "..." arguments. */
11651 tree
11652 build_call_nary (tree return_type, tree fn, int nargs, ...)
11654 tree ret;
11655 va_list args;
11656 va_start (args, nargs);
11657 ret = build_call_valist (return_type, fn, nargs, args);
11658 va_end (args);
11659 return ret;
11662 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11663 FN and a null static chain slot. NARGS is the number of call arguments
11664 which are specified as a va_list ARGS. */
11666 tree
11667 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11669 tree t;
11670 int i;
11672 t = build_call_1 (return_type, fn, nargs);
11673 for (i = 0; i < nargs; i++)
11674 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11675 process_call_operands (t);
11676 return t;
11679 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11680 FN and a null static chain slot. NARGS is the number of call arguments
11681 which are specified as a tree array ARGS. */
11683 tree
11684 build_call_array_loc (location_t loc, tree return_type, tree fn,
11685 int nargs, const tree *args)
11687 tree t;
11688 int i;
11690 t = build_call_1 (return_type, fn, nargs);
11691 for (i = 0; i < nargs; i++)
11692 CALL_EXPR_ARG (t, i) = args[i];
11693 process_call_operands (t);
11694 SET_EXPR_LOCATION (t, loc);
11695 return t;
11698 /* Like build_call_array, but takes a vec. */
11700 tree
11701 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11703 tree ret, t;
11704 unsigned int ix;
11706 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11707 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11708 CALL_EXPR_ARG (ret, ix) = t;
11709 process_call_operands (ret);
11710 return ret;
11713 /* Conveniently construct a function call expression. FNDECL names the
11714 function to be called and N arguments are passed in the array
11715 ARGARRAY. */
11717 tree
11718 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11720 tree fntype = TREE_TYPE (fndecl);
11721 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11723 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11726 /* Conveniently construct a function call expression. FNDECL names the
11727 function to be called and the arguments are passed in the vector
11728 VEC. */
11730 tree
11731 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11733 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11734 vec_safe_address (vec));
11738 /* Conveniently construct a function call expression. FNDECL names the
11739 function to be called, N is the number of arguments, and the "..."
11740 parameters are the argument expressions. */
11742 tree
11743 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11745 va_list ap;
11746 tree *argarray = XALLOCAVEC (tree, n);
11747 int i;
11749 va_start (ap, n);
11750 for (i = 0; i < n; i++)
11751 argarray[i] = va_arg (ap, tree);
11752 va_end (ap);
11753 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11756 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11757 varargs macros aren't supported by all bootstrap compilers. */
11759 tree
11760 build_call_expr (tree fndecl, int n, ...)
11762 va_list ap;
11763 tree *argarray = XALLOCAVEC (tree, n);
11764 int i;
11766 va_start (ap, n);
11767 for (i = 0; i < n; i++)
11768 argarray[i] = va_arg (ap, tree);
11769 va_end (ap);
11770 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
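/* Usage sketch (hypothetical caller): building a call to the memset
   builtin from three assumed, previously built argument trees PTR, VAL
   and LEN:

     tree fn = builtin_decl_explicit (BUILT_IN_MEMSET);
     tree call = build_call_expr (fn, 3, ptr, val, len);

   build_call_expr_loc would be used instead when a meaningful source
   location is available.  */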
11773 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11774 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11775 It will get gimplified later into an ordinary internal function. */
11777 tree
11778 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11779 tree type, int n, const tree *args)
11781 tree t = build_call_1 (type, NULL_TREE, n);
11782 for (int i = 0; i < n; ++i)
11783 CALL_EXPR_ARG (t, i) = args[i];
11784 SET_EXPR_LOCATION (t, loc);
11785 CALL_EXPR_IFN (t) = ifn;
11786 return t;
11789 /* Build an internal call expression. This is just like CALL_EXPR, except
11790 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
11791 internal function. */
11793 tree
11794 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11795 tree type, int n, ...)
11797 va_list ap;
11798 tree *argarray = XALLOCAVEC (tree, n);
11799 int i;
11801 va_start (ap, n);
11802 for (i = 0; i < n; i++)
11803 argarray[i] = va_arg (ap, tree);
11804 va_end (ap);
11805 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11808 /* Return a function call to FN, if the target is guaranteed to support it,
11809 or null otherwise.
11811 N is the number of arguments, passed in the "...", and TYPE is the
11812 type of the return value. */
11814 tree
11815 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11816 int n, ...)
11818 va_list ap;
11819 tree *argarray = XALLOCAVEC (tree, n);
11820 int i;
11822 va_start (ap, n);
11823 for (i = 0; i < n; i++)
11824 argarray[i] = va_arg (ap, tree);
11825 va_end (ap);
11826 if (internal_fn_p (fn))
11828 internal_fn ifn = as_internal_fn (fn);
11829 if (direct_internal_fn_p (ifn))
11831 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11832 if (!direct_internal_fn_supported_p (ifn, types,
11833 OPTIMIZE_FOR_BOTH))
11834 return NULL_TREE;
11836 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11838 else
11840 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11841 if (!fndecl)
11842 return NULL_TREE;
11843 return build_call_expr_loc_array (loc, fndecl, n, argarray);
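/* Usage sketch (hypothetical caller): requesting a square root that is
   only emitted if the target supports it directly, with ARG an assumed
   tree of type double and LOC an assumed location:

     tree call = maybe_build_call_expr_loc (loc, CFN_SQRT,
                                            double_type_node, 1, arg);
     if (!call)
       ...fall back to another expansion...

   CFN_SQRT here stands for the combined_fn of the sqrt internal
   function.  */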
11847 /* Return a function call to the appropriate builtin alloca variant.
11849 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11850 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11851 bound for SIZE in case it is not a fixed value. */
11853 tree
11854 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11856 if (max_size >= 0)
11858 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11859 return
11860 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11862 else if (align > 0)
11864 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11865 return build_call_expr (t, 2, size, size_int (align));
11867 else
11869 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11870 return build_call_expr (t, 1, size);
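/* Usage sketch (hypothetical caller): with SIZE an assumed sizetype tree
   and ALIGN the requested alignment, a caller with no known upper bound
   simply passes -1 for MAX_SIZE:

     tree call = build_alloca_call_expr (size, align, -1);

   This selects the __builtin_alloca_with_align form above when ALIGN is
   non-zero and plain __builtin_alloca otherwise.  */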
11874 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11875 if SIZE == -1) and return a tree node representing a char * pointer to
11876 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). The STRING_CST value
11877 is the LEN bytes at STR (the representation of the string, which may
11878 be wide). */
11880 tree
11881 build_string_literal (int len, const char *str,
11882 tree eltype /* = char_type_node */,
11883 unsigned HOST_WIDE_INT size /* = -1 */)
11885 tree t = build_string (len, str);
11886 /* Set the maximum valid index based on the string length or SIZE. */
11887 unsigned HOST_WIDE_INT maxidx
11888 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11890 tree index = build_index_type (size_int (maxidx));
11891 eltype = build_type_variant (eltype, 1, 0);
11892 tree type = build_array_type (eltype, index);
11893 TREE_TYPE (t) = type;
11894 TREE_CONSTANT (t) = 1;
11895 TREE_READONLY (t) = 1;
11896 TREE_STATIC (t) = 1;
11898 type = build_pointer_type (eltype);
11899 t = build1 (ADDR_EXPR, type,
11900 build4 (ARRAY_REF, eltype,
11901 t, integer_zero_node, NULL_TREE, NULL_TREE));
11902 return t;
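/* Usage sketch (hypothetical caller): building the address of a constant
   "%s\n" format string, e.g. as an argument of a generated call:

     const char *fmt = "%s\n";
     tree fmt_tree = build_string_literal (strlen (fmt) + 1, fmt);

   The defaulted ELTYPE and SIZE parameters give a char array sized from
   LEN, as described above.  */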
11907 /* Return true if T (assumed to be a DECL) must be assigned a memory
11908 location. */
11910 bool
11911 needs_to_live_in_memory (const_tree t)
11913 return (TREE_ADDRESSABLE (t)
11914 || is_global_var (t)
11915 || (TREE_CODE (t) == RESULT_DECL
11916 && !DECL_BY_REFERENCE (t)
11917 && aggregate_value_p (t, current_function_decl)));
11920 /* Return the value of the constant X, sign-extended. */
11922 HOST_WIDE_INT
11923 int_cst_value (const_tree x)
11925 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11926 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11928 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11929 gcc_assert (cst_and_fits_in_hwi (x));
11931 if (bits < HOST_BITS_PER_WIDE_INT)
11933 bool negative = ((val >> (bits - 1)) & 1) != 0;
11934 if (negative)
11935 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11936 else
11937 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11940 return val;
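/* Usage sketch (hypothetical caller): because of the sign extension
   above, a negative constant of a narrow signed type comes back as a
   negative HOST_WIDE_INT:

     tree c = build_int_cst (signed_char_type_node, -5);
     HOST_WIDE_INT v = int_cst_value (c);

   V is -5 here even though the low eight bits of C hold the two's
   complement pattern 0xfb.  */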
11943 /* If TYPE is an integral or pointer type, return an integer type with
11944 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11945 if TYPE is already an integer type of signedness UNSIGNEDP.
11946 If TYPE is a floating-point type, return an integer type with the same
11947 bitsize and with the signedness given by UNSIGNEDP; this is useful
11948 when doing bit-level operations on a floating-point value. */
11950 tree
11951 signed_or_unsigned_type_for (int unsignedp, tree type)
11953 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11954 return type;
11956 if (TREE_CODE (type) == VECTOR_TYPE)
11958 tree inner = TREE_TYPE (type);
11959 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11960 if (!inner2)
11961 return NULL_TREE;
11962 if (inner == inner2)
11963 return type;
11964 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11967 if (TREE_CODE (type) == COMPLEX_TYPE)
11969 tree inner = TREE_TYPE (type);
11970 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11971 if (!inner2)
11972 return NULL_TREE;
11973 if (inner == inner2)
11974 return type;
11975 return build_complex_type (inner2);
11978 unsigned int bits;
11979 if (INTEGRAL_TYPE_P (type)
11980 || POINTER_TYPE_P (type)
11981 || TREE_CODE (type) == OFFSET_TYPE)
11982 bits = TYPE_PRECISION (type);
11983 else if (TREE_CODE (type) == REAL_TYPE)
11984 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11985 else
11986 return NULL_TREE;
11988 return build_nonstandard_integer_type (bits, unsignedp);
11991 /* If TYPE is an integral or pointer type, return an integer type with
11992 the same precision which is unsigned, or itself if TYPE is already an
11993 unsigned integer type. If TYPE is a floating-point type, return an
11994 unsigned integer type with the same bitsize as TYPE. */
11996 tree
11997 unsigned_type_for (tree type)
11999 return signed_or_unsigned_type_for (1, type);
12002 /* If TYPE is an integral or pointer type, return an integer type with
12003 the same precision which is signed, or itself if TYPE is already a
12004 signed integer type. If TYPE is a floating-point type, return a
12005 signed integer type with the same bitsize as TYPE. */
12007 tree
12008 signed_type_for (tree type)
12010 return signed_or_unsigned_type_for (0, type);
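/* Usage sketch (hypothetical caller): the two wrappers above are
   typically used to flip the signedness of an existing operand type:

     tree utype = unsigned_type_for (TREE_TYPE (op));
     tree stype = signed_type_for (TREE_TYPE (op));

   OP is an assumed integral or pointer operand; both calls preserve the
   precision of the original type.  */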
12013 /* If TYPE is a vector type, return a signed integer vector type with the
12014 same width and number of subparts. Otherwise return boolean_type_node. */
12016 tree
12017 truth_type_for (tree type)
12019 if (TREE_CODE (type) == VECTOR_TYPE)
12021 if (VECTOR_BOOLEAN_TYPE_P (type))
12022 return type;
12023 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
12024 GET_MODE_SIZE (TYPE_MODE (type)));
12026 else
12027 return boolean_type_node;
12030 /* Returns the largest value obtainable by casting something in INNER type to
12031 OUTER type. */
12033 tree
12034 upper_bound_in_type (tree outer, tree inner)
12036 unsigned int det = 0;
12037 unsigned oprec = TYPE_PRECISION (outer);
12038 unsigned iprec = TYPE_PRECISION (inner);
12039 unsigned prec;
12041 /* Compute a unique number for every combination. */
12042 det |= (oprec > iprec) ? 4 : 0;
12043 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
12044 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
12046 /* Determine the exponent to use. */
12047 switch (det)
12049 case 0:
12050 case 1:
12051 /* oprec <= iprec, outer: signed, inner: don't care. */
12052 prec = oprec - 1;
12053 break;
12054 case 2:
12055 case 3:
12056 /* oprec <= iprec, outer: unsigned, inner: don't care. */
12057 prec = oprec;
12058 break;
12059 case 4:
12060 /* oprec > iprec, outer: signed, inner: signed. */
12061 prec = iprec - 1;
12062 break;
12063 case 5:
12064 /* oprec > iprec, outer: signed, inner: unsigned. */
12065 prec = iprec;
12066 break;
12067 case 6:
12068 /* oprec > iprec, outer: unsigned, inner: signed. */
12069 prec = oprec;
12070 break;
12071 case 7:
12072 /* oprec > iprec, outer: unsigned, inner: unsigned. */
12073 prec = iprec;
12074 break;
12075 default:
12076 gcc_unreachable ();
12079 return wide_int_to_tree (outer,
12080 wi::mask (prec, false, TYPE_PRECISION (outer)));
12083 /* Returns the smallest value obtainable by casting something in INNER type to
12084 OUTER type. */
12086 tree
12087 lower_bound_in_type (tree outer, tree inner)
12089 unsigned oprec = TYPE_PRECISION (outer);
12090 unsigned iprec = TYPE_PRECISION (inner);
12092 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
12093 and obtain 0. */
12094 if (TYPE_UNSIGNED (outer)
12095 /* If we are widening something of an unsigned type, OUTER type
12096 contains all values of INNER type. In particular, both INNER
12097 and OUTER types have zero in common. */
12098 || (oprec > iprec && TYPE_UNSIGNED (inner)))
12099 return build_int_cst (outer, 0);
12100 else
12102 /* If we are widening a signed type to another signed type, we
12103 want to obtain -2^^(iprec-1). If we are keeping the
12104 precision or narrowing to a signed type, we want to obtain
12105 -2^(oprec-1). */
12106 unsigned prec = oprec > iprec ? iprec : oprec;
12107 return wide_int_to_tree (outer,
12108 wi::mask (prec - 1, true,
12109 TYPE_PRECISION (outer)));
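/* Usage sketch (hypothetical caller): for a cast from unsigned char to
   signed char (same precision, signed destination) the two functions
   above give the full range of the destination type:

     tree hi = upper_bound_in_type (signed_char_type_node,
                                    unsigned_char_type_node);
     tree lo = lower_bound_in_type (signed_char_type_node,
                                    unsigned_char_type_node);

   HI is 127 and LO is -128 in this case.  */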
12113 /* Return nonzero if two operands that are suitable for PHI nodes are
12114 necessarily equal. Specifically, both ARG0 and ARG1 must be either
12115 SSA_NAME or invariant. Note that this is strictly an optimization.
12116 That is, callers of this function can directly call operand_equal_p
12117 and get the same result, only slower. */
12120 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
12122 if (arg0 == arg1)
12123 return 1;
12124 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
12125 return 0;
12126 return operand_equal_p (arg0, arg1, 0);
12129 /* Returns the number of zeros at the end of the binary representation of X. */
12131 tree
12132 num_ending_zeros (const_tree x)
12134 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
12138 #define WALK_SUBTREE(NODE) \
12139 do \
12141 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
12142 if (result) \
12143 return result; \
12145 while (0)
12147 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
12148 be walked whenever a type is seen in the tree. The rest of the operands and
12149 the return value are as for walk_tree. */
12151 static tree
12152 walk_type_fields (tree type, walk_tree_fn func, void *data,
12153 hash_set<tree> *pset, walk_tree_lh lh)
12155 tree result = NULL_TREE;
12157 switch (TREE_CODE (type))
12159 case POINTER_TYPE:
12160 case REFERENCE_TYPE:
12161 case VECTOR_TYPE:
12162 /* We have to worry about mutually recursive pointers. These can't
12163 be written in C. They can in Ada. It's pathological, but
12164 there's an ACATS test (c38102a) that checks it. Deal with this
12165 by checking if we're pointing to another pointer, that one
12166 points to another pointer, that one does too, and we have no htab.
12167 If so, get a hash table. We check three levels deep to avoid
12168 the cost of the hash table if we don't need one. */
12169 if (POINTER_TYPE_P (TREE_TYPE (type))
12170 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
12171 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
12172 && !pset)
12174 result = walk_tree_without_duplicates (&TREE_TYPE (type),
12175 func, data);
12176 if (result)
12177 return result;
12179 break;
12182 /* fall through */
12184 case COMPLEX_TYPE:
12185 WALK_SUBTREE (TREE_TYPE (type));
12186 break;
12188 case METHOD_TYPE:
12189 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
12191 /* Fall through. */
12193 case FUNCTION_TYPE:
12194 WALK_SUBTREE (TREE_TYPE (type));
12196 tree arg;
12198 /* We never want to walk into default arguments. */
12199 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
12200 WALK_SUBTREE (TREE_VALUE (arg));
12202 break;
12204 case ARRAY_TYPE:
12205 /* Don't follow this node's type if it is a pointer, for fear that
12206 we'll have infinite recursion. If we have a PSET, then we
12207 need not fear. */
12208 if (pset
12209 || (!POINTER_TYPE_P (TREE_TYPE (type))
12210 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
12211 WALK_SUBTREE (TREE_TYPE (type));
12212 WALK_SUBTREE (TYPE_DOMAIN (type));
12213 break;
12215 case OFFSET_TYPE:
12216 WALK_SUBTREE (TREE_TYPE (type));
12217 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
12218 break;
12220 default:
12221 break;
12224 return NULL_TREE;
12227 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
12228 called with the DATA and the address of each sub-tree. If FUNC returns a
12229 non-NULL value, the traversal is stopped, and the value returned by FUNC
12230 is returned. If PSET is non-NULL it is used to record the nodes visited,
12231 and to avoid visiting a node more than once. */
12233 tree
12234 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
12235 hash_set<tree> *pset, walk_tree_lh lh)
12237 enum tree_code code;
12238 int walk_subtrees;
12239 tree result;
12241 #define WALK_SUBTREE_TAIL(NODE) \
12242 do \
12244 tp = & (NODE); \
12245 goto tail_recurse; \
12247 while (0)
12249 tail_recurse:
12250 /* Skip empty subtrees. */
12251 if (!*tp)
12252 return NULL_TREE;
12254 /* Don't walk the same tree twice, if the user has requested
12255 that we avoid doing so. */
12256 if (pset && pset->add (*tp))
12257 return NULL_TREE;
12259 /* Call the function. */
12260 walk_subtrees = 1;
12261 result = (*func) (tp, &walk_subtrees, data);
12263 /* If we found something, return it. */
12264 if (result)
12265 return result;
12267 code = TREE_CODE (*tp);
12269 /* Even if we didn't, FUNC may have decided that there was nothing
12270 interesting below this point in the tree. */
12271 if (!walk_subtrees)
12273 /* But we still need to check our siblings. */
12274 if (code == TREE_LIST)
12275 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12276 else if (code == OMP_CLAUSE)
12277 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12278 else
12279 return NULL_TREE;
12282 if (lh)
12284 result = (*lh) (tp, &walk_subtrees, func, data, pset);
12285 if (result || !walk_subtrees)
12286 return result;
12289 switch (code)
12291 case ERROR_MARK:
12292 case IDENTIFIER_NODE:
12293 case INTEGER_CST:
12294 case REAL_CST:
12295 case FIXED_CST:
12296 case VECTOR_CST:
12297 case STRING_CST:
12298 case BLOCK:
12299 case PLACEHOLDER_EXPR:
12300 case SSA_NAME:
12301 case FIELD_DECL:
12302 case RESULT_DECL:
12303 /* None of these have subtrees other than those already walked
12304 above. */
12305 break;
12307 case TREE_LIST:
12308 WALK_SUBTREE (TREE_VALUE (*tp));
12309 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12310 break;
12312 case TREE_VEC:
12314 int len = TREE_VEC_LENGTH (*tp);
12316 if (len == 0)
12317 break;
12319 /* Walk all elements but the first. */
12320 while (--len)
12321 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12323 /* Now walk the first one as a tail call. */
12324 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12327 case COMPLEX_CST:
12328 WALK_SUBTREE (TREE_REALPART (*tp));
12329 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12331 case CONSTRUCTOR:
12333 unsigned HOST_WIDE_INT idx;
12334 constructor_elt *ce;
12336 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12337 idx++)
12338 WALK_SUBTREE (ce->value);
12340 break;
12342 case SAVE_EXPR:
12343 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12345 case BIND_EXPR:
12347 tree decl;
12348 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12350 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
12351 into declarations that are just mentioned, rather than
12352 declared; they don't really belong to this part of the tree.
12353 And, we can see cycles: the initializer for a declaration
12354 can refer to the declaration itself. */
12355 WALK_SUBTREE (DECL_INITIAL (decl));
12356 WALK_SUBTREE (DECL_SIZE (decl));
12357 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12359 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12362 case STATEMENT_LIST:
12364 tree_stmt_iterator i;
12365 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12366 WALK_SUBTREE (*tsi_stmt_ptr (i));
12368 break;
12370 case OMP_CLAUSE:
12371 switch (OMP_CLAUSE_CODE (*tp))
12373 case OMP_CLAUSE_GANG:
12374 case OMP_CLAUSE__GRIDDIM_:
12375 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12376 /* FALLTHRU */
12378 case OMP_CLAUSE_ASYNC:
12379 case OMP_CLAUSE_WAIT:
12380 case OMP_CLAUSE_WORKER:
12381 case OMP_CLAUSE_VECTOR:
12382 case OMP_CLAUSE_NUM_GANGS:
12383 case OMP_CLAUSE_NUM_WORKERS:
12384 case OMP_CLAUSE_VECTOR_LENGTH:
12385 case OMP_CLAUSE_PRIVATE:
12386 case OMP_CLAUSE_SHARED:
12387 case OMP_CLAUSE_FIRSTPRIVATE:
12388 case OMP_CLAUSE_COPYIN:
12389 case OMP_CLAUSE_COPYPRIVATE:
12390 case OMP_CLAUSE_FINAL:
12391 case OMP_CLAUSE_IF:
12392 case OMP_CLAUSE_NUM_THREADS:
12393 case OMP_CLAUSE_SCHEDULE:
12394 case OMP_CLAUSE_UNIFORM:
12395 case OMP_CLAUSE_DEPEND:
12396 case OMP_CLAUSE_NONTEMPORAL:
12397 case OMP_CLAUSE_NUM_TEAMS:
12398 case OMP_CLAUSE_THREAD_LIMIT:
12399 case OMP_CLAUSE_DEVICE:
12400 case OMP_CLAUSE_DIST_SCHEDULE:
12401 case OMP_CLAUSE_SAFELEN:
12402 case OMP_CLAUSE_SIMDLEN:
12403 case OMP_CLAUSE_ORDERED:
12404 case OMP_CLAUSE_PRIORITY:
12405 case OMP_CLAUSE_GRAINSIZE:
12406 case OMP_CLAUSE_NUM_TASKS:
12407 case OMP_CLAUSE_HINT:
12408 case OMP_CLAUSE_TO_DECLARE:
12409 case OMP_CLAUSE_LINK:
12410 case OMP_CLAUSE_USE_DEVICE_PTR:
12411 case OMP_CLAUSE_USE_DEVICE_ADDR:
12412 case OMP_CLAUSE_IS_DEVICE_PTR:
12413 case OMP_CLAUSE_INCLUSIVE:
12414 case OMP_CLAUSE_EXCLUSIVE:
12415 case OMP_CLAUSE__LOOPTEMP_:
12416 case OMP_CLAUSE__REDUCTEMP_:
12417 case OMP_CLAUSE__CONDTEMP_:
12418 case OMP_CLAUSE__SCANTEMP_:
12419 case OMP_CLAUSE__SIMDUID_:
12420 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12421 /* FALLTHRU */
12423 case OMP_CLAUSE_INDEPENDENT:
12424 case OMP_CLAUSE_NOWAIT:
12425 case OMP_CLAUSE_DEFAULT:
12426 case OMP_CLAUSE_UNTIED:
12427 case OMP_CLAUSE_MERGEABLE:
12428 case OMP_CLAUSE_PROC_BIND:
12429 case OMP_CLAUSE_DEVICE_TYPE:
12430 case OMP_CLAUSE_INBRANCH:
12431 case OMP_CLAUSE_NOTINBRANCH:
12432 case OMP_CLAUSE_FOR:
12433 case OMP_CLAUSE_PARALLEL:
12434 case OMP_CLAUSE_SECTIONS:
12435 case OMP_CLAUSE_TASKGROUP:
12436 case OMP_CLAUSE_NOGROUP:
12437 case OMP_CLAUSE_THREADS:
12438 case OMP_CLAUSE_SIMD:
12439 case OMP_CLAUSE_DEFAULTMAP:
12440 case OMP_CLAUSE_ORDER:
12441 case OMP_CLAUSE_BIND:
12442 case OMP_CLAUSE_AUTO:
12443 case OMP_CLAUSE_SEQ:
12444 case OMP_CLAUSE_TILE:
12445 case OMP_CLAUSE__SIMT_:
12446 case OMP_CLAUSE_IF_PRESENT:
12447 case OMP_CLAUSE_FINALIZE:
12448 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12450 case OMP_CLAUSE_LASTPRIVATE:
12451 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12452 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12453 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12455 case OMP_CLAUSE_COLLAPSE:
12457 int i;
12458 for (i = 0; i < 3; i++)
12459 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12460 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12463 case OMP_CLAUSE_LINEAR:
12464 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12465 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12466 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12467 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12469 case OMP_CLAUSE_ALIGNED:
12470 case OMP_CLAUSE_FROM:
12471 case OMP_CLAUSE_TO:
12472 case OMP_CLAUSE_MAP:
12473 case OMP_CLAUSE__CACHE_:
12474 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12475 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12476 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12478 case OMP_CLAUSE_REDUCTION:
12479 case OMP_CLAUSE_TASK_REDUCTION:
12480 case OMP_CLAUSE_IN_REDUCTION:
12482 int i;
12483 for (i = 0; i < 5; i++)
12484 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12485 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12488 default:
12489 gcc_unreachable ();
12491 break;
12493 case TARGET_EXPR:
12495 int i, len;
12497 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12498 But, we only want to walk once. */
12499 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12500 for (i = 0; i < len; ++i)
12501 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12502 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12505 case DECL_EXPR:
12506 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12507 defining. We only want to walk into these fields of a type in this
12508 case and not in the general case of a mere reference to the type.
12510 The criterion is as follows: if the field can be an expression, it
12511 must be walked only here. This should be in keeping with the fields
12512 that are directly gimplified in gimplify_type_sizes in order for the
12513 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12514 variable-sized types.
12516 Note that DECLs get walked as part of processing the BIND_EXPR. */
12517 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12519 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12520 if (TREE_CODE (*type_p) == ERROR_MARK)
12521 return NULL_TREE;
12523 /* Call the function for the type. See if it returns anything or
12524 doesn't want us to continue. If we are to continue, walk both
12525 the normal fields and those for the declaration case. */
12526 result = (*func) (type_p, &walk_subtrees, data);
12527 if (result || !walk_subtrees)
12528 return result;
12530 /* But do not walk a pointed-to type since it may itself need to
12531 be walked in the declaration case if it isn't anonymous. */
12532 if (!POINTER_TYPE_P (*type_p))
12534 result = walk_type_fields (*type_p, func, data, pset, lh);
12535 if (result)
12536 return result;
12539 /* If this is a record type, also walk the fields. */
12540 if (RECORD_OR_UNION_TYPE_P (*type_p))
12542 tree field;
12544 for (field = TYPE_FIELDS (*type_p); field;
12545 field = DECL_CHAIN (field))
12547 /* We'd like to look at the type of the field, but we can
12548 easily get infinite recursion. So assume it's pointed
12549 to elsewhere in the tree. Also, ignore things that
12550 aren't fields. */
12551 if (TREE_CODE (field) != FIELD_DECL)
12552 continue;
12554 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12555 WALK_SUBTREE (DECL_SIZE (field));
12556 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12557 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12558 WALK_SUBTREE (DECL_QUALIFIER (field));
12562 /* Same for scalar types. */
12563 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12564 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12565 || TREE_CODE (*type_p) == INTEGER_TYPE
12566 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12567 || TREE_CODE (*type_p) == REAL_TYPE)
12569 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12570 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12573 WALK_SUBTREE (TYPE_SIZE (*type_p));
12574 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12576 /* FALLTHRU */
12578 default:
12579 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12581 int i, len;
12583 /* Walk over all the sub-trees of this operand. */
12584 len = TREE_OPERAND_LENGTH (*tp);
12586 /* Go through the subtrees. We need to do this in forward order so
12587 that the scope of a FOR_EXPR is handled properly. */
12588 if (len)
12590 for (i = 0; i < len - 1; ++i)
12591 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12592 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12595 /* If this is a type, walk the needed fields in the type. */
12596 else if (TYPE_P (*tp))
12597 return walk_type_fields (*tp, func, data, pset, lh);
12598 break;
12601 /* We didn't find what we were looking for. */
12602 return NULL_TREE;
12604 #undef WALK_SUBTREE_TAIL
12606 #undef WALK_SUBTREE
12608 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12610 tree
12611 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12612 walk_tree_lh lh)
12614 tree result;
12616 hash_set<tree> pset;
12617 result = walk_tree_1 (tp, func, data, &pset, lh);
12618 return result;
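/* Usage sketch (hypothetical caller): a minimal callback that stops the
   walk at the first LABEL_DECL and skips walking into types, driven by
   the duplicate-avoiding entry point above:

     static tree
     find_label_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == LABEL_DECL)
         return *tp;
       if (TYPE_P (*tp))
         *walk_subtrees = 0;
       return NULL_TREE;
     }

     tree label = walk_tree_without_duplicates_1 (&body, find_label_r,
                                                  NULL, NULL);

   BODY and find_label_r are assumptions of this sketch; most callers go
   through the walk_tree_without_duplicates macro, which supplies the
   NULL language hook.  */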
12622 tree
12623 tree_block (tree t)
12625 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12627 if (IS_EXPR_CODE_CLASS (c))
12628 return LOCATION_BLOCK (t->exp.locus);
12629 gcc_unreachable ();
12630 return NULL;
12633 void
12634 tree_set_block (tree t, tree b)
12636 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12638 if (IS_EXPR_CODE_CLASS (c))
12640 t->exp.locus = set_block (t->exp.locus, b);
12642 else
12643 gcc_unreachable ();
12646 /* Create a nameless artificial label and put it in the current
12647 function context. The label has a location of LOC. Returns the
12648 newly created label. */
12650 tree
12651 create_artificial_label (location_t loc)
12653 tree lab = build_decl (loc,
12654 LABEL_DECL, NULL_TREE, void_type_node);
12656 DECL_ARTIFICIAL (lab) = 1;
12657 DECL_IGNORED_P (lab) = 1;
12658 DECL_CONTEXT (lab) = current_function_decl;
12659 return lab;
12662 /* Given a tree, try to return a useful variable name that we can use
12663 to prefix a temporary that is being assigned the value of the tree.
12664 I.e. given <temp> = &A, return A. */
12666 const char *
12667 get_name (tree t)
12669 tree stripped_decl;
12671 stripped_decl = t;
12672 STRIP_NOPS (stripped_decl);
12673 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12674 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12675 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12677 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12678 if (!name)
12679 return NULL;
12680 return IDENTIFIER_POINTER (name);
12682 else
12684 switch (TREE_CODE (stripped_decl))
12686 case ADDR_EXPR:
12687 return get_name (TREE_OPERAND (stripped_decl, 0));
12688 default:
12689 return NULL;
12694 /* Return true if FNTYPE has a variable argument list. */
12696 bool
12697 stdarg_p (const_tree fntype)
12699 function_args_iterator args_iter;
12700 tree n = NULL_TREE, t;
12702 if (!fntype)
12703 return false;
12705 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12707 n = t;
12710 return n != NULL_TREE && n != void_type_node;
12713 /* Return true if FNTYPE has a prototype. */
12715 bool
12716 prototype_p (const_tree fntype)
12718 tree t;
12720 gcc_assert (fntype != NULL_TREE);
12722 t = TYPE_ARG_TYPES (fntype);
12723 return (t != NULL_TREE);
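/* Usage sketch (hypothetical caller): for an assumed FUNCTION_DECL
   FNDECL, the two predicates above distinguish variadic and unprototyped
   functions:

     bool variadic = stdarg_p (TREE_TYPE (fndecl));
     bool prototyped = prototype_p (TREE_TYPE (fndecl));

   For "int f (int, ...)" both are true; for an old-style "int g ()"
   declaration both are false.  */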
12726 /* If BLOCK is inlined from an __attribute__((__artificial__))
12727 routine, return a pointer to the location from which it has been
12728 called. */
12729 location_t *
12730 block_nonartificial_location (tree block)
12732 location_t *ret = NULL;
12734 while (block && TREE_CODE (block) == BLOCK
12735 && BLOCK_ABSTRACT_ORIGIN (block))
12737 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12738 if (TREE_CODE (ao) == FUNCTION_DECL)
12740 /* If AO is an artificial inline, point RET to the
12741 call site locus at which it has been inlined and continue
12742 the loop, in case AO's caller is also an artificial
12743 inline. */
12744 if (DECL_DECLARED_INLINE_P (ao)
12745 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12746 ret = &BLOCK_SOURCE_LOCATION (block);
12747 else
12748 break;
12750 else if (TREE_CODE (ao) != BLOCK)
12751 break;
12753 block = BLOCK_SUPERCONTEXT (block);
12755 return ret;
12759 /* If EXP is inlined from an __attribute__((__artificial__))
12760 function, return the location of the original call expression. */
12762 location_t
12763 tree_nonartificial_location (tree exp)
12765 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12767 if (loc)
12768 return *loc;
12769 else
12770 return EXPR_LOCATION (exp);
12774 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12775 nodes. */
12777 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12779 hashval_t
12780 cl_option_hasher::hash (tree x)
12782 const_tree const t = x;
12783 const char *p;
12784 size_t i;
12785 size_t len = 0;
12786 hashval_t hash = 0;
12788 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12790 p = (const char *)TREE_OPTIMIZATION (t);
12791 len = sizeof (struct cl_optimization);
12794 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12795 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12797 else
12798 gcc_unreachable ();
12800 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12801 something else. */
12802 for (i = 0; i < len; i++)
12803 if (p[i])
12804 hash = (hash << 4) ^ ((i << 2) | p[i]);
12806 return hash;
12809 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12810 TARGET_OPTION tree node) is the same as that given by *Y, which is a
12811 node of the same kind. */
12813 bool
12814 cl_option_hasher::equal (tree x, tree y)
12816 const_tree const xt = x;
12817 const_tree const yt = y;
12819 if (TREE_CODE (xt) != TREE_CODE (yt))
12820 return 0;
12822 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12823 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12824 TREE_OPTIMIZATION (yt));
12825 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12826 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12827 TREE_TARGET_OPTION (yt));
12828 else
12829 gcc_unreachable ();
12832 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12834 tree
12835 build_optimization_node (struct gcc_options *opts)
12837 tree t;
12839 /* Use the cache of optimization nodes. */
12841 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12842 opts);
12844 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12845 t = *slot;
12846 if (!t)
12848 /* Insert this one into the hash table. */
12849 t = cl_optimization_node;
12850 *slot = t;
12852 /* Make a new node for next time round. */
12853 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12856 return t;
12859 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12861 tree
12862 build_target_option_node (struct gcc_options *opts)
12864 tree t;
12866 /* Use the cache of target option nodes. */
12868 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12869 opts);
12871 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12872 t = *slot;
12873 if (!t)
12875 /* Insert this one into the hash table. */
12876 t = cl_target_option_node;
12877 *slot = t;
12879 /* Make a new node for next time round. */
12880 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12883 return t;
12886 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12887 so that they aren't saved during PCH writing. */
12889 void
12890 prepare_target_option_nodes_for_pch (void)
12892 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12893 for (; iter != cl_option_hash_table->end (); ++iter)
12894 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12895 TREE_TARGET_GLOBALS (*iter) = NULL;
12898 /* Determine the "ultimate origin" of a block. */
12900 tree
12901 block_ultimate_origin (const_tree block)
12903 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12905 if (origin == NULL_TREE)
12906 return NULL_TREE;
12907 else
12909 gcc_checking_assert ((DECL_P (origin)
12910 && DECL_ORIGIN (origin) == origin)
12911 || BLOCK_ORIGIN (origin) == origin);
12912 return origin;
12916 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12917 no instruction. */
12919 bool
12920 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12922 /* Do not strip casts into or out of differing address spaces. */
12923 if (POINTER_TYPE_P (outer_type)
12924 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12926 if (!POINTER_TYPE_P (inner_type)
12927 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12928 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12929 return false;
12931 else if (POINTER_TYPE_P (inner_type)
12932 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12934 /* We already know that outer_type is not a pointer with
12935 a non-generic address space. */
12936 return false;
12939 /* Use precision rather than machine mode when we can, which gives
12940 the correct answer even for submode (bit-field) types. */
12941 if ((INTEGRAL_TYPE_P (outer_type)
12942 || POINTER_TYPE_P (outer_type)
12943 || TREE_CODE (outer_type) == OFFSET_TYPE)
12944 && (INTEGRAL_TYPE_P (inner_type)
12945 || POINTER_TYPE_P (inner_type)
12946 || TREE_CODE (inner_type) == OFFSET_TYPE))
12947 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12949 /* Otherwise fall back on comparing machine modes (e.g. for
12950 aggregate types, floats). */
12951 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
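/* Usage sketch (hypothetical caller): a signedness-only change between
   integer types of equal precision is a nop by the rule above:

     bool nop = tree_nop_conversion_p (integer_type_node,
                                       unsigned_type_node);

   NOP is true here, whereas a conversion that changes precision (say int
   to long long on typical targets) is not.  */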
12954 /* Return true iff conversion in EXP generates no instruction. Mark
12955 it inline so that we fully inline into the stripping functions even
12956 though we have two uses of this function. */
12958 static inline bool
12959 tree_nop_conversion (const_tree exp)
12961 tree outer_type, inner_type;
12963 if (location_wrapper_p (exp))
12964 return true;
12965 if (!CONVERT_EXPR_P (exp)
12966 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12967 return false;
12969 outer_type = TREE_TYPE (exp);
12970 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12971 if (!inner_type || inner_type == error_mark_node)
12972 return false;
12974 return tree_nop_conversion_p (outer_type, inner_type);
12977 /* Return true iff conversion in EXP generates no instruction. Don't
12978 consider conversions changing the signedness. */
12980 static bool
12981 tree_sign_nop_conversion (const_tree exp)
12983 tree outer_type, inner_type;
12985 if (!tree_nop_conversion (exp))
12986 return false;
12988 outer_type = TREE_TYPE (exp);
12989 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12991 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12992 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12995 /* Strip conversions from EXP according to tree_nop_conversion and
12996 return the resulting expression. */
12998 tree
12999 tree_strip_nop_conversions (tree exp)
13001 while (tree_nop_conversion (exp))
13002 exp = TREE_OPERAND (exp, 0);
13003 return exp;
13006 /* Strip conversions from EXP according to tree_sign_nop_conversion
13007 and return the resulting expression. */
13009 tree
13010 tree_strip_sign_nop_conversions (tree exp)
13012 while (tree_sign_nop_conversion (exp))
13013 exp = TREE_OPERAND (exp, 0);
13014 return exp;
13017 /* Avoid any floating point extensions from EXP. */
13018 tree
13019 strip_float_extensions (tree exp)
13021 tree sub, expt, subt;
13023 /* For a floating point constant, look up the narrowest type that can hold
13024 it properly and handle it like (type)(narrowest_type)constant.
13025 This way we can optimize for instance a=a*2.0 where "a" is float
13026 but 2.0 is a double constant. */
13027 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
13029 REAL_VALUE_TYPE orig;
13030 tree type = NULL;
13032 orig = TREE_REAL_CST (exp);
13033 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
13034 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
13035 type = float_type_node;
13036 else if (TYPE_PRECISION (TREE_TYPE (exp))
13037 > TYPE_PRECISION (double_type_node)
13038 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
13039 type = double_type_node;
13040 if (type)
13041 return build_real_truncate (type, orig);
13044 if (!CONVERT_EXPR_P (exp))
13045 return exp;
13047 sub = TREE_OPERAND (exp, 0);
13048 subt = TREE_TYPE (sub);
13049 expt = TREE_TYPE (exp);
13051 if (!FLOAT_TYPE_P (subt))
13052 return exp;
13054 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
13055 return exp;
13057 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
13058 return exp;
13060 return strip_float_extensions (sub);
13063 /* Strip out all handled components that produce invariant
13064 offsets. */
13066 const_tree
13067 strip_invariant_refs (const_tree op)
13069 while (handled_component_p (op))
13071 switch (TREE_CODE (op))
13073 case ARRAY_REF:
13074 case ARRAY_RANGE_REF:
13075 if (!is_gimple_constant (TREE_OPERAND (op, 1))
13076 || TREE_OPERAND (op, 2) != NULL_TREE
13077 || TREE_OPERAND (op, 3) != NULL_TREE)
13078 return NULL;
13079 break;
13081 case COMPONENT_REF:
13082 if (TREE_OPERAND (op, 2) != NULL_TREE)
13083 return NULL;
13084 break;
13086 default:;
13088 op = TREE_OPERAND (op, 0);
13091 return op;
13094 static GTY(()) tree gcc_eh_personality_decl;
13096 /* Return the GCC personality function decl. */
13098 tree
13099 lhd_gcc_personality (void)
13101 if (!gcc_eh_personality_decl)
13102 gcc_eh_personality_decl = build_personality_function ("gcc");
13103 return gcc_eh_personality_decl;
13106 /* TARGET is the call target of a GIMPLE call statement
13107 (obtained by gimple_call_fn). Return true if it is an
13108 OBJ_TYPE_REF representing a virtual call to a C++ method.
13109 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
13110 through a cast, where the middle-end devirtualization machinery
13111 can't apply.) */
13113 bool
13114 virtual_method_call_p (const_tree target)
13116 if (TREE_CODE (target) != OBJ_TYPE_REF)
13117 return false;
13118 tree t = TREE_TYPE (target);
13119 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
13120 t = TREE_TYPE (t);
13121 if (TREE_CODE (t) == FUNCTION_TYPE)
13122 return false;
13123 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
13124 /* If we do not have BINFO associated, it means that type was built
13125 without devirtualization enabled. Do not consider this a virtual
13126 call. */
13127 if (!TYPE_BINFO (obj_type_ref_class (target)))
13128 return false;
13129 return true;
13132 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
13134 static tree
13135 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
13137 unsigned int i;
13138 tree base_binfo, b;
13140 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13141 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
13142 && types_same_for_odr (TREE_TYPE (base_binfo), type))
13143 return base_binfo;
13144 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
13145 return b;
13146 return NULL;
13149 /* Try to find a base info of BINFO that would have its field decl at offset
13150 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
13151 found, return it, otherwise return NULL_TREE. */
13153 tree
13154 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
13156 tree type = BINFO_TYPE (binfo);
13158 while (true)
13160 HOST_WIDE_INT pos, size;
13161 tree fld;
13162 int i;
13164 if (types_same_for_odr (type, expected_type))
13165 return binfo;
13166 if (maybe_lt (offset, 0))
13167 return NULL_TREE;
13169 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
13171 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
13172 continue;
13174 pos = int_bit_position (fld);
13175 size = tree_to_uhwi (DECL_SIZE (fld));
13176 if (known_in_range_p (offset, pos, size))
13177 break;
13179 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
13180 return NULL_TREE;
13182 /* Offset 0 indicates the primary base, whose vtable contents are
13183 represented in the binfo for the derived class. */
13184 else if (maybe_ne (offset, 0))
13186 tree found_binfo = NULL, base_binfo;
13187 /* Offsets in BINFO are in bytes relative to the whole structure
13188 while POS is in bits relative to the containing field. */
13189 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
13190 / BITS_PER_UNIT);
13192 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13193 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
13194 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
13196 found_binfo = base_binfo;
13197 break;
13199 if (found_binfo)
13200 binfo = found_binfo;
13201 else
13202 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
13203 binfo_offset);
13206 type = TREE_TYPE (fld);
13207 offset -= pos;
13211 /* Returns true if X is a typedef decl. */
13213 bool
13214 is_typedef_decl (const_tree x)
13216 return (x && TREE_CODE (x) == TYPE_DECL
13217 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
13220 /* Returns true iff TYPE is a type variant created for a typedef. */
13222 bool
13223 typedef_variant_p (const_tree type)
13225 return is_typedef_decl (TYPE_NAME (type));
13228 /* A class to handle converting a string that might contain
13229 control characters (e.g. newline, form-feed, etc.) into one
13230 which contains escape sequences instead. */
13232 class escaped_string
13234 public:
13235 escaped_string () { m_owned = false; m_str = NULL; };
13236 ~escaped_string () { if (m_owned) free (m_str); }
13237 operator const char *() const { return (const char *) m_str; }
13238 void escape (const char *);
13239 private:
13240 char *m_str;
13241 bool m_owned;
13244 /* PR 84195: Replace control characters in "unescaped" with their
13245 escaped equivalents. Allow newlines if -fmessage-length has
13246 been set to a non-zero value. This is done here, rather than
13247 where the attribute is recorded, as the message length can
13248 change between these two locations. */
13250 void
13251 escaped_string::escape (const char *unescaped)
13253 char *escaped;
13254 size_t i, new_i, len;
13256 if (m_owned)
13257 free (m_str);
13259 m_str = const_cast<char *> (unescaped);
13260 m_owned = false;
13262 if (unescaped == NULL || *unescaped == 0)
13263 return;
13265 len = strlen (unescaped);
13266 escaped = NULL;
13267 new_i = 0;
13269 for (i = 0; i < len; i++)
13271 char c = unescaped[i];
13273 if (!ISCNTRL (c))
13275 if (escaped)
13276 escaped[new_i++] = c;
13277 continue;
13280 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
13282 if (escaped == NULL)
13284 /* We only allocate space for a new string if we
13285 actually encounter a control character that
13286 needs replacing. */
13287 escaped = (char *) xmalloc (len * 2 + 1);
13288 strncpy (escaped, unescaped, i);
13289 new_i = i;
13292 escaped[new_i++] = '\\';
13294 switch (c)
13296 case '\a': escaped[new_i++] = 'a'; break;
13297 case '\b': escaped[new_i++] = 'b'; break;
13298 case '\f': escaped[new_i++] = 'f'; break;
13299 case '\n': escaped[new_i++] = 'n'; break;
13300 case '\r': escaped[new_i++] = 'r'; break;
13301 case '\t': escaped[new_i++] = 't'; break;
13302 case '\v': escaped[new_i++] = 'v'; break;
13303 default: escaped[new_i++] = '?'; break;
13306 else if (escaped)
13307 escaped[new_i++] = c;
13310 if (escaped)
13312 escaped[new_i] = 0;
13313 m_str = escaped;
13314 m_owned = true;
13318 /* Warn about a use of an identifier which was marked deprecated. Returns
13319 whether a warning was given. */
13321 bool
13322 warn_deprecated_use (tree node, tree attr)
13324 escaped_string msg;
13326 if (node == 0 || !warn_deprecated_decl)
13327 return false;
13329 if (!attr)
13331 if (DECL_P (node))
13332 attr = DECL_ATTRIBUTES (node);
13333 else if (TYPE_P (node))
13335 tree decl = TYPE_STUB_DECL (node);
13336 if (decl)
13337 attr = lookup_attribute ("deprecated",
13338 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
13342 if (attr)
13343 attr = lookup_attribute ("deprecated", attr);
13345 if (attr)
13346 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
13348 bool w = false;
13349 if (DECL_P (node))
13351 auto_diagnostic_group d;
13352 if (msg)
13353 w = warning (OPT_Wdeprecated_declarations,
13354 "%qD is deprecated: %s", node, (const char *) msg);
13355 else
13356 w = warning (OPT_Wdeprecated_declarations,
13357 "%qD is deprecated", node);
13358 if (w)
13359 inform (DECL_SOURCE_LOCATION (node), "declared here");
13361 else if (TYPE_P (node))
13363 tree what = NULL_TREE;
13364 tree decl = TYPE_STUB_DECL (node);
13366 if (TYPE_NAME (node))
13368 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
13369 what = TYPE_NAME (node);
13370 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
13371 && DECL_NAME (TYPE_NAME (node)))
13372 what = DECL_NAME (TYPE_NAME (node));
13375 auto_diagnostic_group d;
13376 if (what)
13378 if (msg)
13379 w = warning (OPT_Wdeprecated_declarations,
13380 "%qE is deprecated: %s", what, (const char *) msg);
13381 else
13382 w = warning (OPT_Wdeprecated_declarations,
13383 "%qE is deprecated", what);
13385 else
13387 if (msg)
13388 w = warning (OPT_Wdeprecated_declarations,
13389 "type is deprecated: %s", (const char *) msg);
13390 else
13391 w = warning (OPT_Wdeprecated_declarations,
13392 "type is deprecated");
13395 if (w && decl)
13396 inform (DECL_SOURCE_LOCATION (decl), "declared here");
13399 return w;
13402 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13403 somewhere in it. */
13405 bool
13406 contains_bitfld_component_ref_p (const_tree ref)
13408 while (handled_component_p (ref))
13410 if (TREE_CODE (ref) == COMPONENT_REF
13411 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13412 return true;
13413 ref = TREE_OPERAND (ref, 0);
13416 return false;
13419 /* Try to determine whether a TRY_CATCH expression can fall through.
13420 This is a subroutine of block_may_fallthru. */
13422 static bool
13423 try_catch_may_fallthru (const_tree stmt)
13425 tree_stmt_iterator i;
13427 /* If the TRY block can fall through, the whole TRY_CATCH can
13428 fall through. */
13429 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13430 return true;
13432 i = tsi_start (TREE_OPERAND (stmt, 1));
13433 switch (TREE_CODE (tsi_stmt (i)))
13435 case CATCH_EXPR:
13436 /* We expect to see a sequence of CATCH_EXPR trees, each with a
13437 catch expression and a body. The whole TRY_CATCH may fall
13438 through iff any of the catch bodies falls through. */
13439 for (; !tsi_end_p (i); tsi_next (&i))
13441 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13442 return true;
13444 return false;
13446 case EH_FILTER_EXPR:
13447 /* The exception filter expression only matters if there is an
13448 exception. If the exception does not match EH_FILTER_TYPES,
13449 we will execute EH_FILTER_FAILURE, and we will fall through
13450 if that falls through. If the exception does match
13451 EH_FILTER_TYPES, the stack unwinder will continue up the
13452 stack, so we will not fall through. We don't know whether we
13453 will throw an exception which matches EH_FILTER_TYPES or not,
13454 so we just ignore EH_FILTER_TYPES and assume that we might
13455 throw an exception which doesn't match. */
13456 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13458 default:
13459 /* This case represents statements to be executed when an
13460 exception occurs. Those statements are implicitly followed
13461 by a RESX statement to resume execution after the exception.
13462 So in this case the TRY_CATCH never falls through. */
13463 return false;
13467 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13468 need not be 100% accurate; simply be conservative and return true if we
13469 don't know. This is used only to avoid stupidly generating extra code.
13470 If we're wrong, we'll just delete the extra code later. */
13472 bool
13473 block_may_fallthru (const_tree block)
13475 /* This CONST_CAST is okay because expr_last returns its argument
13476 unmodified and we assign it to a const_tree. */
13477 const_tree stmt = expr_last (CONST_CAST_TREE (block));
13479 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13481 case GOTO_EXPR:
13482 case RETURN_EXPR:
13483 /* Easy cases. If the last statement of the block implies
13484 control transfer, then we can't fall through. */
13485 return false;
13487 case SWITCH_EXPR:
13488 /* If there is a default: label or case labels cover all possible
13489 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13490 to some case label in all cases and all we care is whether the
13491 SWITCH_BODY falls through. */
13492 if (SWITCH_ALL_CASES_P (stmt))
13493 return block_may_fallthru (SWITCH_BODY (stmt));
13494 return true;
13496 case COND_EXPR:
13497 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13498 return true;
13499 return block_may_fallthru (COND_EXPR_ELSE (stmt));
13501 case BIND_EXPR:
13502 return block_may_fallthru (BIND_EXPR_BODY (stmt));
13504 case TRY_CATCH_EXPR:
13505 return try_catch_may_fallthru (stmt);
13507 case TRY_FINALLY_EXPR:
13508 /* The finally clause is always executed after the try clause,
13509 so if it does not fall through, then the try-finally will not
13510 fall through. Otherwise, if the try clause does not fall
13511 through, then when the finally clause falls through it will
13512 resume execution wherever the try clause was going. So the
13513 whole try-finally will only fall through if both the try
13514 clause and the finally clause fall through. */
13515 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13516 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13518 case EH_ELSE_EXPR:
13519 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13521 case MODIFY_EXPR:
13522 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13523 stmt = TREE_OPERAND (stmt, 1);
13524 else
13525 return true;
13526 /* FALLTHRU */
13528 case CALL_EXPR:
13529 /* Functions that do not return do not fall through. */
13530 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13532 case CLEANUP_POINT_EXPR:
13533 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13535 case TARGET_EXPR:
13536 return block_may_fallthru (TREE_OPERAND (stmt, 1));
13538 case ERROR_MARK:
13539 return true;
13541 default:
13542 return lang_hooks.block_may_fallthru (stmt);
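/* Usage sketch (hypothetical caller): a front end deciding whether a
   trailing jump is needed after an assumed statement tree BODY can test

     if (block_may_fallthru (body))
       ...emit the jump or other fallthrough handling...

   relying on the conservative "return true if we don't know" behaviour
   documented above.  */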
13546 /* True if we are using EH to handle cleanups. */
13547 static bool using_eh_for_cleanups_flag = false;
13549 /* This routine is called from front ends to indicate eh should be used for
13550 cleanups. */
13551 void
13552 using_eh_for_cleanups (void)
13554 using_eh_for_cleanups_flag = true;
13557 /* Query whether EH is used for cleanups. */
13558 bool
13559 using_eh_for_cleanups_p (void)
13561 return using_eh_for_cleanups_flag;
13564 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
13565 const char *
13566 get_tree_code_name (enum tree_code code)
13568 const char *invalid = "<invalid tree code>";
13570 if (code >= MAX_TREE_CODES)
13572 if (code == 0xa5a5)
13573 return "ggc_freed";
13574 return invalid;
13577 return tree_code_name[code];
13580 /* Drops the TREE_OVERFLOW flag from T. */
13582 tree
13583 drop_tree_overflow (tree t)
13585 gcc_checking_assert (TREE_OVERFLOW (t));
13587 /* For tree codes with a sharing machinery re-build the result. */
13588 if (poly_int_tree_p (t))
13589 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13591 /* For VECTOR_CST, remove the overflow bits from the encoded elements
13592 and canonicalize the result. */
13593 if (TREE_CODE (t) == VECTOR_CST)
13595 tree_vector_builder builder;
13596 builder.new_unary_operation (TREE_TYPE (t), t, true);
13597 unsigned int count = builder.encoded_nelts ();
13598 for (unsigned int i = 0; i < count; ++i)
13600 tree elt = VECTOR_CST_ELT (t, i);
13601 if (TREE_OVERFLOW (elt))
13602 elt = drop_tree_overflow (elt);
13603 builder.quick_push (elt);
13605 return builder.build ();
13608 /* Otherwise, as all tcc_constants are possibly shared, copy the node
13609 and drop the flag. */
13610 t = copy_node (t);
13611 TREE_OVERFLOW (t) = 0;
13613 /* For constants that contain nested constants, drop the flag
13614 from those as well. */
13615 if (TREE_CODE (t) == COMPLEX_CST)
13617 if (TREE_OVERFLOW (TREE_REALPART (t)))
13618 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13619 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13620 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13623 return t;
13626 /* Given a memory reference expression T, return its base address.
13627 The base address of a memory reference expression is the main
13628 object being referenced. For instance, the base address for
13629 'array[i].fld[j]' is 'array'. You can think of this as stripping
13630 away the offset part from a memory address.
13632 This function calls handled_component_p to strip away all the inner
13633 parts of the memory reference until it reaches the base object. */
13635 tree
13636 get_base_address (tree t)
13638 while (handled_component_p (t))
13639 t = TREE_OPERAND (t, 0);
13641 if ((TREE_CODE (t) == MEM_REF
13642 || TREE_CODE (t) == TARGET_MEM_REF)
13643 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13644 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13646 /* ??? Either the alias oracle or all callers need to properly deal
13647 with WITH_SIZE_EXPRs before we can look through those. */
13648 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13649 return NULL_TREE;
13651 return t;
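/* A minimal usage sketch (hypothetical caller, REF being some memory
   reference tree):

     tree base = get_base_address (ref);
     if (base && DECL_P (base))
       ...  REF accesses the declared object BASE; e.g. for the C
            expression 'array[i].fld[j]' BASE is the VAR_DECL of 'array' ...
     else if (base && TREE_CODE (base) == MEM_REF)
       ...  REF is an access through a pointer dereference ...

   A NULL_TREE result (currently only for WITH_SIZE_EXPR) means the base
   could not be determined.  */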
13654 /* Return a tree of sizetype representing the size, in bytes, of the element
13655 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13657 tree
13658 array_ref_element_size (tree exp)
13660 tree aligned_size = TREE_OPERAND (exp, 3);
13661 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13662 location_t loc = EXPR_LOCATION (exp);
13664 /* If a size was specified in the ARRAY_REF, it's the size measured
13665 in alignment units of the element type. So multiply by that value. */
13666 if (aligned_size)
13668 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13669 sizetype from another type of the same width and signedness. */
13670 if (TREE_TYPE (aligned_size) != sizetype)
13671 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13672 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13673 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13676 /* Otherwise, take the size from that of the element type. Substitute
13677 any PLACEHOLDER_EXPR that we have. */
13678 else
13679 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13682 /* Return a tree representing the lower bound of the array mentioned in
13683 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13685 tree
13686 array_ref_low_bound (tree exp)
13688 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13690 /* If a lower bound is specified in EXP, use it. */
13691 if (TREE_OPERAND (exp, 2))
13692 return TREE_OPERAND (exp, 2);
13694 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13695 substituting for a PLACEHOLDER_EXPR as needed. */
13696 if (domain_type && TYPE_MIN_VALUE (domain_type))
13697 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13699 /* Otherwise, return a zero of the appropriate type. */
13700 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13703 /* Return a tree representing the upper bound of the array mentioned in
13704 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13706 tree
13707 array_ref_up_bound (tree exp)
13709 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13711 /* If there is a domain type and it has an upper bound, use it, substituting
13712 for a PLACEHOLDER_EXPR as needed. */
13713 if (domain_type && TYPE_MAX_VALUE (domain_type))
13714 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13716 /* Otherwise fail. */
13717 return NULL_TREE;
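/* A small worked example (a sketch): for the C declaration

     int a[2][10];

   and the ARRAY_REF representing 'a[1][j]', array_ref_element_size
   returns a sizetype constant of 4 (assuming a 32-bit int), while
   array_ref_low_bound returns 0 and array_ref_up_bound returns 9, the
   latter two in the domain's index type.  Operands 2 and 3 of the
   ARRAY_REF, when present, supply the lower bound and the
   alignment-scaled element size instead.  */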
13720 /* Returns true if REF is an array reference or a component reference
13721 to an array at the end of a structure.
13722 If this is the case, the array may be allocated larger
13723 than its upper bound implies. */
13725 bool
13726 array_at_struct_end_p (tree ref)
13728 tree atype;
13730 if (TREE_CODE (ref) == ARRAY_REF
13731 || TREE_CODE (ref) == ARRAY_RANGE_REF)
13733 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13734 ref = TREE_OPERAND (ref, 0);
13736 else if (TREE_CODE (ref) == COMPONENT_REF
13737 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13738 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13739 else
13740 return false;
13742 if (TREE_CODE (ref) == STRING_CST)
13743 return false;
13745 tree ref_to_array = ref;
13746 while (handled_component_p (ref))
13748 /* If the reference chain contains a component reference to a
13749 non-union type and another field follows, the reference
13750 is not at the end of a structure. */
13751 if (TREE_CODE (ref) == COMPONENT_REF)
13753 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13755 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13756 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13757 nextf = DECL_CHAIN (nextf);
13758 if (nextf)
13759 return false;
13762 /* If we have a multi-dimensional array we do not consider
13763 a non-innermost dimension a flexible array even if the whole
13764 multi-dimensional array is at struct end.
13765 The same holds for an array of aggregates with a trailing array
13766 member. */
13767 else if (TREE_CODE (ref) == ARRAY_REF)
13768 return false;
13769 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13771 /* If we view an underlying object as something else, then what we
13772 gathered up to now is what we have to rely on. */
13773 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13774 break;
13775 else
13776 gcc_unreachable ();
13778 ref = TREE_OPERAND (ref, 0);
13781 /* The array is now known to be at struct end. Treat flexible arrays as
13782 always subject to extension, even into mere padding constrained by
13783 an underlying decl. */
13784 if (! TYPE_SIZE (atype)
13785 || ! TYPE_DOMAIN (atype)
13786 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13787 return true;
13789 if (TREE_CODE (ref) == MEM_REF
13790 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13791 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13793 /* If the reference is based on a declared entity, the size of the array
13794 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
13795 if (DECL_P (ref)
13796 && !(flag_unconstrained_commons
13797 && VAR_P (ref) && DECL_COMMON (ref))
13798 && DECL_SIZE_UNIT (ref)
13799 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13801 /* Check whether the array domain covers all of the available
13802 padding. */
13803 poly_int64 offset;
13804 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13805 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13806 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13807 return true;
13808 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13809 return true;
13811 /* If at least one extra element fits it is a flexarray. */
13812 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13813 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13814 + 2)
13815 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13816 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13817 return true;
13819 return false;
13822 return true;
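/* Illustrative C inputs (a sketch): given

     struct A { int n; int tail[]; };
     struct C { int tail[4]; int n; };

   a reference 'pa->tail[i]' (with PA of type 'struct A *') is at struct
   end and the function returns true, while 'pc->tail[i]' returns false
   because the field 'n' follows the array.  For references based on a
   declared object of known size, the array domain is additionally
   checked against the available space as described above.  */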
13825 /* Return a tree representing the offset, in bytes, of the field referenced
13826 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13828 tree
13829 component_ref_field_offset (tree exp)
13831 tree aligned_offset = TREE_OPERAND (exp, 2);
13832 tree field = TREE_OPERAND (exp, 1);
13833 location_t loc = EXPR_LOCATION (exp);
13835 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13836 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13837 value. */
13838 if (aligned_offset)
13840 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13841 sizetype from another type of the same width and signedness. */
13842 if (TREE_TYPE (aligned_offset) != sizetype)
13843 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13844 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13845 size_int (DECL_OFFSET_ALIGN (field)
13846 / BITS_PER_UNIT));
13849 /* Otherwise, take the offset from that of the field. Substitute
13850 any PLACEHOLDER_EXPR that we have. */
13851 else
13852 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13855 /* Determines the size of the member referenced by the COMPONENT_REF
13856 REF, using its initializer expression if necessary in order to
13857 determine the size of an initialized flexible array member.
13858 Returns the size (which might be zero for an object with
13859 an uninitialized flexible array member) or null if the size
13860 cannot be determined. */
13862 tree
13863 component_ref_size (tree ref)
13865 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13867 tree member = TREE_OPERAND (ref, 1);
13869 /* If the member is not an array, or is not last, or is an array with
13870 more than one element, return its size. Otherwise it's either
13871 a bona fide flexible array member, or a zero-length array member,
13872 or an array of length one treated as such. */
13873 tree size = DECL_SIZE_UNIT (member);
13874 if (size)
13876 tree memtype = TREE_TYPE (member);
13877 if (TREE_CODE (memtype) != ARRAY_TYPE
13878 || !array_at_struct_end_p (ref))
13879 return size;
13881 if (!integer_zerop (size))
13882 if (tree dom = TYPE_DOMAIN (memtype))
13883 if (tree min = TYPE_MIN_VALUE (dom))
13884 if (tree max = TYPE_MAX_VALUE (dom))
13885 if (TREE_CODE (min) == INTEGER_CST
13886 && TREE_CODE (max) == INTEGER_CST)
13888 offset_int minidx = wi::to_offset (min);
13889 offset_int maxidx = wi::to_offset (max);
13890 if (maxidx - minidx > 1)
13891 return size;
13895 /* If the reference is to a declared object and the member a true
13896 flexible array, try to determine its size from its initializer. */
13897 poly_int64 off = 0;
13898 tree base = get_addr_base_and_unit_offset (ref, &off);
13899 if (!base || !VAR_P (base))
13900 return NULL_TREE;
13902 /* The size of any member of a declared object other than a flexible
13903 array member is that obtained above. */
13904 if (size)
13905 return size;
13907 if (tree init = DECL_INITIAL (base))
13908 if (TREE_CODE (init) == CONSTRUCTOR)
13910 off <<= LOG2_BITS_PER_UNIT;
13911 init = fold_ctor_reference (NULL_TREE, init, off, 0, base);
13912 if (init)
13913 return TYPE_SIZE_UNIT (TREE_TYPE (init));
13916 /* Return "don't know" for an external non-array object since its
13917 flexible array member can be initialized to have any number of
13918 elements. Otherwise, return zero because the flexible array
13919 member has no elements. */
13920 return (DECL_EXTERNAL (base) && TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
13921 ? NULL_TREE : integer_zero_node);
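/* Illustrative C inputs (a sketch, using the GNU extension of
   initializing a flexible array member):

     struct S { int n; char a[]; };
     struct S s0;
     struct S s1 = { 1, { 'x', 'y', 'z' } };
     extern struct S sx;

   For the COMPONENT_REF 's0.a' the result is integer_zero_node, for
   's1.a' it is a size of 3 taken from the initializer, and for 'sx.a'
   it is NULL_TREE since the external definition may supply any number
   of elements.  */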
13924 /* Return the machine mode of T. For vectors, returns the mode of the
13925 inner type. The main use case is to feed the result to HONOR_NANS,
13926 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13928 machine_mode
13929 element_mode (const_tree t)
13931 if (!TYPE_P (t))
13932 t = TREE_TYPE (t);
13933 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13934 t = TREE_TYPE (t);
13935 return TYPE_MODE (t);
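/* For example (a sketch): element_mode returns SFmode both for a V4SF
   vector type and for _Complex float, so a test such as

     if (HONOR_NANS (element_mode (type)))

   works uniformly for scalar, complex and vector types without ever
   seeing a vector or BLKmode mode.  */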
13938 /* Vector types need to re-check the target flags each time we report
13939 the machine mode. We need to do this because attribute target can
13940 change the result of vector_mode_supported_p and have_regs_of_mode
13941 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13942 change on a per-function basis. */
13943 /* ??? Possibly a better solution is to run through all the types
13944 referenced by a function and re-compute the TYPE_MODE once, rather
13945 than make the TYPE_MODE macro call a function. */
13947 machine_mode
13948 vector_type_mode (const_tree t)
13950 machine_mode mode;
13952 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13954 mode = t->type_common.mode;
13955 if (VECTOR_MODE_P (mode)
13956 && (!targetm.vector_mode_supported_p (mode)
13957 || !have_regs_of_mode[mode]))
13959 scalar_int_mode innermode;
13961 /* For integers, try mapping it to a same-sized scalar mode. */
13962 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13964 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13965 * GET_MODE_BITSIZE (innermode));
13966 scalar_int_mode mode;
13967 if (int_mode_for_size (size, 0).exists (&mode)
13968 && have_regs_of_mode[mode])
13969 return mode;
13972 return BLKmode;
13975 return mode;
13978 /* Verify that basic properties of T match TV and thus T can be a variant of
13979 TV. TV should be the more specified variant (i.e. the main variant). */
13981 static bool
13982 verify_type_variant (const_tree t, tree tv)
13984 /* Type variant can differ by:
13986 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13987 ENCODE_QUAL_ADDR_SPACE.
13988 - main variant may be COMPLETE_TYPE_P and variant types !COMPLETE_TYPE_P;
13989 in this case some values may not be set in the variant types
13990 (see the COMPLETE_TYPE_P checks).
13991 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13992 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13993 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13994 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13995 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13996 this is necessary to make it possible to merge types from different TUs
13997 - arrays, pointers and references may have TREE_TYPE that is a variant
13998 of TREE_TYPE of their main variants.
13999 - aggregates may have new TYPE_FIELDS list that list variants of
14000 the main variant TYPE_FIELDS.
14001 - vector types may differ by TYPE_VECTOR_OPAQUE
14004 /* Convenience macro for matching individual fields. */
14005 #define verify_variant_match(flag) \
14006 do { \
14007 if (flag (tv) != flag (t)) \
14009 error ("type variant differs by %s", #flag); \
14010 debug_tree (tv); \
14011 return false; \
14013 } while (false)
14015 /* tree_base checks. */
14017 verify_variant_match (TREE_CODE);
14018 /* FIXME: Ada builds non-artificial variants of artificial types. */
14019 if (TYPE_ARTIFICIAL (tv) && 0)
14020 verify_variant_match (TYPE_ARTIFICIAL);
14021 if (POINTER_TYPE_P (tv))
14022 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
14023 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for the Ada build. */
14024 verify_variant_match (TYPE_UNSIGNED);
14025 verify_variant_match (TYPE_PACKED);
14026 if (TREE_CODE (t) == REFERENCE_TYPE)
14027 verify_variant_match (TYPE_REF_IS_RVALUE);
14028 if (AGGREGATE_TYPE_P (t))
14029 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
14030 else
14031 verify_variant_match (TYPE_SATURATING);
14032 /* FIXME: This check triggers during the libstdc++ build. */
14033 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
14034 verify_variant_match (TYPE_FINAL_P);
14036 /* tree_type_common checks. */
14038 if (COMPLETE_TYPE_P (t))
14040 verify_variant_match (TYPE_MODE);
14041 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
14042 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
14043 verify_variant_match (TYPE_SIZE);
14044 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
14045 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
14046 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
14048 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
14049 TYPE_SIZE_UNIT (tv), 0));
14050 error ("type variant has different %<TYPE_SIZE_UNIT%>");
14051 debug_tree (tv);
14052 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
14053 debug_tree (TYPE_SIZE_UNIT (tv));
14054 error ("type%'s %<TYPE_SIZE_UNIT%>");
14055 debug_tree (TYPE_SIZE_UNIT (t));
14056 return false;
14059 verify_variant_match (TYPE_PRECISION);
14060 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
14061 if (RECORD_OR_UNION_TYPE_P (t))
14062 verify_variant_match (TYPE_TRANSPARENT_AGGR);
14063 else if (TREE_CODE (t) == ARRAY_TYPE)
14064 verify_variant_match (TYPE_NONALIASED_COMPONENT);
14065 /* During LTO we merge variant lists from different translation units
14066 that may differ by TYPE_CONTEXT, which in turn may point
14067 to TRANSLATION_UNIT_DECL.
14068 Ada also builds variants of types with different TYPE_CONTEXT. */
14069 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
14070 verify_variant_match (TYPE_CONTEXT);
14071 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
14072 verify_variant_match (TYPE_STRING_FLAG);
14073 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
14074 verify_variant_match (TYPE_CXX_ODR_P);
14075 if (TYPE_ALIAS_SET_KNOWN_P (t))
14077 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
14078 debug_tree (tv);
14079 return false;
14082 /* tree_type_non_common checks. */
14084 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14085 and dangle the pointer from time to time. */
14086 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
14087 && (in_lto_p || !TYPE_VFIELD (tv)
14088 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
14090 error ("type variant has different %<TYPE_VFIELD%>");
14091 debug_tree (tv);
14092 return false;
14094 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
14095 || TREE_CODE (t) == INTEGER_TYPE
14096 || TREE_CODE (t) == BOOLEAN_TYPE
14097 || TREE_CODE (t) == REAL_TYPE
14098 || TREE_CODE (t) == FIXED_POINT_TYPE)
14100 verify_variant_match (TYPE_MAX_VALUE);
14101 verify_variant_match (TYPE_MIN_VALUE);
14103 if (TREE_CODE (t) == METHOD_TYPE)
14104 verify_variant_match (TYPE_METHOD_BASETYPE);
14105 if (TREE_CODE (t) == OFFSET_TYPE)
14106 verify_variant_match (TYPE_OFFSET_BASETYPE);
14107 if (TREE_CODE (t) == ARRAY_TYPE)
14108 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
14109 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
14110 or even in the type's main variant. This is needed to make bootstrap pass
14111 and the bug seems new in GCC 5.
14112 The C++ FE should be updated to make this consistent and we should check
14113 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
14114 is a match with the main variant.
14116 Also disable the check for Java for now because of a parser hack that first
14117 builds a dummy BINFO and then sometimes replaces it with the real BINFO in
14118 some of the copies. */
14119 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
14120 && TYPE_BINFO (t) != TYPE_BINFO (tv)
14121 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
14122 Since there is no cheap way to tell a C++ type from a Java type
14123 without LTO, do the checking at LTO time only. */
14124 && (in_lto_p && odr_type_p (t)))
14126 error ("type variant has different %<TYPE_BINFO%>");
14127 debug_tree (tv);
14128 error ("type variant%'s %<TYPE_BINFO%>");
14129 debug_tree (TYPE_BINFO (tv));
14130 error ("type%'s %<TYPE_BINFO%>");
14131 debug_tree (TYPE_BINFO (t));
14132 return false;
14135 /* Check various uses of TYPE_VALUES_RAW. */
14136 if (TREE_CODE (t) == ENUMERAL_TYPE
14137 && TYPE_VALUES (t))
14138 verify_variant_match (TYPE_VALUES);
14139 else if (TREE_CODE (t) == ARRAY_TYPE)
14140 verify_variant_match (TYPE_DOMAIN);
14141 /* Permit incomplete variants of complete type. While FEs may complete
14142 all variants, this does not happen for C++ templates in all cases. */
14143 else if (RECORD_OR_UNION_TYPE_P (t)
14144 && COMPLETE_TYPE_P (t)
14145 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
14147 tree f1, f2;
14149 /* Fortran builds qualified variants as new records with items of
14150 qualified type. Verify that they look the same. */
14151 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
14152 f1 && f2;
14153 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14154 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
14155 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
14156 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
14157 /* FIXME: gfc_nonrestricted_type builds all types as variants
14158 with the exception of pointer types. It deeply copies the type,
14159 which means that we may end up with a variant type
14160 referring to a non-variant pointer. We may change it to
14161 produce types as variants, too, like
14162 objc_get_protocol_qualified_type does. */
14163 && !POINTER_TYPE_P (TREE_TYPE (f1)))
14164 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
14165 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
14166 break;
14167 if (f1 || f2)
14169 error ("type variant has different %<TYPE_FIELDS%>");
14170 debug_tree (tv);
14171 error ("first mismatch is field");
14172 debug_tree (f1);
14173 error ("and field");
14174 debug_tree (f2);
14175 return false;
14178 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
14179 verify_variant_match (TYPE_ARG_TYPES);
14180 /* For C++ the qualified variant of an array type is really an array type
14181 of the qualified TREE_TYPE.
14182 ObjC builds variants of pointers where the pointed-to type is a variant,
14183 too, in objc_get_protocol_qualified_type. */
14184 if (TREE_TYPE (t) != TREE_TYPE (tv)
14185 && ((TREE_CODE (t) != ARRAY_TYPE
14186 && !POINTER_TYPE_P (t))
14187 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
14188 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
14190 error ("type variant has different %<TREE_TYPE%>");
14191 debug_tree (tv);
14192 error ("type variant%'s %<TREE_TYPE%>");
14193 debug_tree (TREE_TYPE (tv));
14194 error ("type%'s %<TREE_TYPE%>");
14195 debug_tree (TREE_TYPE (t));
14196 return false;
14198 if (type_with_alias_set_p (t)
14199 && !gimple_canonical_types_compatible_p (t, tv, false))
14201 error ("type is not compatible with its variant");
14202 debug_tree (tv);
14203 error ("type variant%'s %<TREE_TYPE%>");
14204 debug_tree (TREE_TYPE (tv));
14205 error ("type%'s %<TREE_TYPE%>");
14206 debug_tree (TREE_TYPE (t));
14207 return false;
14209 return true;
14210 #undef verify_variant_match
14214 /* The TYPE_CANONICAL merging machinery. It should closely resemble
14215 the middle-end types_compatible_p function. It needs to avoid
14216 claiming types are different for types that should be treated
14217 the same with respect to TBAA. Canonical types are also used
14218 for IL consistency checks via the useless_type_conversion_p
14219 predicate which does not handle all type kinds itself but falls
14220 back to pointer-comparison of TYPE_CANONICAL for aggregates
14221 for example. */
14223 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14224 type calculation because we need to allow inter-operability between signed
14225 and unsigned variants. */
14227 bool
14228 type_with_interoperable_signedness (const_tree type)
14230 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
14231 signed char and unsigned char. Similarly the Fortran FE builds
14232 C_SIZE_T as a signed type, while C defines it as unsigned. */
14234 return tree_code_for_canonical_type_merging (TREE_CODE (type))
14235 == INTEGER_TYPE
14236 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14237 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14240 /* Return true iff T1 and T2 are structurally identical for what
14241 TBAA is concerned.
14242 This function is used both by lto.c canonical type merging and by the
14243 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
14244 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
14245 only for LTO because only in these cases TYPE_CANONICAL equivalence
14246 corresponds to the one defined by gimple_canonical_types_compatible_p. */
14248 bool
14249 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14250 bool trust_type_canonical)
14252 /* Type variants should be same as the main variant. When not doing sanity
14253 checking to verify this fact, go to main variants and save some work. */
14254 if (trust_type_canonical)
14256 t1 = TYPE_MAIN_VARIANT (t1);
14257 t2 = TYPE_MAIN_VARIANT (t2);
14260 /* Check first for the obvious case of pointer identity. */
14261 if (t1 == t2)
14262 return true;
14264 /* Check that we have two types to compare. */
14265 if (t1 == NULL_TREE || t2 == NULL_TREE)
14266 return false;
14268 /* We consider complete types always compatible with incomplete types.
14269 This does not make sense for canonical type calculation and thus we
14270 need to ensure that we are never called on it.
14272 FIXME: For more correctness the function probably should have three modes
14273 1) a mode assuming that types are complete, matching their structure
14274 2) mode allowing incomplete types but producing equivalence classes
14275 and thus ignoring all info from complete types
14276 3) mode allowing incomplete types to match complete but checking
14277 compatibility between complete types.
14279 1 and 2 can be used for canonical type calculation. 3 is the real
14280 definition of type compatibility that can be used e.g. for warnings during
14281 declaration merging. */
14283 gcc_assert (!trust_type_canonical
14284 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14286 /* If the types have been previously registered and found equal
14287 they still are. */
14289 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14290 && trust_type_canonical)
14292 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
14293 they are always NULL, but they are set to non-NULL for types
14294 constructed by build_pointer_type and variants. In this case the
14295 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
14296 all pointers are considered equal). Be sure not to return false
14297 negatives. */
14298 gcc_checking_assert (canonical_type_used_p (t1)
14299 && canonical_type_used_p (t2));
14300 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14303 /* For types where we do ODR based TBAA the canonical type is always
14304 set correctly, so we know that types are different if their
14305 canonical types do not match. */
14306 if (trust_type_canonical
14307 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14308 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14309 return false;
14311 /* Can't be the same type if the types don't have the same code. */
14312 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14313 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14314 return false;
14316 /* Qualifiers do not matter for canonical type comparison purposes. */
14318 /* Void types and nullptr types are always the same. */
14319 if (TREE_CODE (t1) == VOID_TYPE
14320 || TREE_CODE (t1) == NULLPTR_TYPE)
14321 return true;
14323 /* Can't be the same type if they have different mode. */
14324 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14325 return false;
14327 /* Non-aggregate types can be handled cheaply. */
14328 if (INTEGRAL_TYPE_P (t1)
14329 || SCALAR_FLOAT_TYPE_P (t1)
14330 || FIXED_POINT_TYPE_P (t1)
14331 || TREE_CODE (t1) == VECTOR_TYPE
14332 || TREE_CODE (t1) == COMPLEX_TYPE
14333 || TREE_CODE (t1) == OFFSET_TYPE
14334 || POINTER_TYPE_P (t1))
14336 /* Can't be the same type if they have different precision. */
14337 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14338 return false;
14340 /* In some cases the signed and unsigned types are required to be
14341 inter-operable. */
14342 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14343 && !type_with_interoperable_signedness (t1))
14344 return false;
14346 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14347 interoperable with "signed char". Unless all frontends are revisited
14348 to agree on these types, we must ignore the flag completely. */
14350 /* The Fortran standard defines the C_PTR type, which is compatible with
14351 every C pointer. For this reason we need to glob all pointers into one.
14352 Still, pointers in different address spaces are not compatible. */
14353 if (POINTER_TYPE_P (t1))
14355 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14356 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14357 return false;
14360 /* Tail-recurse to components. */
14361 if (TREE_CODE (t1) == VECTOR_TYPE
14362 || TREE_CODE (t1) == COMPLEX_TYPE)
14363 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14364 TREE_TYPE (t2),
14365 trust_type_canonical);
14367 return true;
14370 /* Do type-specific comparisons. */
14371 switch (TREE_CODE (t1))
14373 case ARRAY_TYPE:
14374 /* Array types are the same if the element types are the same and
14375 the number of elements is the same. */
14376 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14377 trust_type_canonical)
14378 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14379 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14380 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14381 return false;
14382 else
14384 tree i1 = TYPE_DOMAIN (t1);
14385 tree i2 = TYPE_DOMAIN (t2);
14387 /* For an incomplete external array, the type domain can be
14388 NULL_TREE. Check this condition also. */
14389 if (i1 == NULL_TREE && i2 == NULL_TREE)
14390 return true;
14391 else if (i1 == NULL_TREE || i2 == NULL_TREE)
14392 return false;
14393 else
14395 tree min1 = TYPE_MIN_VALUE (i1);
14396 tree min2 = TYPE_MIN_VALUE (i2);
14397 tree max1 = TYPE_MAX_VALUE (i1);
14398 tree max2 = TYPE_MAX_VALUE (i2);
14400 /* The minimum/maximum values have to be the same. */
14401 if ((min1 == min2
14402 || (min1 && min2
14403 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14404 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14405 || operand_equal_p (min1, min2, 0))))
14406 && (max1 == max2
14407 || (max1 && max2
14408 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14409 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14410 || operand_equal_p (max1, max2, 0)))))
14411 return true;
14412 else
14413 return false;
14417 case METHOD_TYPE:
14418 case FUNCTION_TYPE:
14419 /* Function types are the same if the return type and argument types
14420 are the same. */
14421 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14422 trust_type_canonical))
14423 return false;
14425 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14426 return true;
14427 else
14429 tree parms1, parms2;
14431 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14432 parms1 && parms2;
14433 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14435 if (!gimple_canonical_types_compatible_p
14436 (TREE_VALUE (parms1), TREE_VALUE (parms2),
14437 trust_type_canonical))
14438 return false;
14441 if (parms1 || parms2)
14442 return false;
14444 return true;
14447 case RECORD_TYPE:
14448 case UNION_TYPE:
14449 case QUAL_UNION_TYPE:
14451 tree f1, f2;
14453 /* Don't try to compare variants of an incomplete type, before
14454 TYPE_FIELDS has been copied around. */
14455 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14456 return true;
14459 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14460 return false;
14462 /* For aggregate types, all the fields must be the same. */
14463 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14464 f1 || f2;
14465 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14467 /* Skip non-fields and zero-sized fields. */
14468 while (f1 && (TREE_CODE (f1) != FIELD_DECL
14469 || (DECL_SIZE (f1)
14470 && integer_zerop (DECL_SIZE (f1)))))
14471 f1 = TREE_CHAIN (f1);
14472 while (f2 && (TREE_CODE (f2) != FIELD_DECL
14473 || (DECL_SIZE (f2)
14474 && integer_zerop (DECL_SIZE (f2)))))
14475 f2 = TREE_CHAIN (f2);
14476 if (!f1 || !f2)
14477 break;
14478 /* The fields must have the same name, offset and type. */
14479 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14480 || !gimple_compare_field_offset (f1, f2)
14481 || !gimple_canonical_types_compatible_p
14482 (TREE_TYPE (f1), TREE_TYPE (f2),
14483 trust_type_canonical))
14484 return false;
14487 /* If one aggregate has more fields than the other, they
14488 are not the same. */
14489 if (f1 || f2)
14490 return false;
14492 return true;
14495 default:
14496 /* Consider all types with language specific trees in them mutually
14497 compatible. This is executed only from verify_type and false
14498 positives can be tolerated. */
14499 gcc_assert (!in_lto_p);
14500 return true;
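/* Some illustrative consequences of the above (a sketch): within the
   same address space 'int *' and 'char *' are considered compatible,
   because all pointers are globbed together; 'int' and 'unsigned int'
   are not, since their signedness differs and int has none of the
   interoperable precisions; and two RECORD_TYPEs are compatible only if
   their fields (ignoring zero-sized ones) pair up with matching offsets
   and compatible types.  */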
14504 /* Verify type T. */
14506 void
14507 verify_type (const_tree t)
14509 bool error_found = false;
14510 tree mv = TYPE_MAIN_VARIANT (t);
14511 if (!mv)
14513 error ("main variant is not defined");
14514 error_found = true;
14516 else if (mv != TYPE_MAIN_VARIANT (mv))
14518 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14519 debug_tree (mv);
14520 error_found = true;
14522 else if (t != mv && !verify_type_variant (t, mv))
14523 error_found = true;
14525 tree ct = TYPE_CANONICAL (t);
14526 if (!ct)
14528 else if (TYPE_CANONICAL (t) != ct)
14530 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14531 debug_tree (ct);
14532 error_found = true;
14534 /* Method and function types cannot be used to address memory and thus
14535 TYPE_CANONICAL really matters only for determining useless conversions.
14537 FIXME: The C++ FE produces declarations of builtin functions that are not
14538 compatible with their main variants. */
14539 else if (TREE_CODE (t) == FUNCTION_TYPE)
14541 else if (t != ct
14542 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14543 with variably sized arrays because their sizes may have been
14544 gimplified to different variables. */
14545 && !variably_modified_type_p (ct, NULL)
14546 && !gimple_canonical_types_compatible_p (t, ct, false)
14547 && COMPLETE_TYPE_P (t))
14549 error ("%<TYPE_CANONICAL%> is not compatible");
14550 debug_tree (ct);
14551 error_found = true;
14554 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14555 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14557 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14558 debug_tree (ct);
14559 error_found = true;
14561 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14563 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14564 debug_tree (ct);
14565 debug_tree (TYPE_MAIN_VARIANT (ct));
14566 error_found = true;
14570 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14571 if (RECORD_OR_UNION_TYPE_P (t))
14573 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14574 and dangle the pointer from time to time. */
14575 if (TYPE_VFIELD (t)
14576 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14577 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14579 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14580 debug_tree (TYPE_VFIELD (t));
14581 error_found = true;
14584 else if (TREE_CODE (t) == POINTER_TYPE)
14586 if (TYPE_NEXT_PTR_TO (t)
14587 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14589 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14590 debug_tree (TYPE_NEXT_PTR_TO (t));
14591 error_found = true;
14594 else if (TREE_CODE (t) == REFERENCE_TYPE)
14596 if (TYPE_NEXT_REF_TO (t)
14597 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14599 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14600 debug_tree (TYPE_NEXT_REF_TO (t));
14601 error_found = true;
14604 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14605 || TREE_CODE (t) == FIXED_POINT_TYPE)
14607 /* FIXME: The following check should pass:
14608 useless_type_conversion_p (const_cast <tree> (t),
14609 TREE_TYPE (TYPE_MIN_VALUE (t)))
14610 but does not for C sizetypes in LTO. */
14613 /* Check various uses of TYPE_MAXVAL_RAW. */
14614 if (RECORD_OR_UNION_TYPE_P (t))
14616 if (!TYPE_BINFO (t))
14618 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14620 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14621 debug_tree (TYPE_BINFO (t));
14622 error_found = true;
14624 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14626 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14627 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14628 error_found = true;
14631 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14633 if (TYPE_METHOD_BASETYPE (t)
14634 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14635 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14637 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14638 debug_tree (TYPE_METHOD_BASETYPE (t));
14639 error_found = true;
14642 else if (TREE_CODE (t) == OFFSET_TYPE)
14644 if (TYPE_OFFSET_BASETYPE (t)
14645 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14646 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14648 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14649 debug_tree (TYPE_OFFSET_BASETYPE (t));
14650 error_found = true;
14653 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14654 || TREE_CODE (t) == FIXED_POINT_TYPE)
14656 /* FIXME: The following check should pass:
14657 useless_type_conversion_p (const_cast <tree> (t),
14658 TREE_TYPE (TYPE_MAX_VALUE (t)))
14659 but does not for C sizetypes in LTO. */
14661 else if (TREE_CODE (t) == ARRAY_TYPE)
14663 if (TYPE_ARRAY_MAX_SIZE (t)
14664 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14666 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14667 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14668 error_found = true;
14671 else if (TYPE_MAX_VALUE_RAW (t))
14673 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14674 debug_tree (TYPE_MAX_VALUE_RAW (t));
14675 error_found = true;
14678 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14680 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14681 debug_tree (TYPE_LANG_SLOT_1 (t));
14682 error_found = true;
14685 /* Check various uses of TYPE_VALUES_RAW. */
14686 if (TREE_CODE (t) == ENUMERAL_TYPE)
14687 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14689 tree value = TREE_VALUE (l);
14690 tree name = TREE_PURPOSE (l);
14692 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14693 a CONST_DECL of ENUMERAL_TYPE. */
14694 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14696 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14697 debug_tree (value);
14698 debug_tree (name);
14699 error_found = true;
14701 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14702 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14704 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14705 "to the enum");
14706 debug_tree (value);
14707 debug_tree (name);
14708 error_found = true;
14710 if (TREE_CODE (name) != IDENTIFIER_NODE)
14712 error ("enum value name is not %<IDENTIFIER_NODE%>");
14713 debug_tree (value);
14714 debug_tree (name);
14715 error_found = true;
14718 else if (TREE_CODE (t) == ARRAY_TYPE)
14720 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14722 error ("array %<TYPE_DOMAIN%> is not integer type");
14723 debug_tree (TYPE_DOMAIN (t));
14724 error_found = true;
14727 else if (RECORD_OR_UNION_TYPE_P (t))
14729 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14731 error ("%<TYPE_FIELDS%> defined in incomplete type");
14732 error_found = true;
14734 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14736 /* TODO: verify properties of decls. */
14737 if (TREE_CODE (fld) == FIELD_DECL)
14739 else if (TREE_CODE (fld) == TYPE_DECL)
14741 else if (TREE_CODE (fld) == CONST_DECL)
14743 else if (VAR_P (fld))
14745 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14747 else if (TREE_CODE (fld) == USING_DECL)
14749 else if (TREE_CODE (fld) == FUNCTION_DECL)
14751 else
14753 error ("wrong tree in %<TYPE_FIELDS%> list");
14754 debug_tree (fld);
14755 error_found = true;
14759 else if (TREE_CODE (t) == INTEGER_TYPE
14760 || TREE_CODE (t) == BOOLEAN_TYPE
14761 || TREE_CODE (t) == OFFSET_TYPE
14762 || TREE_CODE (t) == REFERENCE_TYPE
14763 || TREE_CODE (t) == NULLPTR_TYPE
14764 || TREE_CODE (t) == POINTER_TYPE)
14766 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14768 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14769 "is %p",
14770 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14771 error_found = true;
14773 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14775 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14776 debug_tree (TYPE_CACHED_VALUES (t));
14777 error_found = true;
14779 /* Verify just enough of the cache to ensure that no one copied it to a new
14780 type. All copying should go through copy_node, which should clear it. */
14781 else if (TYPE_CACHED_VALUES_P (t))
14783 int i;
14784 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14785 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14786 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14788 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14789 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14790 error_found = true;
14791 break;
14795 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14796 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14798 /* C++ FE uses TREE_PURPOSE to store initial values. */
14799 if (TREE_PURPOSE (l) && in_lto_p)
14801 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14802 debug_tree (l);
14803 error_found = true;
14805 if (!TYPE_P (TREE_VALUE (l)))
14807 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14808 debug_tree (l);
14809 error_found = true;
14812 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14814 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14815 debug_tree (TYPE_VALUES_RAW (t));
14816 error_found = true;
14818 if (TREE_CODE (t) != INTEGER_TYPE
14819 && TREE_CODE (t) != BOOLEAN_TYPE
14820 && TREE_CODE (t) != OFFSET_TYPE
14821 && TREE_CODE (t) != REFERENCE_TYPE
14822 && TREE_CODE (t) != NULLPTR_TYPE
14823 && TREE_CODE (t) != POINTER_TYPE
14824 && TYPE_CACHED_VALUES_P (t))
14826 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14827 error_found = true;
14830 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14831 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14832 of a type. */
14833 if (TREE_CODE (t) == METHOD_TYPE
14834 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14836 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14837 error_found = true;
14840 if (error_found)
14842 debug_tree (const_cast <tree> (t));
14843 internal_error ("%qs failed", __func__);
14848 /* Return 1 if ARG interpreted as signed in its precision is known to be
14849 always positive or 2 if ARG is known to be always negative, or 3 if
14850 ARG may be positive or negative. */
14853 get_range_pos_neg (tree arg)
14855 if (arg == error_mark_node)
14856 return 3;
14858 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14859 int cnt = 0;
14860 if (TREE_CODE (arg) == INTEGER_CST)
14862 wide_int w = wi::sext (wi::to_wide (arg), prec);
14863 if (wi::neg_p (w))
14864 return 2;
14865 else
14866 return 1;
14868 while (CONVERT_EXPR_P (arg)
14869 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14870 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14872 arg = TREE_OPERAND (arg, 0);
14873 /* Narrower value zero extended into wider type
14874 will always result in positive values. */
14875 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14876 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14877 return 1;
14878 prec = TYPE_PRECISION (TREE_TYPE (arg));
14879 if (++cnt > 30)
14880 return 3;
14883 if (TREE_CODE (arg) != SSA_NAME)
14884 return 3;
14885 wide_int arg_min, arg_max;
14886 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14888 gimple *g = SSA_NAME_DEF_STMT (arg);
14889 if (is_gimple_assign (g)
14890 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14892 tree t = gimple_assign_rhs1 (g);
14893 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14894 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14896 if (TYPE_UNSIGNED (TREE_TYPE (t))
14897 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14898 return 1;
14899 prec = TYPE_PRECISION (TREE_TYPE (t));
14900 arg = t;
14901 if (++cnt > 30)
14902 return 3;
14903 continue;
14906 return 3;
14908 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14910 /* For unsigned values, the "positive" range comes
14911 below the "negative" range. */
14912 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14913 return 1;
14914 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14915 return 2;
14917 else
14919 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14920 return 1;
14921 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14922 return 2;
14924 return 3;
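/* A minimal usage sketch (hypothetical caller, OP0 and OP1 being
   operands it is analyzing):

     if (get_range_pos_neg (op0) == 1 && get_range_pos_neg (op1) == 1)
       ...  both operands are known non-negative when interpreted as
            signed values of their precision ...

   A result of 3 must always be treated as "sign unknown".  */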
14930 /* Return true if ARG is marked with the nonnull attribute in the
14931 current function signature. */
14933 bool
14934 nonnull_arg_p (const_tree arg)
14936 tree t, attrs, fntype;
14937 unsigned HOST_WIDE_INT arg_num;
14939 gcc_assert (TREE_CODE (arg) == PARM_DECL
14940 && (POINTER_TYPE_P (TREE_TYPE (arg))
14941 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14943 /* The static chain decl is always non null. */
14944 if (arg == cfun->static_chain_decl)
14945 return true;
14947 /* The THIS argument of a method is always non-NULL. */
14948 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14949 && arg == DECL_ARGUMENTS (cfun->decl)
14950 && flag_delete_null_pointer_checks)
14951 return true;
14953 /* Values passed by reference are always non-NULL. */
14954 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14955 && flag_delete_null_pointer_checks)
14956 return true;
14958 fntype = TREE_TYPE (cfun->decl);
14959 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14961 attrs = lookup_attribute ("nonnull", attrs);
14963 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14964 if (attrs == NULL_TREE)
14965 return false;
14967 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14968 if (TREE_VALUE (attrs) == NULL_TREE)
14969 return true;
14971 /* Get the position number for ARG in the function signature. */
14972 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14974 t = DECL_CHAIN (t), arg_num++)
14976 if (t == arg)
14977 break;
14980 gcc_assert (t == arg);
14982 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14983 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14985 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14986 return true;
14990 return false;
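/* Illustrative C input (a sketch): given

     __attribute__ ((nonnull (1))) void f (char *p, char *q) { ... }

   nonnull_arg_p returns true for the PARM_DECL of P and false for Q
   while CFUN is f.  A bare 'nonnull' attribute without arguments makes
   it return true for every pointer parameter.  */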
14993 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14994 information. */
14996 location_t
14997 set_block (location_t loc, tree block)
14999 location_t pure_loc = get_pure_location (loc);
15000 source_range src_range = get_range_from_loc (line_table, loc);
15001 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
15004 location_t
15005 set_source_range (tree expr, location_t start, location_t finish)
15007 source_range src_range;
15008 src_range.m_start = start;
15009 src_range.m_finish = finish;
15010 return set_source_range (expr, src_range);
15013 location_t
15014 set_source_range (tree expr, source_range src_range)
15016 if (!EXPR_P (expr))
15017 return UNKNOWN_LOCATION;
15019 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
15020 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
15021 pure_loc,
15022 src_range,
15023 NULL);
15024 SET_EXPR_LOCATION (expr, adhoc);
15025 return adhoc;
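/* A minimal usage sketch (hypothetical front-end caller): record that a
   parenthesized expression spans its two delimiting tokens,

     set_source_range (expr, lparen_loc, rparen_loc);

   where LPAREN_LOC and RPAREN_LOC stand for whatever token locations the
   parser has at hand; the combined ad-hoc location is also returned.  */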
15028 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
15029 if !CAN_HAVE_LOCATION_P (expr).
15031 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
15032 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
15034 Wrapper nodes can be identified using location_wrapper_p. */
15036 tree
15037 maybe_wrap_with_location (tree expr, location_t loc)
15039 if (expr == NULL)
15040 return NULL;
15041 if (loc == UNKNOWN_LOCATION)
15042 return expr;
15043 if (CAN_HAVE_LOCATION_P (expr))
15044 return expr;
15045 /* We should only be adding wrappers for constants and for decls,
15046 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
15047 gcc_assert (CONSTANT_CLASS_P (expr)
15048 || DECL_P (expr)
15049 || EXCEPTIONAL_CLASS_P (expr));
15051 /* For now, don't add wrappers to exceptional tree nodes, to minimize
15052 any impact of the wrapper nodes. */
15053 if (EXCEPTIONAL_CLASS_P (expr))
15054 return expr;
15056 /* If any auto_suppress_location_wrappers are active, don't create
15057 wrappers. */
15058 if (suppress_location_wrappers > 0)
15059 return expr;
15061 tree_code code
15062 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
15063 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
15064 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
15065 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
15066 /* Mark this node as being a wrapper. */
15067 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
15068 return wrapper;
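/* A minimal usage sketch (hypothetical front-end caller, LOC being some
   location it wants to attach):

     tree cst = build_int_cst (integer_type_node, 42);
     cst = maybe_wrap_with_location (cst, loc);

   Assuming LOC is not UNKNOWN_LOCATION, the result is a NON_LVALUE_EXPR
   wrapper with EXPR_LOCATION equal to LOC and location_wrapper_p true;
   a STRING_CST or most decls would get a VIEW_CONVERT_EXPR wrapper
   instead.  */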
15071 int suppress_location_wrappers;
15073 /* Return the name of combined function FN, for debugging purposes. */
15075 const char *
15076 combined_fn_name (combined_fn fn)
15078 if (builtin_fn_p (fn))
15080 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
15081 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
15083 else
15084 return internal_fn_name (as_internal_fn (fn));
15087 /* Return a bitmap with a bit set corresponding to each argument in
15088 a function call type FNTYPE declared with attribute nonnull,
15089 or null if none of the function's argument are nonnull. The caller
15090 must free the bitmap. */
15092 bitmap
15093 get_nonnull_args (const_tree fntype)
15095 if (fntype == NULL_TREE)
15096 return NULL;
15098 tree attrs = TYPE_ATTRIBUTES (fntype);
15099 if (!attrs)
15100 return NULL;
15102 bitmap argmap = NULL;
15104 /* A function declaration can specify multiple attribute nonnull,
15105 each with zero or more arguments. The loop below creates a bitmap
15106 representing a union of all the arguments. An empty (but non-null)
15107 bitmap means that all arguments have been declared nonnull. */
15108 for ( ; attrs; attrs = TREE_CHAIN (attrs))
15110 attrs = lookup_attribute ("nonnull", attrs);
15111 if (!attrs)
15112 break;
15114 if (!argmap)
15115 argmap = BITMAP_ALLOC (NULL);
15117 if (!TREE_VALUE (attrs))
15119 /* Clear the bitmap in case a previous attribute nonnull
15120 set it and this one overrides it for all arguments. */
15121 bitmap_clear (argmap);
15122 return argmap;
15125 /* Iterate over the indices of the arguments declared nonnull
15126 and set a bit for each. */
15127 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15129 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15130 bitmap_set_bit (argmap, val);
15134 return argmap;
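/* A minimal usage sketch (hypothetical caller, FNTYPE and ARGNO being
   whatever call type and argument position it is examining):

     bitmap nonnull = get_nonnull_args (fntype);
     if (nonnull)
       {
         if (bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, argno))
           ...  argument ARGNO (zero-based) is declared nonnull ...
         BITMAP_FREE (nonnull);
       }

   An empty but non-null bitmap means every argument is nonnull; the bit
   indices stored are zero-based argument positions.  */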
15137 /* Returns true if TYPE is a type where it and all of its subobjects
15138 (recursively, ignoring padding fields) are of structure, union, or array type. */
15140 static bool
15141 default_is_empty_type (tree type)
15143 if (RECORD_OR_UNION_TYPE_P (type))
15145 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15146 if (TREE_CODE (field) == FIELD_DECL
15147 && !DECL_PADDING_P (field)
15148 && !default_is_empty_type (TREE_TYPE (field)))
15149 return false;
15150 return true;
15152 else if (TREE_CODE (type) == ARRAY_TYPE)
15153 return (integer_minus_onep (array_type_nelts (type))
15154 || TYPE_DOMAIN (type) == NULL_TREE
15155 || default_is_empty_type (TREE_TYPE (type)));
15156 return false;
15159 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
15160 that shouldn't be passed via stack. */
15162 bool
15163 default_is_empty_record (const_tree type)
15165 if (!abi_version_at_least (12))
15166 return false;
15168 if (type == error_mark_node)
15169 return false;
15171 if (TREE_ADDRESSABLE (type))
15172 return false;
15174 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
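/* Illustrative C++ inputs (a sketch, assuming -fabi-version is at
   least 12):

     struct E { };
     struct D { E e; E a[3]; };
     struct N { E e; int i; };

   E and D are treated as empty records and are not passed via the
   stack, while N is not empty because of the int member.  */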
15177 /* Like int_size_in_bytes, but handle empty records specially. */
15179 HOST_WIDE_INT
15180 arg_int_size_in_bytes (const_tree type)
15182 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15185 /* Like size_in_bytes, but handle empty records specially. */
15187 tree
15188 arg_size_in_bytes (const_tree type)
15190 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15193 /* Return true if an expression with CODE has to have the same result type as
15194 its first operand. */
15196 bool
15197 expr_type_first_operand_type_p (tree_code code)
15199 switch (code)
15201 case NEGATE_EXPR:
15202 case ABS_EXPR:
15203 case BIT_NOT_EXPR:
15204 case PAREN_EXPR:
15205 case CONJ_EXPR:
15207 case PLUS_EXPR:
15208 case MINUS_EXPR:
15209 case MULT_EXPR:
15210 case TRUNC_DIV_EXPR:
15211 case CEIL_DIV_EXPR:
15212 case FLOOR_DIV_EXPR:
15213 case ROUND_DIV_EXPR:
15214 case TRUNC_MOD_EXPR:
15215 case CEIL_MOD_EXPR:
15216 case FLOOR_MOD_EXPR:
15217 case ROUND_MOD_EXPR:
15218 case RDIV_EXPR:
15219 case EXACT_DIV_EXPR:
15220 case MIN_EXPR:
15221 case MAX_EXPR:
15222 case BIT_IOR_EXPR:
15223 case BIT_XOR_EXPR:
15224 case BIT_AND_EXPR:
15226 case LSHIFT_EXPR:
15227 case RSHIFT_EXPR:
15228 case LROTATE_EXPR:
15229 case RROTATE_EXPR:
15230 return true;
15232 default:
15233 return false;
15237 /* Return a typenode for the "standard" C type with a given name. */
15238 tree
15239 get_typenode_from_name (const char *name)
15241 if (name == NULL || *name == '\0')
15242 return NULL_TREE;
15244 if (strcmp (name, "char") == 0)
15245 return char_type_node;
15246 if (strcmp (name, "unsigned char") == 0)
15247 return unsigned_char_type_node;
15248 if (strcmp (name, "signed char") == 0)
15249 return signed_char_type_node;
15251 if (strcmp (name, "short int") == 0)
15252 return short_integer_type_node;
15253 if (strcmp (name, "short unsigned int") == 0)
15254 return short_unsigned_type_node;
15256 if (strcmp (name, "int") == 0)
15257 return integer_type_node;
15258 if (strcmp (name, "unsigned int") == 0)
15259 return unsigned_type_node;
15261 if (strcmp (name, "long int") == 0)
15262 return long_integer_type_node;
15263 if (strcmp (name, "long unsigned int") == 0)
15264 return long_unsigned_type_node;
15266 if (strcmp (name, "long long int") == 0)
15267 return long_long_integer_type_node;
15268 if (strcmp (name, "long long unsigned int") == 0)
15269 return long_long_unsigned_type_node;
15271 gcc_unreachable ();
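/* A minimal usage sketch (hypothetical caller, e.g. when recreating a
   type from target or debug information):

     tree t = get_typenode_from_name ("long unsigned int");
     gcc_assert (t == long_unsigned_type_node);

   Unrecognized names hit gcc_unreachable, so only the standard C type
   names handled above may be passed.  */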
15274 /* List of pointer types used to declare builtins before we have seen their
15275 real declaration.
15277 Keep the size up to date in tree.h ! */
15278 const builtin_structptr_type builtin_structptr_types[6] =
15280 { fileptr_type_node, ptr_type_node, "FILE" },
15281 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
15282 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
15283 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
15284 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
15285 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
15288 /* Return the maximum object size. */
15290 tree
15291 max_object_size (void)
15293 /* To do: Make this a configurable parameter. */
15294 return TYPE_MAX_VALUE (ptrdiff_type_node);
15297 #if CHECKING_P
15299 namespace selftest {
15301 /* Selftests for tree. */
15303 /* Verify that integer constants are sane. */
15305 static void
15306 test_integer_constants ()
15308 ASSERT_TRUE (integer_type_node != NULL);
15309 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15311 tree type = integer_type_node;
15313 tree zero = build_zero_cst (type);
15314 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15315 ASSERT_EQ (type, TREE_TYPE (zero));
15317 tree one = build_int_cst (type, 1);
15318 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15319 ASSERT_EQ (type, TREE_TYPE (one));
15322 /* Verify identifiers. */
15324 static void
15325 test_identifiers ()
15327 tree identifier = get_identifier ("foo");
15328 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15329 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15332 /* Verify LABEL_DECL. */
15334 static void
15335 test_labels ()
15337 tree identifier = get_identifier ("err");
15338 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15339 identifier, void_type_node);
15340 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15341 ASSERT_FALSE (FORCED_LABEL (label_decl));
15344 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15345 are given by VALS. */
15347 static tree
15348 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15350 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15351 tree_vector_builder builder (type, vals.length (), 1);
15352 builder.splice (vals);
15353 return builder.build ();
15356 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15358 static void
15359 check_vector_cst (vec<tree> expected, tree actual)
15361 ASSERT_KNOWN_EQ (expected.length (),
15362 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15363 for (unsigned int i = 0; i < expected.length (); ++i)
15364 ASSERT_EQ (wi::to_wide (expected[i]),
15365 wi::to_wide (vector_cst_elt (actual, i)));
15368 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15369 and that its elements match EXPECTED. */
15371 static void
15372 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15373 unsigned int npatterns)
15375 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15376 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15377 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15378 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15379 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15380 check_vector_cst (expected, actual);
15383 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15384 and NPATTERNS background elements, and that its elements match
15385 EXPECTED. */
15387 static void
15388 check_vector_cst_fill (vec<tree> expected, tree actual,
15389 unsigned int npatterns)
15391 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15392 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15393 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15394 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15395 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15396 check_vector_cst (expected, actual);
15397 }
15399 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15400 and that its elements match EXPECTED. */
15402 static void
15403 check_vector_cst_stepped (vec<tree> expected, tree actual,
15404 unsigned int npatterns)
15405 {
15406 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15407 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15408 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15409 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15410 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15411 check_vector_cst (expected, actual);
15412 }
15414 /* Test the creation of VECTOR_CSTs. */
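/* Each case builds all eight elements explicitly and checks which of
   the compressed encodings described above the resulting VECTOR_CST
   uses.  */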
15416 static void
15417 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15418 {
15419 auto_vec<tree, 8> elements (8);
15420 elements.quick_grow (8);
15421 tree element_type = build_nonstandard_integer_type (16, true);
15422 tree vector_type = build_vector_type (element_type, 8);
15424 /* Test a simple linear series with a base of 0 and a step of 1:
15425 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15426 for (unsigned int i = 0; i < 8; ++i)
15427 elements[i] = build_int_cst (element_type, i);
15428 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15429 check_vector_cst_stepped (elements, vector, 1);
15431 /* Try the same with the first element replaced by 100:
15432 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15433 elements[0] = build_int_cst (element_type, 100);
15434 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15435 check_vector_cst_stepped (elements, vector, 1);
15437 /* Try a series that wraps around.
15438 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15439 for (unsigned int i = 1; i < 8; ++i)
15440 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15441 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15442 check_vector_cst_stepped (elements, vector, 1);
15444 /* Try a downward series:
15445 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15446 for (unsigned int i = 1; i < 8; ++i)
15447 elements[i] = build_int_cst (element_type, 80 - i);
15448 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15449 check_vector_cst_stepped (elements, vector, 1);
15451 /* Try two interleaved series with different bases and steps:
15452 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15453 elements[1] = build_int_cst (element_type, 53);
15454 for (unsigned int i = 2; i < 8; i += 2)
15455 {
15456 elements[i] = build_int_cst (element_type, 70 - i * 2);
15457 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15458 }
15459 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15460 check_vector_cst_stepped (elements, vector, 2);
15462 /* Try a duplicated value:
15463 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15464 for (unsigned int i = 1; i < 8; ++i)
15465 elements[i] = elements[0];
15466 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15467 check_vector_cst_duplicate (elements, vector, 1);
15469 /* Try an interleaved duplicated value:
15470 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15471 elements[1] = build_int_cst (element_type, 55);
15472 for (unsigned int i = 2; i < 8; ++i)
15473 elements[i] = elements[i - 2];
15474 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15475 check_vector_cst_duplicate (elements, vector, 2);
15477 /* Try a duplicated value with 2 exceptions
15478 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15479 elements[0] = build_int_cst (element_type, 41);
15480 elements[1] = build_int_cst (element_type, 97);
15481 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15482 check_vector_cst_fill (elements, vector, 2);
15484 /* Try with and without a step
15485 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15486 for (unsigned int i = 3; i < 8; i += 2)
15487 elements[i] = build_int_cst (element_type, i * 7);
15488 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15489 check_vector_cst_stepped (elements, vector, 2);
15491 /* Try a fully-general constant:
15492 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15493 elements[5] = build_int_cst (element_type, 9990);
15494 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15495 check_vector_cst_fill (elements, vector, 4);
15496 }
15498 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15499 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15500 modifying its argument in-place. */
15502 static void
15503 check_strip_nops (tree node, tree expected)
15504 {
15505 STRIP_NOPS (node);
15506 ASSERT_EQ (expected, node);
15507 }
15509 /* Verify location wrappers. */
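/* A location wrapper is a NON_LVALUE_EXPR or VIEW_CONVERT_EXPR of the
   same type as its operand, created by maybe_wrap_with_location purely
   so that nodes such as constants and decls, which cannot store a
   location themselves, can carry an EXPR_LOCATION.  STRIP_NOPS and
   tree_strip_any_location_wrapper remove the wrapper again.  */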
15511 static void
15512 test_location_wrappers ()
15513 {
15514 location_t loc = BUILTINS_LOCATION;
15516 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15518 /* Wrapping a constant. */
15519 tree int_cst = build_int_cst (integer_type_node, 42);
15520 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15521 ASSERT_FALSE (location_wrapper_p (int_cst));
15523 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15524 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15525 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15526 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15528 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15529 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15531 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15532 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15533 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15534 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15536 /* Wrapping a STRING_CST. */
15537 tree string_cst = build_string (4, "foo");
15538 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15539 ASSERT_FALSE (location_wrapper_p (string_cst));
15541 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15542 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15543 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15544 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15545 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15548 /* Wrapping a variable. */
15549 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15550 get_identifier ("some_int_var"),
15551 integer_type_node);
15552 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15553 ASSERT_FALSE (location_wrapper_p (int_var));
15555 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15556 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15557 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15558 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15560 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15561 wrapper. */
15562 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15563 ASSERT_FALSE (location_wrapper_p (r_cast));
15564 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15566 /* Verify that STRIP_NOPS removes wrappers. */
15567 check_strip_nops (wrapped_int_cst, int_cst);
15568 check_strip_nops (wrapped_string_cst, string_cst);
15569 check_strip_nops (wrapped_int_var, int_var);
15570 }
15572 /* Test various tree predicates. Verify that location wrappers don't
15573 affect the results. */
15575 static void
15576 test_predicates ()
15577 {
15578 /* Build various constants and wrappers around them. */
15580 location_t loc = BUILTINS_LOCATION;
15582 tree i_0 = build_int_cst (integer_type_node, 0);
15583 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15585 tree i_1 = build_int_cst (integer_type_node, 1);
15586 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15588 tree i_m1 = build_int_cst (integer_type_node, -1);
15589 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15591 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15592 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15593 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15594 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15595 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15596 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15598 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15599 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15600 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15602 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15603 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15604 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15606 /* TODO: vector constants. */
15608 /* Test integer_onep. */
15609 ASSERT_FALSE (integer_onep (i_0));
15610 ASSERT_FALSE (integer_onep (wr_i_0));
15611 ASSERT_TRUE (integer_onep (i_1));
15612 ASSERT_TRUE (integer_onep (wr_i_1));
15613 ASSERT_FALSE (integer_onep (i_m1));
15614 ASSERT_FALSE (integer_onep (wr_i_m1));
15615 ASSERT_FALSE (integer_onep (f_0));
15616 ASSERT_FALSE (integer_onep (wr_f_0));
15617 ASSERT_FALSE (integer_onep (f_1));
15618 ASSERT_FALSE (integer_onep (wr_f_1));
15619 ASSERT_FALSE (integer_onep (f_m1));
15620 ASSERT_FALSE (integer_onep (wr_f_m1));
15621 ASSERT_FALSE (integer_onep (c_i_0));
15622 ASSERT_TRUE (integer_onep (c_i_1));
15623 ASSERT_FALSE (integer_onep (c_i_m1));
15624 ASSERT_FALSE (integer_onep (c_f_0));
15625 ASSERT_FALSE (integer_onep (c_f_1));
15626 ASSERT_FALSE (integer_onep (c_f_m1));
15628 /* Test integer_zerop. */
15629 ASSERT_TRUE (integer_zerop (i_0));
15630 ASSERT_TRUE (integer_zerop (wr_i_0));
15631 ASSERT_FALSE (integer_zerop (i_1));
15632 ASSERT_FALSE (integer_zerop (wr_i_1));
15633 ASSERT_FALSE (integer_zerop (i_m1));
15634 ASSERT_FALSE (integer_zerop (wr_i_m1));
15635 ASSERT_FALSE (integer_zerop (f_0));
15636 ASSERT_FALSE (integer_zerop (wr_f_0));
15637 ASSERT_FALSE (integer_zerop (f_1));
15638 ASSERT_FALSE (integer_zerop (wr_f_1));
15639 ASSERT_FALSE (integer_zerop (f_m1));
15640 ASSERT_FALSE (integer_zerop (wr_f_m1));
15641 ASSERT_TRUE (integer_zerop (c_i_0));
15642 ASSERT_FALSE (integer_zerop (c_i_1));
15643 ASSERT_FALSE (integer_zerop (c_i_m1));
15644 ASSERT_FALSE (integer_zerop (c_f_0));
15645 ASSERT_FALSE (integer_zerop (c_f_1));
15646 ASSERT_FALSE (integer_zerop (c_f_m1));
15648 /* Test integer_all_onesp. */
15649 ASSERT_FALSE (integer_all_onesp (i_0));
15650 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15651 ASSERT_FALSE (integer_all_onesp (i_1));
15652 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15653 ASSERT_TRUE (integer_all_onesp (i_m1));
15654 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15655 ASSERT_FALSE (integer_all_onesp (f_0));
15656 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15657 ASSERT_FALSE (integer_all_onesp (f_1));
15658 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15659 ASSERT_FALSE (integer_all_onesp (f_m1));
15660 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15661 ASSERT_FALSE (integer_all_onesp (c_i_0));
15662 ASSERT_FALSE (integer_all_onesp (c_i_1));
15663 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15664 ASSERT_FALSE (integer_all_onesp (c_f_0));
15665 ASSERT_FALSE (integer_all_onesp (c_f_1));
15666 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15668 /* Test integer_minus_onep. */
15669 ASSERT_FALSE (integer_minus_onep (i_0));
15670 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15671 ASSERT_FALSE (integer_minus_onep (i_1));
15672 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15673 ASSERT_TRUE (integer_minus_onep (i_m1));
15674 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15675 ASSERT_FALSE (integer_minus_onep (f_0));
15676 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15677 ASSERT_FALSE (integer_minus_onep (f_1));
15678 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15679 ASSERT_FALSE (integer_minus_onep (f_m1));
15680 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15681 ASSERT_FALSE (integer_minus_onep (c_i_0));
15682 ASSERT_FALSE (integer_minus_onep (c_i_1));
15683 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15684 ASSERT_FALSE (integer_minus_onep (c_f_0));
15685 ASSERT_FALSE (integer_minus_onep (c_f_1));
15686 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15688 /* Test integer_each_onep. */
15689 ASSERT_FALSE (integer_each_onep (i_0));
15690 ASSERT_FALSE (integer_each_onep (wr_i_0));
15691 ASSERT_TRUE (integer_each_onep (i_1));
15692 ASSERT_TRUE (integer_each_onep (wr_i_1));
15693 ASSERT_FALSE (integer_each_onep (i_m1));
15694 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15695 ASSERT_FALSE (integer_each_onep (f_0));
15696 ASSERT_FALSE (integer_each_onep (wr_f_0));
15697 ASSERT_FALSE (integer_each_onep (f_1));
15698 ASSERT_FALSE (integer_each_onep (wr_f_1));
15699 ASSERT_FALSE (integer_each_onep (f_m1));
15700 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15701 ASSERT_FALSE (integer_each_onep (c_i_0));
15702 ASSERT_FALSE (integer_each_onep (c_i_1));
15703 ASSERT_FALSE (integer_each_onep (c_i_m1));
15704 ASSERT_FALSE (integer_each_onep (c_f_0));
15705 ASSERT_FALSE (integer_each_onep (c_f_1));
15706 ASSERT_FALSE (integer_each_onep (c_f_m1));
15708 /* Test integer_truep. */
15709 ASSERT_FALSE (integer_truep (i_0));
15710 ASSERT_FALSE (integer_truep (wr_i_0));
15711 ASSERT_TRUE (integer_truep (i_1));
15712 ASSERT_TRUE (integer_truep (wr_i_1));
15713 ASSERT_FALSE (integer_truep (i_m1));
15714 ASSERT_FALSE (integer_truep (wr_i_m1));
15715 ASSERT_FALSE (integer_truep (f_0));
15716 ASSERT_FALSE (integer_truep (wr_f_0));
15717 ASSERT_FALSE (integer_truep (f_1));
15718 ASSERT_FALSE (integer_truep (wr_f_1));
15719 ASSERT_FALSE (integer_truep (f_m1));
15720 ASSERT_FALSE (integer_truep (wr_f_m1));
15721 ASSERT_FALSE (integer_truep (c_i_0));
15722 ASSERT_TRUE (integer_truep (c_i_1));
15723 ASSERT_FALSE (integer_truep (c_i_m1));
15724 ASSERT_FALSE (integer_truep (c_f_0));
15725 ASSERT_FALSE (integer_truep (c_f_1));
15726 ASSERT_FALSE (integer_truep (c_f_m1));
15728 /* Test integer_nonzerop. */
15729 ASSERT_FALSE (integer_nonzerop (i_0));
15730 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15731 ASSERT_TRUE (integer_nonzerop (i_1));
15732 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15733 ASSERT_TRUE (integer_nonzerop (i_m1));
15734 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15735 ASSERT_FALSE (integer_nonzerop (f_0));
15736 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15737 ASSERT_FALSE (integer_nonzerop (f_1));
15738 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15739 ASSERT_FALSE (integer_nonzerop (f_m1));
15740 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15741 ASSERT_FALSE (integer_nonzerop (c_i_0));
15742 ASSERT_TRUE (integer_nonzerop (c_i_1));
15743 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15744 ASSERT_FALSE (integer_nonzerop (c_f_0));
15745 ASSERT_FALSE (integer_nonzerop (c_f_1));
15746 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15748 /* Test real_zerop. */
15749 ASSERT_FALSE (real_zerop (i_0));
15750 ASSERT_FALSE (real_zerop (wr_i_0));
15751 ASSERT_FALSE (real_zerop (i_1));
15752 ASSERT_FALSE (real_zerop (wr_i_1));
15753 ASSERT_FALSE (real_zerop (i_m1));
15754 ASSERT_FALSE (real_zerop (wr_i_m1));
15755 ASSERT_TRUE (real_zerop (f_0));
15756 ASSERT_TRUE (real_zerop (wr_f_0));
15757 ASSERT_FALSE (real_zerop (f_1));
15758 ASSERT_FALSE (real_zerop (wr_f_1));
15759 ASSERT_FALSE (real_zerop (f_m1));
15760 ASSERT_FALSE (real_zerop (wr_f_m1));
15761 ASSERT_FALSE (real_zerop (c_i_0));
15762 ASSERT_FALSE (real_zerop (c_i_1));
15763 ASSERT_FALSE (real_zerop (c_i_m1));
15764 ASSERT_TRUE (real_zerop (c_f_0));
15765 ASSERT_FALSE (real_zerop (c_f_1));
15766 ASSERT_FALSE (real_zerop (c_f_m1));
15768 /* Test real_onep. */
15769 ASSERT_FALSE (real_onep (i_0));
15770 ASSERT_FALSE (real_onep (wr_i_0));
15771 ASSERT_FALSE (real_onep (i_1));
15772 ASSERT_FALSE (real_onep (wr_i_1));
15773 ASSERT_FALSE (real_onep (i_m1));
15774 ASSERT_FALSE (real_onep (wr_i_m1));
15775 ASSERT_FALSE (real_onep (f_0));
15776 ASSERT_FALSE (real_onep (wr_f_0));
15777 ASSERT_TRUE (real_onep (f_1));
15778 ASSERT_TRUE (real_onep (wr_f_1));
15779 ASSERT_FALSE (real_onep (f_m1));
15780 ASSERT_FALSE (real_onep (wr_f_m1));
15781 ASSERT_FALSE (real_onep (c_i_0));
15782 ASSERT_FALSE (real_onep (c_i_1));
15783 ASSERT_FALSE (real_onep (c_i_m1));
15784 ASSERT_FALSE (real_onep (c_f_0));
15785 ASSERT_TRUE (real_onep (c_f_1));
15786 ASSERT_FALSE (real_onep (c_f_m1));
15788 /* Test real_minus_onep. */
15789 ASSERT_FALSE (real_minus_onep (i_0));
15790 ASSERT_FALSE (real_minus_onep (wr_i_0));
15791 ASSERT_FALSE (real_minus_onep (i_1));
15792 ASSERT_FALSE (real_minus_onep (wr_i_1));
15793 ASSERT_FALSE (real_minus_onep (i_m1));
15794 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15795 ASSERT_FALSE (real_minus_onep (f_0));
15796 ASSERT_FALSE (real_minus_onep (wr_f_0));
15797 ASSERT_FALSE (real_minus_onep (f_1));
15798 ASSERT_FALSE (real_minus_onep (wr_f_1));
15799 ASSERT_TRUE (real_minus_onep (f_m1));
15800 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15801 ASSERT_FALSE (real_minus_onep (c_i_0));
15802 ASSERT_FALSE (real_minus_onep (c_i_1));
15803 ASSERT_FALSE (real_minus_onep (c_i_m1));
15804 ASSERT_FALSE (real_minus_onep (c_f_0));
15805 ASSERT_FALSE (real_minus_onep (c_f_1));
15806 ASSERT_TRUE (real_minus_onep (c_f_m1));
15808 /* Test zerop. */
15809 ASSERT_TRUE (zerop (i_0));
15810 ASSERT_TRUE (zerop (wr_i_0));
15811 ASSERT_FALSE (zerop (i_1));
15812 ASSERT_FALSE (zerop (wr_i_1));
15813 ASSERT_FALSE (zerop (i_m1));
15814 ASSERT_FALSE (zerop (wr_i_m1));
15815 ASSERT_TRUE (zerop (f_0));
15816 ASSERT_TRUE (zerop (wr_f_0));
15817 ASSERT_FALSE (zerop (f_1));
15818 ASSERT_FALSE (zerop (wr_f_1));
15819 ASSERT_FALSE (zerop (f_m1));
15820 ASSERT_FALSE (zerop (wr_f_m1));
15821 ASSERT_TRUE (zerop (c_i_0));
15822 ASSERT_FALSE (zerop (c_i_1));
15823 ASSERT_FALSE (zerop (c_i_m1));
15824 ASSERT_TRUE (zerop (c_f_0));
15825 ASSERT_FALSE (zerop (c_f_1));
15826 ASSERT_FALSE (zerop (c_f_m1));
15828 /* Test tree_expr_nonnegative_p. */
15829 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15830 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15831 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15832 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15833 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15834 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15835 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15836 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15837 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15838 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15839 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15840 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15841 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15842 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15843 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15844 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15845 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15846 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15848 /* Test tree_expr_nonzero_p. */
15849 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15850 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15851 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15852 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15853 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15854 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15856 /* Test integer_valued_real_p. */
15857 ASSERT_FALSE (integer_valued_real_p (i_0));
15858 ASSERT_TRUE (integer_valued_real_p (f_0));
15859 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15860 ASSERT_TRUE (integer_valued_real_p (f_1));
15861 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15863 /* Test integer_pow2p. */
15864 ASSERT_FALSE (integer_pow2p (i_0));
15865 ASSERT_TRUE (integer_pow2p (i_1));
15866 ASSERT_TRUE (integer_pow2p (wr_i_1));
15868 /* Test uniform_integer_cst_p. */
15869 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15870 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15871 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15872 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15873 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15874 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15875 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15876 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15877 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15878 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15879 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15880 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15881 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15882 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15883 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15884 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15885 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15886 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15887 }
15889 /* Check that string escaping works correctly. */
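/* escaped_string::escape turns control characters into escape
   sequences, except that a newline is only escaped while the diagnostic
   printer's line cutoff (-fmessage-length) is 0; with a nonzero cutoff
   the '\n' is kept as-is, presumably so that it can still act as a line
   break when the message is wrapped.  */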
15891 static void
15892 test_escaped_strings (void)
15893 {
15894 int saved_cutoff;
15895 escaped_string msg;
15897 msg.escape (NULL);
15898 /* ASSERT_STREQ does not accept NULL as a valid test
15899 result, so we have to use ASSERT_EQ instead. */
15900 ASSERT_EQ (NULL, (const char *) msg);
15902 msg.escape ("");
15903 ASSERT_STREQ ("", (const char *) msg);
15905 msg.escape ("foobar");
15906 ASSERT_STREQ ("foobar", (const char *) msg);
15908 /* Ensure that we have -fmessage-length set to 0. */
15909 saved_cutoff = pp_line_cutoff (global_dc->printer);
15910 pp_line_cutoff (global_dc->printer) = 0;
15912 msg.escape ("foo\nbar");
15913 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15915 msg.escape ("\a\b\f\n\r\t\v");
15916 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15918 /* Now repeat the tests with -fmessage-length set to 5. */
15919 pp_line_cutoff (global_dc->printer) = 5;
15921 /* Note that the newline is not translated into an escape. */
15922 msg.escape ("foo\nbar");
15923 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15925 msg.escape ("\a\b\f\n\r\t\v");
15926 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15928 /* Restore the original message length setting. */
15929 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15930 }
15932 /* Run all of the selftests within this file. */
15934 void
15935 tree_c_tests ()
15936 {
15937 test_integer_constants ();
15938 test_identifiers ();
15939 test_labels ();
15940 test_vector_cst_patterns ();
15941 test_location_wrappers ();
15942 test_predicates ();
15943 test_escaped_strings ();
15944 }
15946 } // namespace selftest
15948 #endif /* CHECKING_P */
15950 #include "gt-tree.h"