1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
71 /* Tree code classes. */
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
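/* A minimal sketch of how the two tables above are consumed elsewhere:
   for a binary arithmetic code such as PLUS_EXPR they record

     tree_code_type[PLUS_EXPR]   == tcc_binary
     tree_code_length[PLUS_EXPR] == 2

   which is the data read back through the TREE_CODE_CLASS and
   TREE_CODE_LENGTH macros.  */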
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
112 const char *const tree_code_class_strings[] =
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 /* Statistics-gathering stuff. */
132 static uint64_t tree_code_counts[MAX_TREE_CODES];
133 uint64_t tree_node_counts[(int) all_kinds];
134 uint64_t tree_node_sizes[(int) all_kinds];
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
167 struct GTY((for_user)) type_hash {
168 unsigned long hash;
169 tree type;
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
175 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 static hashval_t hash (type_hash *t) { return t->hash; }
178 static bool equal (type_hash *a, type_hash *b);
180 static int
181 keep_cache_entry (type_hash *&t)
183 return ggc_marked_p (t->type);
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
194 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
199 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 static hashval_t hash (tree t);
202 static bool equal (tree x, tree y);
205 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 /* Class and variable for making sure that there is a single POLY_INT_CST
208 for a given value. */
209 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
211 typedef std::pair<tree, const poly_wide_int *> compare_type;
212 static hashval_t hash (tree t);
213 static bool equal (tree x, const compare_type &y);
216 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218 /* Hash table for optimization flags and target option flags. Use the same
219 hash table for both sets of options. Nodes for building the current
220 optimization and target option nodes. The assumption is most of the time
221 the options created will already be in the hash table, so we avoid
222 allocating and freeing up a node repeatedly. */
223 static GTY (()) tree cl_optimization_node;
224 static GTY (()) tree cl_target_option_node;
226 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
228 static hashval_t hash (tree t);
229 static bool equal (tree x, tree y);
232 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234 /* General tree->tree mapping structure for use in hash tables. */
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
240 static GTY ((cache))
241 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
245 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
247 static bool
248 equal (tree_vec_map *a, tree_vec_map *b)
250 return a->base.from == b->base.from;
253 static int
254 keep_cache_entry (tree_vec_map *&m)
256 return ggc_marked_p (m->base.from);
260 static GTY ((cache))
261 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263 static void set_type_quals (tree, int);
264 static void print_type_hash_statistics (void);
265 static void print_debug_expr_statistics (void);
266 static void print_value_expr_statistics (void);
268 static tree build_array_type_1 (tree, tree, bool, bool);
270 tree global_trees[TI_MAX];
271 tree integer_types[itk_none];
273 bool int_n_enabled_p[NUM_INT_N_ENTS];
274 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
276 bool tree_contains_struct[MAX_TREE_CODES][64];
278 /* Number of operands for each OpenMP clause. */
279 unsigned const char omp_clause_num_ops[] =
281 0, /* OMP_CLAUSE_ERROR */
282 1, /* OMP_CLAUSE_PRIVATE */
283 1, /* OMP_CLAUSE_SHARED */
284 1, /* OMP_CLAUSE_FIRSTPRIVATE */
285 2, /* OMP_CLAUSE_LASTPRIVATE */
286 5, /* OMP_CLAUSE_REDUCTION */
287 5, /* OMP_CLAUSE_TASK_REDUCTION */
288 5, /* OMP_CLAUSE_IN_REDUCTION */
289 1, /* OMP_CLAUSE_COPYIN */
290 1, /* OMP_CLAUSE_COPYPRIVATE */
291 3, /* OMP_CLAUSE_LINEAR */
292 2, /* OMP_CLAUSE_ALIGNED */
293 1, /* OMP_CLAUSE_DEPEND */
294 1, /* OMP_CLAUSE_NONTEMPORAL */
295 1, /* OMP_CLAUSE_UNIFORM */
296 1, /* OMP_CLAUSE_TO_DECLARE */
297 1, /* OMP_CLAUSE_LINK */
298 2, /* OMP_CLAUSE_FROM */
299 2, /* OMP_CLAUSE_TO */
300 2, /* OMP_CLAUSE_MAP */
301 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
302 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
303 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
304 1, /* OMP_CLAUSE_INCLUSIVE */
305 1, /* OMP_CLAUSE_EXCLUSIVE */
306 2, /* OMP_CLAUSE__CACHE_ */
307 2, /* OMP_CLAUSE_GANG */
308 1, /* OMP_CLAUSE_ASYNC */
309 1, /* OMP_CLAUSE_WAIT */
310 0, /* OMP_CLAUSE_AUTO */
311 0, /* OMP_CLAUSE_SEQ */
312 1, /* OMP_CLAUSE__LOOPTEMP_ */
313 1, /* OMP_CLAUSE__REDUCTEMP_ */
314 1, /* OMP_CLAUSE__CONDTEMP_ */
315 1, /* OMP_CLAUSE__SCANTEMP_ */
316 1, /* OMP_CLAUSE_IF */
317 1, /* OMP_CLAUSE_NUM_THREADS */
318 1, /* OMP_CLAUSE_SCHEDULE */
319 0, /* OMP_CLAUSE_NOWAIT */
320 1, /* OMP_CLAUSE_ORDERED */
321 0, /* OMP_CLAUSE_DEFAULT */
322 3, /* OMP_CLAUSE_COLLAPSE */
323 0, /* OMP_CLAUSE_UNTIED */
324 1, /* OMP_CLAUSE_FINAL */
325 0, /* OMP_CLAUSE_MERGEABLE */
326 1, /* OMP_CLAUSE_DEVICE */
327 1, /* OMP_CLAUSE_DIST_SCHEDULE */
328 0, /* OMP_CLAUSE_INBRANCH */
329 0, /* OMP_CLAUSE_NOTINBRANCH */
330 1, /* OMP_CLAUSE_NUM_TEAMS */
331 1, /* OMP_CLAUSE_THREAD_LIMIT */
332 0, /* OMP_CLAUSE_PROC_BIND */
333 1, /* OMP_CLAUSE_SAFELEN */
334 1, /* OMP_CLAUSE_SIMDLEN */
335 0, /* OMP_CLAUSE_DEVICE_TYPE */
336 0, /* OMP_CLAUSE_FOR */
337 0, /* OMP_CLAUSE_PARALLEL */
338 0, /* OMP_CLAUSE_SECTIONS */
339 0, /* OMP_CLAUSE_TASKGROUP */
340 1, /* OMP_CLAUSE_PRIORITY */
341 1, /* OMP_CLAUSE_GRAINSIZE */
342 1, /* OMP_CLAUSE_NUM_TASKS */
343 0, /* OMP_CLAUSE_NOGROUP */
344 0, /* OMP_CLAUSE_THREADS */
345 0, /* OMP_CLAUSE_SIMD */
346 1, /* OMP_CLAUSE_HINT */
347 0, /* OMP_CLAUSE_DEFAULTMAP */
348 0, /* OMP_CLAUSE_ORDER */
349 0, /* OMP_CLAUSE_BIND */
350 1, /* OMP_CLAUSE__SIMDUID_ */
351 0, /* OMP_CLAUSE__SIMT_ */
352 0, /* OMP_CLAUSE_INDEPENDENT */
353 1, /* OMP_CLAUSE_WORKER */
354 1, /* OMP_CLAUSE_VECTOR */
355 1, /* OMP_CLAUSE_NUM_GANGS */
356 1, /* OMP_CLAUSE_NUM_WORKERS */
357 1, /* OMP_CLAUSE_VECTOR_LENGTH */
358 3, /* OMP_CLAUSE_TILE */
359 2, /* OMP_CLAUSE__GRIDDIM_ */
360 0, /* OMP_CLAUSE_IF_PRESENT */
361 0, /* OMP_CLAUSE_FINALIZE */
364 const char * const omp_clause_code_name[] =
366 "error_clause",
367 "private",
368 "shared",
369 "firstprivate",
370 "lastprivate",
371 "reduction",
372 "task_reduction",
373 "in_reduction",
374 "copyin",
375 "copyprivate",
376 "linear",
377 "aligned",
378 "depend",
379 "nontemporal",
380 "uniform",
381 "to",
382 "link",
383 "from",
384 "to",
385 "map",
386 "use_device_ptr",
387 "use_device_addr",
388 "is_device_ptr",
389 "inclusive",
390 "exclusive",
391 "_cache_",
392 "gang",
393 "async",
394 "wait",
395 "auto",
396 "seq",
397 "_looptemp_",
398 "_reductemp_",
399 "_condtemp_",
400 "_scantemp_",
401 "if",
402 "num_threads",
403 "schedule",
404 "nowait",
405 "ordered",
406 "default",
407 "collapse",
408 "untied",
409 "final",
410 "mergeable",
411 "device",
412 "dist_schedule",
413 "inbranch",
414 "notinbranch",
415 "num_teams",
416 "thread_limit",
417 "proc_bind",
418 "safelen",
419 "simdlen",
420 "device_type",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "priority",
426 "grainsize",
427 "num_tasks",
428 "nogroup",
429 "threads",
430 "simd",
431 "hint",
432 "defaultmap",
433 "order",
434 "bind",
435 "_simduid_",
436 "_simt_",
437 "independent",
438 "worker",
439 "vector",
440 "num_gangs",
441 "num_workers",
442 "vector_length",
443 "tile",
444 "_griddim_",
445 "if_present",
446 "finalize",
450 /* Return the tree node structure used by tree code CODE. */
452 static inline enum tree_node_structure_enum
453 tree_node_structure_for_code (enum tree_code code)
455 switch (TREE_CODE_CLASS (code))
457 case tcc_declaration:
459 switch (code)
461 case FIELD_DECL:
462 return TS_FIELD_DECL;
463 case PARM_DECL:
464 return TS_PARM_DECL;
465 case VAR_DECL:
466 return TS_VAR_DECL;
467 case LABEL_DECL:
468 return TS_LABEL_DECL;
469 case RESULT_DECL:
470 return TS_RESULT_DECL;
471 case DEBUG_EXPR_DECL:
472 return TS_DECL_WRTL;
473 case CONST_DECL:
474 return TS_CONST_DECL;
475 case TYPE_DECL:
476 return TS_TYPE_DECL;
477 case FUNCTION_DECL:
478 return TS_FUNCTION_DECL;
479 case TRANSLATION_UNIT_DECL:
480 return TS_TRANSLATION_UNIT_DECL;
481 default:
482 return TS_DECL_NON_COMMON;
485 case tcc_type:
486 return TS_TYPE_NON_COMMON;
487 case tcc_reference:
488 case tcc_comparison:
489 case tcc_unary:
490 case tcc_binary:
491 case tcc_expression:
492 case tcc_statement:
493 case tcc_vl_exp:
494 return TS_EXP;
495 default: /* tcc_constant and tcc_exceptional */
496 break;
498 switch (code)
500 /* tcc_constant cases. */
501 case VOID_CST: return TS_TYPED;
502 case INTEGER_CST: return TS_INT_CST;
503 case POLY_INT_CST: return TS_POLY_INT_CST;
504 case REAL_CST: return TS_REAL_CST;
505 case FIXED_CST: return TS_FIXED_CST;
506 case COMPLEX_CST: return TS_COMPLEX;
507 case VECTOR_CST: return TS_VECTOR;
508 case STRING_CST: return TS_STRING;
509 /* tcc_exceptional cases. */
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case TREE_LIST: return TS_LIST;
513 case TREE_VEC: return TS_VEC;
514 case SSA_NAME: return TS_SSA_NAME;
515 case PLACEHOLDER_EXPR: return TS_COMMON;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case BLOCK: return TS_BLOCK;
518 case CONSTRUCTOR: return TS_CONSTRUCTOR;
519 case TREE_BINFO: return TS_BINFO;
520 case OMP_CLAUSE: return TS_OMP_CLAUSE;
521 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
522 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
524 default:
525 gcc_unreachable ();
530 /* Initialize tree_contains_struct to describe the hierarchy of tree
531 nodes. */
533 static void
534 initialize_tree_contains_struct (void)
536 unsigned i;
538 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
540 enum tree_code code;
541 enum tree_node_structure_enum ts_code;
543 code = (enum tree_code) i;
544 ts_code = tree_node_structure_for_code (code);
546 /* Mark the TS structure itself. */
547 tree_contains_struct[code][ts_code] = 1;
549 /* Mark all the structures that TS is derived from. */
550 switch (ts_code)
552 case TS_TYPED:
553 case TS_BLOCK:
554 case TS_OPTIMIZATION:
555 case TS_TARGET_OPTION:
556 MARK_TS_BASE (code);
557 break;
559 case TS_COMMON:
560 case TS_INT_CST:
561 case TS_POLY_INT_CST:
562 case TS_REAL_CST:
563 case TS_FIXED_CST:
564 case TS_VECTOR:
565 case TS_STRING:
566 case TS_COMPLEX:
567 case TS_SSA_NAME:
568 case TS_CONSTRUCTOR:
569 case TS_EXP:
570 case TS_STATEMENT_LIST:
571 MARK_TS_TYPED (code);
572 break;
574 case TS_IDENTIFIER:
575 case TS_DECL_MINIMAL:
576 case TS_TYPE_COMMON:
577 case TS_LIST:
578 case TS_VEC:
579 case TS_BINFO:
580 case TS_OMP_CLAUSE:
581 MARK_TS_COMMON (code);
582 break;
584 case TS_TYPE_WITH_LANG_SPECIFIC:
585 MARK_TS_TYPE_COMMON (code);
586 break;
588 case TS_TYPE_NON_COMMON:
589 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
590 break;
592 case TS_DECL_COMMON:
593 MARK_TS_DECL_MINIMAL (code);
594 break;
596 case TS_DECL_WRTL:
597 case TS_CONST_DECL:
598 MARK_TS_DECL_COMMON (code);
599 break;
601 case TS_DECL_NON_COMMON:
602 MARK_TS_DECL_WITH_VIS (code);
603 break;
605 case TS_DECL_WITH_VIS:
606 case TS_PARM_DECL:
607 case TS_LABEL_DECL:
608 case TS_RESULT_DECL:
609 MARK_TS_DECL_WRTL (code);
610 break;
612 case TS_FIELD_DECL:
613 MARK_TS_DECL_COMMON (code);
614 break;
616 case TS_VAR_DECL:
617 MARK_TS_DECL_WITH_VIS (code);
618 break;
620 case TS_TYPE_DECL:
621 case TS_FUNCTION_DECL:
622 MARK_TS_DECL_NON_COMMON (code);
623 break;
625 case TS_TRANSLATION_UNIT_DECL:
626 MARK_TS_DECL_COMMON (code);
627 break;
629 default:
630 gcc_unreachable ();
634 /* Basic consistency checks for attributes used in fold. */
635 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
636 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
637 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
645 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
646 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
651 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
659 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
660 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
662 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
663 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
664 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
665 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
666 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
667 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
668 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
669 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
670 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
671 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
673 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
674 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
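/* A minimal sketch of how the table filled in above is consulted: the
   CODE_CONTAINS_STRUCT macro simply reads tree_contains_struct, so after
   initialization

     CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS)   is true
     CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_WITH_VIS) is false

   mirroring the consistency checks above.  */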
678 /* Init tree.c. */
680 void
681 init_ttree (void)
683 /* Initialize the hash table of types. */
684 type_hash_table
685 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
687 debug_expr_for_decl
688 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
690 value_expr_for_decl
691 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
693 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
695 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
697 int_cst_node = make_int_cst (1, 1);
699 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
701 cl_optimization_node = make_node (OPTIMIZATION_NODE);
702 cl_target_option_node = make_node (TARGET_OPTION_NODE);
704 /* Initialize the tree_contains_struct array. */
705 initialize_tree_contains_struct ();
706 lang_hooks.init_ts ();
710 /* The name of the object as the assembler will see it (but before any
711 translations made by ASM_OUTPUT_LABELREF). Often this is the same
712 as DECL_NAME. It is an IDENTIFIER_NODE. */
713 tree
714 decl_assembler_name (tree decl)
716 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
717 lang_hooks.set_decl_assembler_name (decl);
718 return DECL_ASSEMBLER_NAME_RAW (decl);
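/* A hypothetical caller sketch, assuming a FUNCTION_DECL FNDECL: the
   result is an IDENTIFIER_NODE, so the assembler name is available as a
   C string via

     const char *name = IDENTIFIER_POINTER (decl_assembler_name (fndecl));
*/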
721 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
722 (either of which may be NULL). Inform the FE, if this changes the
723 name. */
725 void
726 overwrite_decl_assembler_name (tree decl, tree name)
728 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
729 lang_hooks.overwrite_decl_assembler_name (decl, name);
732 /* When the target supports COMDAT groups, this indicates which group the
733 DECL is associated with. This can be either an IDENTIFIER_NODE or a
734 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
735 tree
736 decl_comdat_group (const_tree node)
738 struct symtab_node *snode = symtab_node::get (node);
739 if (!snode)
740 return NULL;
741 return snode->get_comdat_group ();
744 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
745 tree
746 decl_comdat_group_id (const_tree node)
748 struct symtab_node *snode = symtab_node::get (node);
749 if (!snode)
750 return NULL;
751 return snode->get_comdat_group_id ();
754 /* When the target supports named sections, return the name of the section
755 that NODE is placed in, as a string, or NULL if it is in no section. */
756 const char *
757 decl_section_name (const_tree node)
759 struct symtab_node *snode = symtab_node::get (node);
760 if (!snode)
761 return NULL;
762 return snode->get_section ();
765 /* Set the section name of NODE to VALUE, a string, or clear it
766 when VALUE is NULL. */
767 void
768 set_decl_section_name (tree node, const char *value)
770 struct symtab_node *snode;
772 if (value == NULL)
774 snode = symtab_node::get (node);
775 if (!snode)
776 return;
778 else if (VAR_P (node))
779 snode = varpool_node::get_create (node);
780 else
781 snode = cgraph_node::get_create (node);
782 snode->set_section (value);
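/* A usage sketch with a hypothetical variable DECL: place it into a
   named section, or drop any section previously set on it:

     set_decl_section_name (decl, ".my_data");
     set_decl_section_name (decl, NULL);

   Passing NULL only takes effect when a symtab node already exists.  */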
785 /* Return TLS model of a variable NODE. */
786 enum tls_model
787 decl_tls_model (const_tree node)
789 struct varpool_node *snode = varpool_node::get (node);
790 if (!snode)
791 return TLS_MODEL_NONE;
792 return snode->tls_model;
795 /* Set TLS model of variable NODE to MODEL. */
796 void
797 set_decl_tls_model (tree node, enum tls_model model)
799 struct varpool_node *vnode;
801 if (model == TLS_MODEL_NONE)
803 vnode = varpool_node::get (node);
804 if (!vnode)
805 return;
807 else
808 vnode = varpool_node::get_create (node);
809 vnode->tls_model = model;
812 /* Compute the number of bytes occupied by a tree with code CODE.
813 This function cannot be used for nodes that have variable sizes,
814 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
815 size_t
816 tree_code_size (enum tree_code code)
818 switch (TREE_CODE_CLASS (code))
820 case tcc_declaration: /* A decl node */
821 switch (code)
823 case FIELD_DECL: return sizeof (tree_field_decl);
824 case PARM_DECL: return sizeof (tree_parm_decl);
825 case VAR_DECL: return sizeof (tree_var_decl);
826 case LABEL_DECL: return sizeof (tree_label_decl);
827 case RESULT_DECL: return sizeof (tree_result_decl);
828 case CONST_DECL: return sizeof (tree_const_decl);
829 case TYPE_DECL: return sizeof (tree_type_decl);
830 case FUNCTION_DECL: return sizeof (tree_function_decl);
831 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
832 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
833 case NAMESPACE_DECL:
834 case IMPORTED_DECL:
835 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
836 default:
837 gcc_checking_assert (code >= NUM_TREE_CODES);
838 return lang_hooks.tree_size (code);
841 case tcc_type: /* a type node */
842 switch (code)
844 case OFFSET_TYPE:
845 case ENUMERAL_TYPE:
846 case BOOLEAN_TYPE:
847 case INTEGER_TYPE:
848 case REAL_TYPE:
849 case POINTER_TYPE:
850 case REFERENCE_TYPE:
851 case NULLPTR_TYPE:
852 case FIXED_POINT_TYPE:
853 case COMPLEX_TYPE:
854 case VECTOR_TYPE:
855 case ARRAY_TYPE:
856 case RECORD_TYPE:
857 case UNION_TYPE:
858 case QUAL_UNION_TYPE:
859 case VOID_TYPE:
860 case FUNCTION_TYPE:
861 case METHOD_TYPE:
862 case LANG_TYPE: return sizeof (tree_type_non_common);
863 default:
864 gcc_checking_assert (code >= NUM_TREE_CODES);
865 return lang_hooks.tree_size (code);
868 case tcc_reference: /* a reference */
869 case tcc_expression: /* an expression */
870 case tcc_statement: /* an expression with side effects */
871 case tcc_comparison: /* a comparison expression */
872 case tcc_unary: /* a unary arithmetic expression */
873 case tcc_binary: /* a binary arithmetic expression */
874 return (sizeof (struct tree_exp)
875 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
877 case tcc_constant: /* a constant */
878 switch (code)
880 case VOID_CST: return sizeof (tree_typed);
881 case INTEGER_CST: gcc_unreachable ();
882 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
883 case REAL_CST: return sizeof (tree_real_cst);
884 case FIXED_CST: return sizeof (tree_fixed_cst);
885 case COMPLEX_CST: return sizeof (tree_complex);
886 case VECTOR_CST: gcc_unreachable ();
887 case STRING_CST: gcc_unreachable ();
888 default:
889 gcc_checking_assert (code >= NUM_TREE_CODES);
890 return lang_hooks.tree_size (code);
893 case tcc_exceptional: /* something random, like an identifier. */
894 switch (code)
896 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
897 case TREE_LIST: return sizeof (tree_list);
899 case ERROR_MARK:
900 case PLACEHOLDER_EXPR: return sizeof (tree_common);
902 case TREE_VEC: gcc_unreachable ();
903 case OMP_CLAUSE: gcc_unreachable ();
905 case SSA_NAME: return sizeof (tree_ssa_name);
907 case STATEMENT_LIST: return sizeof (tree_statement_list);
908 case BLOCK: return sizeof (struct tree_block);
909 case CONSTRUCTOR: return sizeof (tree_constructor);
910 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
911 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
913 default:
914 gcc_checking_assert (code >= NUM_TREE_CODES);
915 return lang_hooks.tree_size (code);
918 default:
919 gcc_unreachable ();
923 /* Compute the number of bytes occupied by NODE. This routine only
924 looks at TREE_CODE, except for those nodes that have variable sizes. */
925 size_t
926 tree_size (const_tree node)
928 const enum tree_code code = TREE_CODE (node);
929 switch (code)
931 case INTEGER_CST:
932 return (sizeof (struct tree_int_cst)
933 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
935 case TREE_BINFO:
936 return (offsetof (struct tree_binfo, base_binfos)
937 + vec<tree, va_gc>
938 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
940 case TREE_VEC:
941 return (sizeof (struct tree_vec)
942 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
944 case VECTOR_CST:
945 return (sizeof (struct tree_vector)
946 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
948 case STRING_CST:
949 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
951 case OMP_CLAUSE:
952 return (sizeof (struct tree_omp_clause)
953 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
954 * sizeof (tree));
956 default:
957 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
958 return (sizeof (struct tree_exp)
959 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
960 else
961 return tree_code_size (code);
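/* A worked example: for a fixed-size code the two functions agree, e.g.
   a PLUS_EXPR node occupies

     sizeof (struct tree_exp) + (2 - 1) * sizeof (tree)

   bytes.  For variable-sized nodes such as INTEGER_CST, TREE_VEC or
   OMP_CLAUSE only tree_size, which inspects the node itself, may be
   used; tree_code_size asserts for them.  */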
965 /* Return tree node kind based on tree CODE. */
967 static tree_node_kind
968 get_stats_node_kind (enum tree_code code)
970 enum tree_code_class type = TREE_CODE_CLASS (code);
972 switch (type)
974 case tcc_declaration: /* A decl node */
975 return d_kind;
976 case tcc_type: /* a type node */
977 return t_kind;
978 case tcc_statement: /* an expression with side effects */
979 return s_kind;
980 case tcc_reference: /* a reference */
981 return r_kind;
982 case tcc_expression: /* an expression */
983 case tcc_comparison: /* a comparison expression */
984 case tcc_unary: /* a unary arithmetic expression */
985 case tcc_binary: /* a binary arithmetic expression */
986 return e_kind;
987 case tcc_constant: /* a constant */
988 return c_kind;
989 case tcc_exceptional: /* something random, like an identifier. */
990 switch (code)
992 case IDENTIFIER_NODE:
993 return id_kind;
994 case TREE_VEC:
995 return vec_kind;
996 case TREE_BINFO:
997 return binfo_kind;
998 case SSA_NAME:
999 return ssa_name_kind;
1000 case BLOCK:
1001 return b_kind;
1002 case CONSTRUCTOR:
1003 return constr_kind;
1004 case OMP_CLAUSE:
1005 return omp_clause_kind;
1006 default:
1007 return x_kind;
1009 break;
1010 case tcc_vl_exp:
1011 return e_kind;
1012 default:
1013 gcc_unreachable ();
1017 /* Record interesting allocation statistics for a tree node with CODE
1018 and LENGTH. */
1020 static void
1021 record_node_allocation_statistics (enum tree_code code, size_t length)
1023 if (!GATHER_STATISTICS)
1024 return;
1026 tree_node_kind kind = get_stats_node_kind (code);
1028 tree_code_counts[(int) code]++;
1029 tree_node_counts[(int) kind]++;
1030 tree_node_sizes[(int) kind] += length;
1033 /* Allocate and return a new UID from the DECL_UID namespace. */
1036 allocate_decl_uid (void)
1038 return next_decl_uid++;
1041 /* Return a newly allocated node of code CODE. For decl and type
1042 nodes, some other fields are initialized. The rest of the node is
1043 initialized to zero. This function cannot be used for TREE_VEC,
1044 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1045 tree_code_size.
1047 Achoo! I got a code in the node. */
1049 tree
1050 make_node (enum tree_code code MEM_STAT_DECL)
1052 tree t;
1053 enum tree_code_class type = TREE_CODE_CLASS (code);
1054 size_t length = tree_code_size (code);
1056 record_node_allocation_statistics (code, length);
1058 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1059 TREE_SET_CODE (t, code);
1061 switch (type)
1063 case tcc_statement:
1064 if (code != DEBUG_BEGIN_STMT)
1065 TREE_SIDE_EFFECTS (t) = 1;
1066 break;
1068 case tcc_declaration:
1069 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1071 if (code == FUNCTION_DECL)
1073 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1074 SET_DECL_MODE (t, FUNCTION_MODE);
1076 else
1077 SET_DECL_ALIGN (t, 1);
1079 DECL_SOURCE_LOCATION (t) = input_location;
1080 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1081 DECL_UID (t) = --next_debug_decl_uid;
1082 else
1084 DECL_UID (t) = allocate_decl_uid ();
1085 SET_DECL_PT_UID (t, -1);
1087 if (TREE_CODE (t) == LABEL_DECL)
1088 LABEL_DECL_UID (t) = -1;
1090 break;
1092 case tcc_type:
1093 TYPE_UID (t) = next_type_uid++;
1094 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1095 TYPE_USER_ALIGN (t) = 0;
1096 TYPE_MAIN_VARIANT (t) = t;
1097 TYPE_CANONICAL (t) = t;
1099 /* Default to no attributes for type, but let target change that. */
1100 TYPE_ATTRIBUTES (t) = NULL_TREE;
1101 targetm.set_default_type_attributes (t);
1103 /* We have not yet computed the alias set for this type. */
1104 TYPE_ALIAS_SET (t) = -1;
1105 break;
1107 case tcc_constant:
1108 TREE_CONSTANT (t) = 1;
1109 break;
1111 case tcc_expression:
1112 switch (code)
1114 case INIT_EXPR:
1115 case MODIFY_EXPR:
1116 case VA_ARG_EXPR:
1117 case PREDECREMENT_EXPR:
1118 case PREINCREMENT_EXPR:
1119 case POSTDECREMENT_EXPR:
1120 case POSTINCREMENT_EXPR:
1121 /* All of these have side-effects, no matter what their
1122 operands are. */
1123 TREE_SIDE_EFFECTS (t) = 1;
1124 break;
1126 default:
1127 break;
1129 break;
1131 case tcc_exceptional:
1132 switch (code)
1134 case TARGET_OPTION_NODE:
1135 TREE_TARGET_OPTION(t)
1136 = ggc_cleared_alloc<struct cl_target_option> ();
1137 break;
1139 case OPTIMIZATION_NODE:
1140 TREE_OPTIMIZATION (t)
1141 = ggc_cleared_alloc<struct cl_optimization> ();
1142 break;
1144 default:
1145 break;
1147 break;
1149 default:
1150 /* Other classes need no special treatment. */
1151 break;
1154 return t;
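/* A minimal usage sketch: building a fresh type node and relying on the
   defaults set above:

     tree t = make_node (RECORD_TYPE);

   Here TYPE_MAIN_VARIANT (t) == t, TYPE_CANONICAL (t) == t,
   TYPE_ALIGN (t) == BITS_PER_UNIT and TYPE_ALIAS_SET (t) == -1.  */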
1157 /* Free tree node. */
1159 void
1160 free_node (tree node)
1162 enum tree_code code = TREE_CODE (node);
1163 if (GATHER_STATISTICS)
1165 enum tree_node_kind kind = get_stats_node_kind (code);
1167 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1168 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1169 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1171 tree_code_counts[(int) TREE_CODE (node)]--;
1172 tree_node_counts[(int) kind]--;
1173 tree_node_sizes[(int) kind] -= tree_size (node);
1175 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1176 vec_free (CONSTRUCTOR_ELTS (node));
1177 else if (code == BLOCK)
1178 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1179 else if (code == TREE_BINFO)
1180 vec_free (BINFO_BASE_ACCESSES (node));
1181 ggc_free (node);
1184 /* Return a new node with the same contents as NODE except that its
1185 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1187 tree
1188 copy_node (tree node MEM_STAT_DECL)
1190 tree t;
1191 enum tree_code code = TREE_CODE (node);
1192 size_t length;
1194 gcc_assert (code != STATEMENT_LIST);
1196 length = tree_size (node);
1197 record_node_allocation_statistics (code, length);
1198 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1199 memcpy (t, node, length);
1201 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1202 TREE_CHAIN (t) = 0;
1203 TREE_ASM_WRITTEN (t) = 0;
1204 TREE_VISITED (t) = 0;
1206 if (TREE_CODE_CLASS (code) == tcc_declaration)
1208 if (code == DEBUG_EXPR_DECL)
1209 DECL_UID (t) = --next_debug_decl_uid;
1210 else
1212 DECL_UID (t) = allocate_decl_uid ();
1213 if (DECL_PT_UID_SET_P (node))
1214 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1216 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1217 && DECL_HAS_VALUE_EXPR_P (node))
1219 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1220 DECL_HAS_VALUE_EXPR_P (t) = 1;
1222 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1223 if (VAR_P (node))
1225 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1226 t->decl_with_vis.symtab_node = NULL;
1228 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1230 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1231 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1233 if (TREE_CODE (node) == FUNCTION_DECL)
1235 DECL_STRUCT_FUNCTION (t) = NULL;
1236 t->decl_with_vis.symtab_node = NULL;
1239 else if (TREE_CODE_CLASS (code) == tcc_type)
1241 TYPE_UID (t) = next_type_uid++;
1242 /* The following is so that the debug code for
1243 the copy is different from the original type.
1244 The two statements usually duplicate each other
1245 (because they clear fields of the same union),
1246 but the optimizer should catch that. */
1247 TYPE_SYMTAB_ADDRESS (t) = 0;
1248 TYPE_SYMTAB_DIE (t) = 0;
1250 /* Do not copy the values cache. */
1251 if (TYPE_CACHED_VALUES_P (t))
1253 TYPE_CACHED_VALUES_P (t) = 0;
1254 TYPE_CACHED_VALUES (t) = NULL_TREE;
1257 else if (code == TARGET_OPTION_NODE)
1259 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1260 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1261 sizeof (struct cl_target_option));
1263 else if (code == OPTIMIZATION_NODE)
1265 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1266 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1267 sizeof (struct cl_optimization));
1270 return t;
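/* A hypothetical example, assuming a VAR_DECL DECL: the copy shares all
   contents bit-for-bit but gets a fresh DECL_UID, a cleared TREE_CHAIN
   and no symtab node:

     tree copy = copy_node (decl);
*/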
1273 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1274 For example, this can copy a list made of TREE_LIST nodes. */
1276 tree
1277 copy_list (tree list)
1279 tree head;
1280 tree prev, next;
1282 if (list == 0)
1283 return 0;
1285 head = prev = copy_node (list);
1286 next = TREE_CHAIN (list);
1287 while (next)
1289 TREE_CHAIN (prev) = copy_node (next);
1290 prev = TREE_CHAIN (prev);
1291 next = TREE_CHAIN (next);
1293 return head;
1297 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1298 INTEGER_CST with value CST and type TYPE. */
1300 static unsigned int
1301 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1303 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1304 /* We need extra HWIs if CST is an unsigned integer with its
1305 upper bit set. */
1306 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1307 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1308 return cst.get_len ();
1311 /* Return a new INTEGER_CST with value CST and type TYPE. */
1313 static tree
1314 build_new_int_cst (tree type, const wide_int &cst)
1316 unsigned int len = cst.get_len ();
1317 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1318 tree nt = make_int_cst (len, ext_len);
1320 if (len < ext_len)
1322 --ext_len;
1323 TREE_INT_CST_ELT (nt, ext_len)
1324 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1325 for (unsigned int i = len; i < ext_len; ++i)
1326 TREE_INT_CST_ELT (nt, i) = -1;
1328 else if (TYPE_UNSIGNED (type)
1329 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1331 len--;
1332 TREE_INT_CST_ELT (nt, len)
1333 = zext_hwi (cst.elt (len),
1334 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1337 for (unsigned int i = 0; i < len; i++)
1338 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1339 TREE_TYPE (nt) = type;
1340 return nt;
1343 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1345 static tree
1346 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1347 CXX_MEM_STAT_INFO)
1349 size_t length = sizeof (struct tree_poly_int_cst);
1350 record_node_allocation_statistics (POLY_INT_CST, length);
1352 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1354 TREE_SET_CODE (t, POLY_INT_CST);
1355 TREE_CONSTANT (t) = 1;
1356 TREE_TYPE (t) = type;
1357 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1358 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1359 return t;
1362 /* Create a constant tree that contains CST sign-extended to TYPE. */
1364 tree
1365 build_int_cst (tree type, poly_int64 cst)
1367 /* Support legacy code. */
1368 if (!type)
1369 type = integer_type_node;
1371 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1374 /* Create a constant tree that contains CST zero-extended to TYPE. */
1376 tree
1377 build_int_cstu (tree type, poly_uint64 cst)
1379 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1382 /* Create a constant tree that contains CST sign-extended to TYPE. */
1384 tree
1385 build_int_cst_type (tree type, poly_int64 cst)
1387 gcc_assert (type);
1388 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
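/* A usage sketch of the three entry points above and below, which differ
   in extension and in whether TYPE may be omitted:

     tree a = build_int_cst  (integer_type_node, -1);   sign-extended
     tree b = build_int_cstu (size_type_node, 7);        zero-extended
     tree c = build_int_cst  (NULL_TREE, 3);             defaults to integer_type_node

   build_int_cst_type additionally insists that TYPE be non-null.  */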
1391 /* Constructs a tree in type TYPE with the value given by CST. Signedness
1392 of CST is assumed to be the same as the signedness of TYPE. */
1394 tree
1395 double_int_to_tree (tree type, double_int cst)
1397 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1400 /* We force the wide_int CST to the range of the type TYPE by sign or
1401 zero extending it. OVERFLOWABLE indicates whether we are interested in
1402 overflow of the value: when >0 we are only interested in signed
1403 overflow, for <0 we are interested in any overflow. OVERFLOWED
1404 indicates whether overflow has already occurred. We force
1405 CST's value to be within range of TYPE (by setting to 0 or 1 all
1406 the bits outside the type's range). We set TREE_OVERFLOW if
1407 OVERFLOWED is nonzero,
1408 or OVERFLOWABLE is >0 and signed overflow occurs,
1409 or OVERFLOWABLE is <0 and any overflow occurs.
1410 We return a new tree node for the extended wide_int. The node
1411 is shared if no overflow flags are set. */
1415 tree
1416 force_fit_type (tree type, const poly_wide_int_ref &cst,
1417 int overflowable, bool overflowed)
1419 signop sign = TYPE_SIGN (type);
1421 /* If we need to set overflow flags, return a new unshared node. */
1422 if (overflowed || !wi::fits_to_tree_p (cst, type))
1424 if (overflowed
1425 || overflowable < 0
1426 || (overflowable > 0 && sign == SIGNED))
1428 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1429 sign);
1430 tree t;
1431 if (tmp.is_constant ())
1432 t = build_new_int_cst (type, tmp.coeffs[0]);
1433 else
1435 tree coeffs[NUM_POLY_INT_COEFFS];
1436 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1438 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1439 TREE_OVERFLOW (coeffs[i]) = 1;
1441 t = build_new_poly_int_cst (type, coeffs);
1443 TREE_OVERFLOW (t) = 1;
1444 return t;
1448 /* Else build a shared node. */
1449 return wide_int_to_tree (type, cst);
1452 /* These are the hash table functions for the hash table of INTEGER_CST
1453 nodes. */
1455 /* Return the hash code X, an INTEGER_CST. */
1457 hashval_t
1458 int_cst_hasher::hash (tree x)
1460 const_tree const t = x;
1461 hashval_t code = TYPE_UID (TREE_TYPE (t));
1462 int i;
1464 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1465 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1467 return code;
1470 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1471 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1473 bool
1474 int_cst_hasher::equal (tree x, tree y)
1476 const_tree const xt = x;
1477 const_tree const yt = y;
1479 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1480 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1481 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1482 return false;
1484 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1485 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1486 return false;
1488 return true;
1491 /* Create an INT_CST node of TYPE and value CST.
1492 The returned node is always shared. For small integers we use a
1493 per-type vector cache, for larger ones we use a single hash table.
1494 The value is extended from its precision according to the sign of
1495 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1496 the upper bits and ensures that hashing and value equality based
1497 upon the underlying HOST_WIDE_INTs works without masking. */
1499 static tree
1500 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1502 tree t;
1503 int ix = -1;
1504 int limit = 0;
1506 gcc_assert (type);
1507 unsigned int prec = TYPE_PRECISION (type);
1508 signop sgn = TYPE_SIGN (type);
1510 /* Verify that everything is canonical. */
1511 int l = pcst.get_len ();
1512 if (l > 1)
1514 if (pcst.elt (l - 1) == 0)
1515 gcc_checking_assert (pcst.elt (l - 2) < 0);
1516 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1517 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1520 wide_int cst = wide_int::from (pcst, prec, sgn);
1521 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1523 if (ext_len == 1)
1525 /* We just need to store a single HOST_WIDE_INT. */
1526 HOST_WIDE_INT hwi;
1527 if (TYPE_UNSIGNED (type))
1528 hwi = cst.to_uhwi ();
1529 else
1530 hwi = cst.to_shwi ();
1532 switch (TREE_CODE (type))
1534 case NULLPTR_TYPE:
1535 gcc_assert (hwi == 0);
1536 /* Fallthru. */
1538 case POINTER_TYPE:
1539 case REFERENCE_TYPE:
1540 /* Cache NULL pointer and zero bounds. */
1541 if (hwi == 0)
1543 limit = 1;
1544 ix = 0;
1546 break;
1548 case BOOLEAN_TYPE:
1549 /* Cache false or true. */
1550 limit = 2;
1551 if (IN_RANGE (hwi, 0, 1))
1552 ix = hwi;
1553 break;
1555 case INTEGER_TYPE:
1556 case OFFSET_TYPE:
1557 if (TYPE_SIGN (type) == UNSIGNED)
1559 /* Cache [0, N). */
1560 limit = INTEGER_SHARE_LIMIT;
1561 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1562 ix = hwi;
1564 else
1566 /* Cache [-1, N). */
1567 limit = INTEGER_SHARE_LIMIT + 1;
1568 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1569 ix = hwi + 1;
1571 break;
1573 case ENUMERAL_TYPE:
1574 break;
1576 default:
1577 gcc_unreachable ();
1580 if (ix >= 0)
1582 /* Look for it in the type's vector of small shared ints. */
1583 if (!TYPE_CACHED_VALUES_P (type))
1585 TYPE_CACHED_VALUES_P (type) = 1;
1586 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1589 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1590 if (t)
1591 /* Make sure no one is clobbering the shared constant. */
1592 gcc_checking_assert (TREE_TYPE (t) == type
1593 && TREE_INT_CST_NUNITS (t) == 1
1594 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1595 && TREE_INT_CST_EXT_NUNITS (t) == 1
1596 && TREE_INT_CST_ELT (t, 0) == hwi);
1597 else
1599 /* Create a new shared int. */
1600 t = build_new_int_cst (type, cst);
1601 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1604 else
1606 /* Use the cache of larger shared ints, using int_cst_node as
1607 a temporary. */
1609 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1610 TREE_TYPE (int_cst_node) = type;
1612 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1613 t = *slot;
1614 if (!t)
1616 /* Insert this one into the hash table. */
1617 t = int_cst_node;
1618 *slot = t;
1619 /* Make a new node for next time round. */
1620 int_cst_node = make_int_cst (1, 1);
1624 else
1626 /* The value either hashes properly or we drop it on the floor
1627 for the gc to take care of. There will not be enough of them
1628 to worry about. */
1630 tree nt = build_new_int_cst (type, cst);
1631 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1632 t = *slot;
1633 if (!t)
1635 /* Insert this one into the hash table. */
1636 t = nt;
1637 *slot = t;
1639 else
1640 ggc_free (nt);
1643 return t;
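/* A consequence of the sharing scheme above, sketched for small values:
   repeated requests for the same value and type hand back the identical
   node, so pointer comparison suffices, e.g. two calls of

     build_int_cst (integer_type_node, 0)

   return the same tree (this is how shared nodes such as
   integer_zero_node stay unique); larger values are unified through
   int_cst_hash_table instead.  */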
1646 hashval_t
1647 poly_int_cst_hasher::hash (tree t)
1649 inchash::hash hstate;
1651 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1652 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1653 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1655 return hstate.end ();
1658 bool
1659 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1661 if (TREE_TYPE (x) != y.first)
1662 return false;
1663 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1664 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1665 return false;
1666 return true;
1669 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1670 The elements must also have type TYPE. */
1672 tree
1673 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1675 unsigned int prec = TYPE_PRECISION (type);
1676 gcc_assert (prec <= values.coeffs[0].get_precision ());
1677 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1679 inchash::hash h;
1680 h.add_int (TYPE_UID (type));
1681 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1682 h.add_wide_int (c.coeffs[i]);
1683 poly_int_cst_hasher::compare_type comp (type, &c);
1684 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1685 INSERT);
1686 if (*slot == NULL_TREE)
1688 tree coeffs[NUM_POLY_INT_COEFFS];
1689 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1690 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1691 *slot = build_new_poly_int_cst (type, coeffs);
1693 return *slot;
1696 /* Create a constant tree with value VALUE in type TYPE. */
1698 tree
1699 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1701 if (value.is_constant ())
1702 return wide_int_to_tree_1 (type, value.coeffs[0]);
1703 return build_poly_int_cst (type, value);
1706 void
1707 cache_integer_cst (tree t)
1709 tree type = TREE_TYPE (t);
1710 int ix = -1;
1711 int limit = 0;
1712 int prec = TYPE_PRECISION (type);
1714 gcc_assert (!TREE_OVERFLOW (t));
1716 switch (TREE_CODE (type))
1718 case NULLPTR_TYPE:
1719 gcc_assert (integer_zerop (t));
1720 /* Fallthru. */
1722 case POINTER_TYPE:
1723 case REFERENCE_TYPE:
1724 /* Cache NULL pointer. */
1725 if (integer_zerop (t))
1727 limit = 1;
1728 ix = 0;
1730 break;
1732 case BOOLEAN_TYPE:
1733 /* Cache false or true. */
1734 limit = 2;
1735 if (wi::ltu_p (wi::to_wide (t), 2))
1736 ix = TREE_INT_CST_ELT (t, 0);
1737 break;
1739 case INTEGER_TYPE:
1740 case OFFSET_TYPE:
1741 if (TYPE_UNSIGNED (type))
1743 /* Cache 0..N */
1744 limit = INTEGER_SHARE_LIMIT;
1746 /* This is a little hokey, but if the prec is smaller than
1747 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1748 obvious test will not get the correct answer. */
1749 if (prec < HOST_BITS_PER_WIDE_INT)
1751 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1752 ix = tree_to_uhwi (t);
1754 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1755 ix = tree_to_uhwi (t);
1757 else
1759 /* Cache -1..N */
1760 limit = INTEGER_SHARE_LIMIT + 1;
1762 if (integer_minus_onep (t))
1763 ix = 0;
1764 else if (!wi::neg_p (wi::to_wide (t)))
1766 if (prec < HOST_BITS_PER_WIDE_INT)
1768 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1769 ix = tree_to_shwi (t) + 1;
1771 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1772 ix = tree_to_shwi (t) + 1;
1775 break;
1777 case ENUMERAL_TYPE:
1778 break;
1780 default:
1781 gcc_unreachable ();
1784 if (ix >= 0)
1786 /* Look for it in the type's vector of small shared ints. */
1787 if (!TYPE_CACHED_VALUES_P (type))
1789 TYPE_CACHED_VALUES_P (type) = 1;
1790 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1793 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1794 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1796 else
1798 /* Use the cache of larger shared ints. */
1799 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1800 /* If there is already an entry for the number verify it's the
1801 same. */
1802 if (*slot)
1803 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1804 else
1805 /* Otherwise insert this one into the hash table. */
1806 *slot = t;
1811 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1812 and the rest are zeros. */
1814 tree
1815 build_low_bits_mask (tree type, unsigned bits)
1817 gcc_assert (bits <= TYPE_PRECISION (type));
1819 return wide_int_to_tree (type, wi::mask (bits, false,
1820 TYPE_PRECISION (type)));
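/* A worked example:

     build_low_bits_mask (unsigned_type_node, 4)

   yields the INTEGER_CST 0xf, i.e. wi::mask (4, false, prec) at the
   precision of unsigned int.  */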
1823 /* Checks that X is an integer constant that can be expressed in (unsigned)
1824 HOST_WIDE_INT without loss of precision. */
1826 bool
1827 cst_and_fits_in_hwi (const_tree x)
1829 return (TREE_CODE (x) == INTEGER_CST
1830 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1833 /* Build a newly constructed VECTOR_CST with the given values of
1834 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1836 tree
1837 make_vector (unsigned log2_npatterns,
1838 unsigned int nelts_per_pattern MEM_STAT_DECL)
1840 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1841 tree t;
1842 unsigned npatterns = 1 << log2_npatterns;
1843 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1844 unsigned length = (sizeof (struct tree_vector)
1845 + (encoded_nelts - 1) * sizeof (tree));
1847 record_node_allocation_statistics (VECTOR_CST, length);
1849 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1851 TREE_SET_CODE (t, VECTOR_CST);
1852 TREE_CONSTANT (t) = 1;
1853 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1854 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1856 return t;
1859 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1860 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1862 tree
1863 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1865 unsigned HOST_WIDE_INT idx, nelts;
1866 tree value;
1868 /* We can't construct a VECTOR_CST for a variable number of elements. */
1869 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1870 tree_vector_builder vec (type, nelts, 1);
1871 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1873 if (TREE_CODE (value) == VECTOR_CST)
1875 /* If NELTS is constant then this must be too. */
1876 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1877 for (unsigned i = 0; i < sub_nelts; ++i)
1878 vec.quick_push (VECTOR_CST_ELT (value, i));
1880 else
1881 vec.quick_push (value);
1883 while (vec.length () < nelts)
1884 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1886 return vec.build ();
1889 /* Build a vector of type VECTYPE where all the elements are SCs. */
1890 tree
1891 build_vector_from_val (tree vectype, tree sc)
1893 unsigned HOST_WIDE_INT i, nunits;
1895 if (sc == error_mark_node)
1896 return sc;
1898 /* Verify that the vector type is suitable for SC. Note that there
1899 is some inconsistency in the type-system with respect to restrict
1900 qualifications of pointers. Vector types always have a main-variant
1901 element type and the qualification is applied to the vector-type.
1902 So TREE_TYPE (vector-type) does not return a properly qualified
1903 vector element-type. */
1904 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1905 TREE_TYPE (vectype)));
1907 if (CONSTANT_CLASS_P (sc))
1909 tree_vector_builder v (vectype, 1, 1);
1910 v.quick_push (sc);
1911 return v.build ();
1913 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1914 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1915 else
1917 vec<constructor_elt, va_gc> *v;
1918 vec_alloc (v, nunits);
1919 for (i = 0; i < nunits; ++i)
1920 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1921 return build_constructor (vectype, v);
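/* A hypothetical usage, with V4SI standing for a four-element integer
   vector type: splatting a constant scalar yields a VECTOR_CST, while a
   non-constant scalar produces a CONSTRUCTOR (or a VEC_DUPLICATE_EXPR
   for variable-length vectors):

     tree v4si = build_vector_type (integer_type_node, 4);
     tree ones = build_vector_from_val (v4si,
                                        build_int_cst (integer_type_node, 1));
*/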
1925 /* If TYPE is not a vector type, just return SC, otherwise return
1926 build_vector_from_val (TYPE, SC). */
1928 tree
1929 build_uniform_cst (tree type, tree sc)
1931 if (!VECTOR_TYPE_P (type))
1932 return sc;
1934 return build_vector_from_val (type, sc);
1937 /* Build a vector series of type TYPE in which element I has the value
1938 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1939 and a VEC_SERIES_EXPR otherwise. */
1941 tree
1942 build_vec_series (tree type, tree base, tree step)
1944 if (integer_zerop (step))
1945 return build_vector_from_val (type, base);
1946 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1948 tree_vector_builder builder (type, 1, 3);
1949 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1950 wi::to_wide (base) + wi::to_wide (step));
1951 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1952 wi::to_wide (elt1) + wi::to_wide (step));
1953 builder.quick_push (base);
1954 builder.quick_push (elt1);
1955 builder.quick_push (elt2);
1956 return builder.build ();
1958 return build2 (VEC_SERIES_EXPR, type, base, step);
1961 /* Return a vector with the same number of units and number of bits
1962 as VEC_TYPE, but in which the elements are a linear series of unsigned
1963 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1965 tree
1966 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1968 tree index_vec_type = vec_type;
1969 tree index_elt_type = TREE_TYPE (vec_type);
1970 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1971 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1973 index_elt_type = build_nonstandard_integer_type
1974 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1975 index_vec_type = build_vector_type (index_elt_type, nunits);
1978 tree_vector_builder v (index_vec_type, 1, 3);
1979 for (unsigned int i = 0; i < 3; ++i)
1980 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1981 return v.build ();
1984 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1985 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1987 void
1988 recompute_constructor_flags (tree c)
1990 unsigned int i;
1991 tree val;
1992 bool constant_p = true;
1993 bool side_effects_p = false;
1994 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1996 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1998 /* Mostly ctors will have elts that don't have side-effects, so
1999 the usual case is to scan all the elements. Hence a single
2000 loop for both const and side effects, rather than one loop
2001 each (with early outs). */
2002 if (!TREE_CONSTANT (val))
2003 constant_p = false;
2004 if (TREE_SIDE_EFFECTS (val))
2005 side_effects_p = true;
2008 TREE_SIDE_EFFECTS (c) = side_effects_p;
2009 TREE_CONSTANT (c) = constant_p;
2012 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2013 CONSTRUCTOR C. */
2015 void
2016 verify_constructor_flags (tree c)
2018 unsigned int i;
2019 tree val;
2020 bool constant_p = TREE_CONSTANT (c);
2021 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2022 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2024 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2026 if (constant_p && !TREE_CONSTANT (val))
2027 internal_error ("non-constant element in constant CONSTRUCTOR");
2028 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2029 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2033 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2034 are in the vec pointed to by VALS. */
2035 tree
2036 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2038 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2040 TREE_TYPE (c) = type;
2041 CONSTRUCTOR_ELTS (c) = vals;
2043 recompute_constructor_flags (c);
2045 return c;
2048 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2049 INDEX and VALUE. */
2050 tree
2051 build_constructor_single (tree type, tree index, tree value)
2053 vec<constructor_elt, va_gc> *v;
2054 constructor_elt elt = {index, value};
2056 vec_alloc (v, 1);
2057 v->quick_push (elt);
2059 return build_constructor (type, v);
2063 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2064 are in a list pointed to by VALS. */
2065 tree
2066 build_constructor_from_list (tree type, tree vals)
2068 tree t;
2069 vec<constructor_elt, va_gc> *v = NULL;
2071 if (vals)
2073 vec_alloc (v, list_length (vals));
2074 for (t = vals; t; t = TREE_CHAIN (t))
2075 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2078 return build_constructor (type, v);
2081 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2082 of elements, provided as index/value pairs. */
2084 tree
2085 build_constructor_va (tree type, int nelts, ...)
2087 vec<constructor_elt, va_gc> *v = NULL;
2088 va_list p;
2090 va_start (p, nelts);
2091 vec_alloc (v, nelts);
2092 while (nelts--)
2094 tree index = va_arg (p, tree);
2095 tree value = va_arg (p, tree);
2096 CONSTRUCTOR_APPEND_ELT (v, index, value);
2098 va_end (p);
2099 return build_constructor (type, v);
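/* Illustrative sketch, not from the original source (REC, F1 and F2 are
   invented stand-ins for a RECORD_TYPE and its FIELD_DECLs).  The varargs
   variant takes NELTS index/value pairs:

     tree ctor = build_constructor_va (rec, 2,
                                       f1, build_int_cst (integer_type_node, 1),
                                       f2, build_int_cst (integer_type_node, 2));

   TREE_CONSTANT and TREE_SIDE_EFFECTS on the result are computed by
   recompute_constructor_flags, called from build_constructor.  */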
2102 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2104 tree
2105 build_clobber (tree type)
2107 tree clobber = build_constructor (type, NULL);
2108 TREE_THIS_VOLATILE (clobber) = true;
2109 return clobber;
2112 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2114 tree
2115 build_fixed (tree type, FIXED_VALUE_TYPE f)
2117 tree v;
2118 FIXED_VALUE_TYPE *fp;
2120 v = make_node (FIXED_CST);
2121 fp = ggc_alloc<fixed_value> ();
2122 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2124 TREE_TYPE (v) = type;
2125 TREE_FIXED_CST_PTR (v) = fp;
2126 return v;
2129 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2131 tree
2132 build_real (tree type, REAL_VALUE_TYPE d)
2134 tree v;
2135 REAL_VALUE_TYPE *dp;
2136 int overflow = 0;
2138 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2139 Consider doing it via real_convert now. */
2141 v = make_node (REAL_CST);
2142 dp = ggc_alloc<real_value> ();
2143 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2145 TREE_TYPE (v) = type;
2146 TREE_REAL_CST_PTR (v) = dp;
2147 TREE_OVERFLOW (v) = overflow;
2148 return v;
2151 /* Like build_real, but first truncate D to the type. */
2153 tree
2154 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2156 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2159 /* Return a new REAL_CST node whose type is TYPE
2160 and whose value is the integer value of the INTEGER_CST node I. */
2162 REAL_VALUE_TYPE
2163 real_value_from_int_cst (const_tree type, const_tree i)
2165 REAL_VALUE_TYPE d;
2167 /* Clear all bits of the real value type so that we can later do
2168 bitwise comparisons to see if two values are the same. */
2169 memset (&d, 0, sizeof d);
2171 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2172 TYPE_SIGN (TREE_TYPE (i)));
2173 return d;
2176 /* Given a tree representing an integer constant I, return a tree
2177 representing the same value as a floating-point constant of type TYPE. */
2179 tree
2180 build_real_from_int_cst (tree type, const_tree i)
2182 tree v;
2183 int overflow = TREE_OVERFLOW (i);
2185 v = build_real (type, real_value_from_int_cst (type, i));
2187 TREE_OVERFLOW (v) |= overflow;
2188 return v;
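/* Illustrative sketch, not from the original source: converting the
   integer constant 42 into a double constant; any overflow flag on the
   source constant is propagated to the result:

     tree i = build_int_cst (integer_type_node, 42);
     tree d = build_real_from_int_cst (double_type_node, i);  */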
2191 /* Return a newly constructed STRING_CST node whose value is
2192 the LEN characters at STR.
2193 Note that for a C string literal, LEN should include the trailing NUL.
2194 The TREE_TYPE is not initialized. */
2196 tree
2197 build_string (int len, const char *str)
2199 tree s;
2200 size_t length;
2202 /* Do not waste bytes provided by padding of struct tree_string. */
2203 length = len + offsetof (struct tree_string, str) + 1;
2205 record_node_allocation_statistics (STRING_CST, length);
2207 s = (tree) ggc_internal_alloc (length);
2209 memset (s, 0, sizeof (struct tree_typed));
2210 TREE_SET_CODE (s, STRING_CST);
2211 TREE_CONSTANT (s) = 1;
2212 TREE_STRING_LENGTH (s) = len;
2213 memcpy (s->string.str, str, len);
2214 s->string.str[len] = '\0';
2216 return s;
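/* Illustrative sketch, not from the original source: because build_string
   leaves TREE_TYPE unset, callers either go through a wrapper such as
   build_string_literal or set an array type by hand.  LEN counts the
   trailing NUL here, so the index domain is [0, 3]:

     tree s = build_string (4, "abc");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (3)));  */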
2219 /* Return a newly constructed COMPLEX_CST node whose value is
2220 specified by the real and imaginary parts REAL and IMAG.
2221 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2222 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2224 tree
2225 build_complex (tree type, tree real, tree imag)
2227 gcc_assert (CONSTANT_CLASS_P (real));
2228 gcc_assert (CONSTANT_CLASS_P (imag));
2230 tree t = make_node (COMPLEX_CST);
2232 TREE_REALPART (t) = real;
2233 TREE_IMAGPART (t) = imag;
2234 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2235 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2236 return t;
2239 /* Build a complex (inf +- 0i), such as for the result of cproj.
2240 TYPE is the complex tree type of the result. If NEG is true, the
2241 imaginary zero is negative. */
2243 tree
2244 build_complex_inf (tree type, bool neg)
2246 REAL_VALUE_TYPE rinf, rzero = dconst0;
2248 real_inf (&rinf);
2249 rzero.sign = neg;
2250 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2251 build_real (TREE_TYPE (type), rzero));
2254 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2255 element is set to 1. In particular, this is 1 + i for complex types. */
2257 tree
2258 build_each_one_cst (tree type)
2260 if (TREE_CODE (type) == COMPLEX_TYPE)
2262 tree scalar = build_one_cst (TREE_TYPE (type));
2263 return build_complex (type, scalar, scalar);
2265 else
2266 return build_one_cst (type);
2269 /* Return a constant of arithmetic type TYPE which is the
2270 multiplicative identity of the set TYPE. */
2272 tree
2273 build_one_cst (tree type)
2275 switch (TREE_CODE (type))
2277 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2278 case POINTER_TYPE: case REFERENCE_TYPE:
2279 case OFFSET_TYPE:
2280 return build_int_cst (type, 1);
2282 case REAL_TYPE:
2283 return build_real (type, dconst1);
2285 case FIXED_POINT_TYPE:
2286 /* We can only generate 1 for accum types. */
2287 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2288 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2290 case VECTOR_TYPE:
2292 tree scalar = build_one_cst (TREE_TYPE (type));
2294 return build_vector_from_val (type, scalar);
2297 case COMPLEX_TYPE:
2298 return build_complex (type,
2299 build_one_cst (TREE_TYPE (type)),
2300 build_zero_cst (TREE_TYPE (type)));
2302 default:
2303 gcc_unreachable ();
2307 /* Return an integer of type TYPE containing all 1's in as much precision as
2308 it contains, or a complex or vector whose subparts are such integers. */
2310 tree
2311 build_all_ones_cst (tree type)
2313 if (TREE_CODE (type) == COMPLEX_TYPE)
2315 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2316 return build_complex (type, scalar, scalar);
2318 else
2319 return build_minus_one_cst (type);
2322 /* Return a constant of arithmetic type TYPE which is the
2323 opposite of the multiplicative identity of the set TYPE. */
2325 tree
2326 build_minus_one_cst (tree type)
2328 switch (TREE_CODE (type))
2330 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2331 case POINTER_TYPE: case REFERENCE_TYPE:
2332 case OFFSET_TYPE:
2333 return build_int_cst (type, -1);
2335 case REAL_TYPE:
2336 return build_real (type, dconstm1);
2338 case FIXED_POINT_TYPE:
2339 /* We can only generate -1 for accum types. */
2340 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2341 return build_fixed (type,
2342 fixed_from_double_int (double_int_minus_one,
2343 SCALAR_TYPE_MODE (type)));
2345 case VECTOR_TYPE:
2347 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2349 return build_vector_from_val (type, scalar);
2352 case COMPLEX_TYPE:
2353 return build_complex (type,
2354 build_minus_one_cst (TREE_TYPE (type)),
2355 build_zero_cst (TREE_TYPE (type)));
2357 default:
2358 gcc_unreachable ();
2362 /* Build 0 constant of type TYPE. This is used by constructor folding
2363 and thus the constant should be represented in memory by
2364 zero(es). */
2366 tree
2367 build_zero_cst (tree type)
2369 switch (TREE_CODE (type))
2371 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2372 case POINTER_TYPE: case REFERENCE_TYPE:
2373 case OFFSET_TYPE: case NULLPTR_TYPE:
2374 return build_int_cst (type, 0);
2376 case REAL_TYPE:
2377 return build_real (type, dconst0);
2379 case FIXED_POINT_TYPE:
2380 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2382 case VECTOR_TYPE:
2384 tree scalar = build_zero_cst (TREE_TYPE (type));
2386 return build_vector_from_val (type, scalar);
2389 case COMPLEX_TYPE:
2391 tree zero = build_zero_cst (TREE_TYPE (type));
2393 return build_complex (type, zero, zero);
2396 default:
2397 if (!AGGREGATE_TYPE_P (type))
2398 return fold_convert (type, integer_zero_node);
2399 return build_constructor (type, NULL);
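/* Illustrative sketch, not from the original source: the three helpers
   above cover the usual identities for any arithmetic type, e.g.

     tree one  = build_one_cst (float_type_node);
     tree mone = build_minus_one_cst (float_type_node);
     tree zero = build_zero_cst (float_type_node);

   For vector and complex types each helper recurses into the element type,
   and build_zero_cst of an aggregate type returns an empty CONSTRUCTOR.  */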
2404 /* Build a BINFO with space for BASE_BINFOS base binfos. */
2406 tree
2407 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2409 tree t;
2410 size_t length = (offsetof (struct tree_binfo, base_binfos)
2411 + vec<tree, va_gc>::embedded_size (base_binfos));
2413 record_node_allocation_statistics (TREE_BINFO, length);
2415 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2417 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2419 TREE_SET_CODE (t, TREE_BINFO);
2421 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2423 return t;
2426 /* Create a CASE_LABEL_EXPR tree node and return it. */
2428 tree
2429 build_case_label (tree low_value, tree high_value, tree label_decl)
2431 tree t = make_node (CASE_LABEL_EXPR);
2433 TREE_TYPE (t) = void_type_node;
2434 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2436 CASE_LOW (t) = low_value;
2437 CASE_HIGH (t) = high_value;
2438 CASE_LABEL (t) = label_decl;
2439 CASE_CHAIN (t) = NULL_TREE;
2441 return t;
2444 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2445 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2446 The latter determines the length of the HOST_WIDE_INT vector. */
2448 tree
2449 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2451 tree t;
2452 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2453 + sizeof (struct tree_int_cst));
2455 gcc_assert (len);
2456 record_node_allocation_statistics (INTEGER_CST, length);
2458 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2460 TREE_SET_CODE (t, INTEGER_CST);
2461 TREE_INT_CST_NUNITS (t) = len;
2462 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2463 /* to_offset can only be applied to trees that are offset_int-sized
2464 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2465 must be exactly the precision of offset_int and so LEN is correct. */
2466 if (ext_len <= OFFSET_INT_ELTS)
2467 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2468 else
2469 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2471 TREE_CONSTANT (t) = 1;
2473 return t;
2476 /* Build a newly constructed TREE_VEC node of length LEN. */
2478 tree
2479 make_tree_vec (int len MEM_STAT_DECL)
2481 tree t;
2482 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2484 record_node_allocation_statistics (TREE_VEC, length);
2486 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2488 TREE_SET_CODE (t, TREE_VEC);
2489 TREE_VEC_LENGTH (t) = len;
2491 return t;
2494 /* Grow a TREE_VEC node to new length LEN. */
2496 tree
2497 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2499 gcc_assert (TREE_CODE (v) == TREE_VEC);
2501 int oldlen = TREE_VEC_LENGTH (v);
2502 gcc_assert (len > oldlen);
2504 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2505 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2507 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2509 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2511 TREE_VEC_LENGTH (v) = len;
2513 return v;
2516 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2517 fixed, and scalar, complex or vector. */
2519 bool
2520 zerop (const_tree expr)
2522 return (integer_zerop (expr)
2523 || real_zerop (expr)
2524 || fixed_zerop (expr));
2527 /* Return 1 if EXPR is the integer constant zero or a complex constant
2528 of zero, or a location wrapper for such a constant. */
2530 bool
2531 integer_zerop (const_tree expr)
2533 STRIP_ANY_LOCATION_WRAPPER (expr);
2535 switch (TREE_CODE (expr))
2537 case INTEGER_CST:
2538 return wi::to_wide (expr) == 0;
2539 case COMPLEX_CST:
2540 return (integer_zerop (TREE_REALPART (expr))
2541 && integer_zerop (TREE_IMAGPART (expr)));
2542 case VECTOR_CST:
2543 return (VECTOR_CST_NPATTERNS (expr) == 1
2544 && VECTOR_CST_DUPLICATE_P (expr)
2545 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2546 default:
2547 return false;
2551 /* Return 1 if EXPR is the integer constant one or the corresponding
2552 complex constant, or a location wrapper for such a constant. */
2554 bool
2555 integer_onep (const_tree expr)
2557 STRIP_ANY_LOCATION_WRAPPER (expr);
2559 switch (TREE_CODE (expr))
2561 case INTEGER_CST:
2562 return wi::eq_p (wi::to_widest (expr), 1);
2563 case COMPLEX_CST:
2564 return (integer_onep (TREE_REALPART (expr))
2565 && integer_zerop (TREE_IMAGPART (expr)));
2566 case VECTOR_CST:
2567 return (VECTOR_CST_NPATTERNS (expr) == 1
2568 && VECTOR_CST_DUPLICATE_P (expr)
2569 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2570 default:
2571 return false;
2575 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2576 return 1 if every piece is the integer constant one.
2577 Also return 1 for location wrappers for such a constant. */
2579 bool
2580 integer_each_onep (const_tree expr)
2582 STRIP_ANY_LOCATION_WRAPPER (expr);
2584 if (TREE_CODE (expr) == COMPLEX_CST)
2585 return (integer_onep (TREE_REALPART (expr))
2586 && integer_onep (TREE_IMAGPART (expr)));
2587 else
2588 return integer_onep (expr);
2591 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2592 it contains, or a complex or vector whose subparts are such integers,
2593 or a location wrapper for such a constant. */
2595 bool
2596 integer_all_onesp (const_tree expr)
2598 STRIP_ANY_LOCATION_WRAPPER (expr);
2600 if (TREE_CODE (expr) == COMPLEX_CST
2601 && integer_all_onesp (TREE_REALPART (expr))
2602 && integer_all_onesp (TREE_IMAGPART (expr)))
2603 return true;
2605 else if (TREE_CODE (expr) == VECTOR_CST)
2606 return (VECTOR_CST_NPATTERNS (expr) == 1
2607 && VECTOR_CST_DUPLICATE_P (expr)
2608 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2610 else if (TREE_CODE (expr) != INTEGER_CST)
2611 return false;
2613 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2614 == wi::to_wide (expr));
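/* Illustrative sketch, not from the original source: whether EXPR counts
   as "all ones" depends only on the bits within its precision, so both of
   these calls return true:

     integer_all_onesp (build_int_cst (unsigned_char_type_node, 255));
     integer_all_onesp (build_int_cst (signed_char_type_node, -1));  */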
2617 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2618 for such a constant. */
2620 bool
2621 integer_minus_onep (const_tree expr)
2623 STRIP_ANY_LOCATION_WRAPPER (expr);
2625 if (TREE_CODE (expr) == COMPLEX_CST)
2626 return (integer_all_onesp (TREE_REALPART (expr))
2627 && integer_zerop (TREE_IMAGPART (expr)));
2628 else
2629 return integer_all_onesp (expr);
2632 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2633 one bit on), or a location wrapper for such a constant. */
2635 bool
2636 integer_pow2p (const_tree expr)
2638 STRIP_ANY_LOCATION_WRAPPER (expr);
2640 if (TREE_CODE (expr) == COMPLEX_CST
2641 && integer_pow2p (TREE_REALPART (expr))
2642 && integer_zerop (TREE_IMAGPART (expr)))
2643 return true;
2645 if (TREE_CODE (expr) != INTEGER_CST)
2646 return false;
2648 return wi::popcount (wi::to_wide (expr)) == 1;
2651 /* Return 1 if EXPR is an integer constant other than zero or a
2652 complex constant other than zero, or a location wrapper for such a
2653 constant. */
2655 bool
2656 integer_nonzerop (const_tree expr)
2658 STRIP_ANY_LOCATION_WRAPPER (expr);
2660 return ((TREE_CODE (expr) == INTEGER_CST
2661 && wi::to_wide (expr) != 0)
2662 || (TREE_CODE (expr) == COMPLEX_CST
2663 && (integer_nonzerop (TREE_REALPART (expr))
2664 || integer_nonzerop (TREE_IMAGPART (expr)))));
2667 /* Return 1 if EXPR is the integer constant one. For vector,
2668 return 1 if every piece is the integer constant minus one
2669 (representing the value TRUE).
2670 Also return 1 for location wrappers for such a constant. */
2672 bool
2673 integer_truep (const_tree expr)
2675 STRIP_ANY_LOCATION_WRAPPER (expr);
2677 if (TREE_CODE (expr) == VECTOR_CST)
2678 return integer_all_onesp (expr);
2679 return integer_onep (expr);
2682 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2683 for such a constant. */
2685 bool
2686 fixed_zerop (const_tree expr)
2688 STRIP_ANY_LOCATION_WRAPPER (expr);
2690 return (TREE_CODE (expr) == FIXED_CST
2691 && TREE_FIXED_CST (expr).data.is_zero ());
2694 /* Return the base-2 logarithm of a tree node known to be a
2695 power of two. */
2698 tree_log2 (const_tree expr)
2700 if (TREE_CODE (expr) == COMPLEX_CST)
2701 return tree_log2 (TREE_REALPART (expr));
2703 return wi::exact_log2 (wi::to_wide (expr));
2706 /* Similar, but return the largest integer Y such that 2 ** Y is less
2707 than or equal to EXPR. */
2710 tree_floor_log2 (const_tree expr)
2712 if (TREE_CODE (expr) == COMPLEX_CST)
2713 return tree_log2 (TREE_REALPART (expr));
2715 return wi::floor_log2 (wi::to_wide (expr));
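/* Illustrative sketch, not from the original source: for the INTEGER_CST 8,
   integer_pow2p is true and both tree_log2 and tree_floor_log2 return 3;
   for 10, tree_log2 returns -1 (not an exact power of two) while
   tree_floor_log2 still returns 3:

     tree ten = build_int_cst (integer_type_node, 10);
     gcc_checking_assert (!integer_pow2p (ten) && tree_floor_log2 (ten) == 3);  */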
2718 /* Return number of known trailing zero bits in EXPR, or, if the value of
2719 EXPR is known to be zero, the precision of its type. */
2721 unsigned int
2722 tree_ctz (const_tree expr)
2724 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2725 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2726 return 0;
2728 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2729 switch (TREE_CODE (expr))
2731 case INTEGER_CST:
2732 ret1 = wi::ctz (wi::to_wide (expr));
2733 return MIN (ret1, prec);
2734 case SSA_NAME:
2735 ret1 = wi::ctz (get_nonzero_bits (expr));
2736 return MIN (ret1, prec);
2737 case PLUS_EXPR:
2738 case MINUS_EXPR:
2739 case BIT_IOR_EXPR:
2740 case BIT_XOR_EXPR:
2741 case MIN_EXPR:
2742 case MAX_EXPR:
2743 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2744 if (ret1 == 0)
2745 return ret1;
2746 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2747 return MIN (ret1, ret2);
2748 case POINTER_PLUS_EXPR:
2749 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2750 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2751 /* The second operand is sizetype, which could in theory be
2752 wider than the pointer's precision. Make sure we never
2753 return more than prec. */
2754 ret2 = MIN (ret2, prec);
2755 return MIN (ret1, ret2);
2756 case BIT_AND_EXPR:
2757 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2758 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2759 return MAX (ret1, ret2);
2760 case MULT_EXPR:
2761 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2762 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2763 return MIN (ret1 + ret2, prec);
2764 case LSHIFT_EXPR:
2765 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2766 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2767 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2769 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2770 return MIN (ret1 + ret2, prec);
2772 return ret1;
2773 case RSHIFT_EXPR:
2774 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2775 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2777 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2778 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2779 if (ret1 > ret2)
2780 return ret1 - ret2;
2782 return 0;
2783 case TRUNC_DIV_EXPR:
2784 case CEIL_DIV_EXPR:
2785 case FLOOR_DIV_EXPR:
2786 case ROUND_DIV_EXPR:
2787 case EXACT_DIV_EXPR:
2788 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2789 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2791 int l = tree_log2 (TREE_OPERAND (expr, 1));
2792 if (l >= 0)
2794 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2795 ret2 = l;
2796 if (ret1 > ret2)
2797 return ret1 - ret2;
2800 return 0;
2801 CASE_CONVERT:
2802 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2803 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2804 ret1 = prec;
2805 return MIN (ret1, prec);
2806 case SAVE_EXPR:
2807 return tree_ctz (TREE_OPERAND (expr, 0));
2808 case COND_EXPR:
2809 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2810 if (ret1 == 0)
2811 return 0;
2812 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2813 return MIN (ret1, ret2);
2814 case COMPOUND_EXPR:
2815 return tree_ctz (TREE_OPERAND (expr, 1));
2816 case ADDR_EXPR:
2817 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2818 if (ret1 > BITS_PER_UNIT)
2820 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2821 return MIN (ret1, prec);
2823 return 0;
2824 default:
2825 return 0;
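/* Illustrative sketch, not from the original source (X stands for some
   int-typed expression or SSA_NAME): tree_ctz composes the per-operator
   rules above, so multiplying by 8 guarantees at least three trailing
   zero bits, and adding 4 to that product brings the guarantee down to 2:

     tree t = build2 (MULT_EXPR, integer_type_node, x,
                      build_int_cst (integer_type_node, 8));
     unsigned int low_zeros = tree_ctz (t);

   where LOW_ZEROS is at least 3.  */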
2829 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2830 decimal float constants, so don't return 1 for them.
2831 Also return 1 for location wrappers around such a constant. */
2833 bool
2834 real_zerop (const_tree expr)
2836 STRIP_ANY_LOCATION_WRAPPER (expr);
2838 switch (TREE_CODE (expr))
2840 case REAL_CST:
2841 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2842 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2843 case COMPLEX_CST:
2844 return real_zerop (TREE_REALPART (expr))
2845 && real_zerop (TREE_IMAGPART (expr));
2846 case VECTOR_CST:
2848 /* Don't simply check for a duplicate because the predicate
2849 accepts both +0.0 and -0.0. */
2850 unsigned count = vector_cst_encoded_nelts (expr);
2851 for (unsigned int i = 0; i < count; ++i)
2852 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2853 return false;
2854 return true;
2856 default:
2857 return false;
2861 /* Return 1 if EXPR is the real constant one in real or complex form.
2862 Trailing zeroes matter for decimal float constants, so don't return
2863 1 for them.
2864 Also return 1 for location wrappers around such a constant. */
2866 bool
2867 real_onep (const_tree expr)
2869 STRIP_ANY_LOCATION_WRAPPER (expr);
2871 switch (TREE_CODE (expr))
2873 case REAL_CST:
2874 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2875 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2876 case COMPLEX_CST:
2877 return real_onep (TREE_REALPART (expr))
2878 && real_zerop (TREE_IMAGPART (expr));
2879 case VECTOR_CST:
2880 return (VECTOR_CST_NPATTERNS (expr) == 1
2881 && VECTOR_CST_DUPLICATE_P (expr)
2882 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2883 default:
2884 return false;
2888 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2889 matter for decimal float constants, so don't return 1 for them.
2890 Also return 1 for location wrappers around such a constant. */
2892 bool
2893 real_minus_onep (const_tree expr)
2895 STRIP_ANY_LOCATION_WRAPPER (expr);
2897 switch (TREE_CODE (expr))
2899 case REAL_CST:
2900 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2901 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2902 case COMPLEX_CST:
2903 return real_minus_onep (TREE_REALPART (expr))
2904 && real_zerop (TREE_IMAGPART (expr));
2905 case VECTOR_CST:
2906 return (VECTOR_CST_NPATTERNS (expr) == 1
2907 && VECTOR_CST_DUPLICATE_P (expr)
2908 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2909 default:
2910 return false;
2914 /* Nonzero if EXP is a constant or a cast of a constant. */
2916 bool
2917 really_constant_p (const_tree exp)
2919 /* This is not quite the same as STRIP_NOPS. It does more. */
2920 while (CONVERT_EXPR_P (exp)
2921 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2922 exp = TREE_OPERAND (exp, 0);
2923 return TREE_CONSTANT (exp);
2926 /* Return true if T holds a polynomial pointer difference, storing it in
2927 *VALUE if so. A true return means that T's precision is no greater
2928 than 64 bits, which is the largest address space we support, so *VALUE
2929 never loses precision. However, the signedness of the result does
2930 not necessarily match the signedness of T: sometimes an unsigned type
2931 like sizetype is used to encode a value that is actually negative. */
2933 bool
2934 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2936 if (!t)
2937 return false;
2938 if (TREE_CODE (t) == INTEGER_CST)
2940 if (!cst_and_fits_in_hwi (t))
2941 return false;
2942 *value = int_cst_value (t);
2943 return true;
2945 if (POLY_INT_CST_P (t))
2947 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2948 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2949 return false;
2950 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2951 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2952 return true;
2954 return false;
2957 poly_int64
2958 tree_to_poly_int64 (const_tree t)
2960 gcc_assert (tree_fits_poly_int64_p (t));
2961 if (POLY_INT_CST_P (t))
2962 return poly_int_cst_value (t).force_shwi ();
2963 return TREE_INT_CST_LOW (t);
2966 poly_uint64
2967 tree_to_poly_uint64 (const_tree t)
2969 gcc_assert (tree_fits_poly_uint64_p (t));
2970 if (POLY_INT_CST_P (t))
2971 return poly_int_cst_value (t).force_uhwi ();
2972 return TREE_INT_CST_LOW (t);
2975 /* Return first list element whose TREE_VALUE is ELEM.
2976 Return 0 if ELEM is not in LIST. */
2978 tree
2979 value_member (tree elem, tree list)
2981 while (list)
2983 if (elem == TREE_VALUE (list))
2984 return list;
2985 list = TREE_CHAIN (list);
2987 return NULL_TREE;
2990 /* Return first list element whose TREE_PURPOSE is ELEM.
2991 Return 0 if ELEM is not in LIST. */
2993 tree
2994 purpose_member (const_tree elem, tree list)
2996 while (list)
2998 if (elem == TREE_PURPOSE (list))
2999 return list;
3000 list = TREE_CHAIN (list);
3002 return NULL_TREE;
3005 /* Return true if ELEM is in V. */
3007 bool
3008 vec_member (const_tree elem, vec<tree, va_gc> *v)
3010 unsigned ix;
3011 tree t;
3012 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3013 if (elem == t)
3014 return true;
3015 return false;
3018 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3019 NULL_TREE. */
3021 tree
3022 chain_index (int idx, tree chain)
3024 for (; chain && idx > 0; --idx)
3025 chain = TREE_CHAIN (chain);
3026 return chain;
3029 /* Return nonzero if ELEM is part of the chain CHAIN. */
3031 bool
3032 chain_member (const_tree elem, const_tree chain)
3034 while (chain)
3036 if (elem == chain)
3037 return true;
3038 chain = DECL_CHAIN (chain);
3041 return false;
3044 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3045 We expect a null pointer to mark the end of the chain.
3046 This is the Lisp primitive `length'. */
3049 list_length (const_tree t)
3051 const_tree p = t;
3052 #ifdef ENABLE_TREE_CHECKING
3053 const_tree q = t;
3054 #endif
3055 int len = 0;
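  /* Under ENABLE_TREE_CHECKING the loop below doubles as a cycle check:
     Q advances every other iteration, so on a circular chain P would
     catch up with Q and the assert would fire instead of looping forever.  */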
3057 while (p)
3059 p = TREE_CHAIN (p);
3060 #ifdef ENABLE_TREE_CHECKING
3061 if (len % 2)
3062 q = TREE_CHAIN (q);
3063 gcc_assert (p != q);
3064 #endif
3065 len++;
3068 return len;
3071 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3072 UNION_TYPE TYPE, or NULL_TREE if none. */
3074 tree
3075 first_field (const_tree type)
3077 tree t = TYPE_FIELDS (type);
3078 while (t && TREE_CODE (t) != FIELD_DECL)
3079 t = TREE_CHAIN (t);
3080 return t;
3083 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3084 by modifying the last node in chain 1 to point to chain 2.
3085 This is the Lisp primitive `nconc'. */
3087 tree
3088 chainon (tree op1, tree op2)
3090 tree t1;
3092 if (!op1)
3093 return op2;
3094 if (!op2)
3095 return op1;
3097 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3098 continue;
3099 TREE_CHAIN (t1) = op2;
3101 #ifdef ENABLE_TREE_CHECKING
3103 tree t2;
3104 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3105 gcc_assert (t2 != t1);
3107 #endif
3109 return op1;
3112 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3114 tree
3115 tree_last (tree chain)
3117 tree next;
3118 if (chain)
3119 while ((next = TREE_CHAIN (chain)))
3120 chain = next;
3121 return chain;
3124 /* Reverse the order of elements in the chain T,
3125 and return the new head of the chain (old last element). */
3127 tree
3128 nreverse (tree t)
3130 tree prev = 0, decl, next;
3131 for (decl = t; decl; decl = next)
3133 /* We shouldn't be using this function to reverse BLOCK chains; we
3134 have blocks_nreverse for that. */
3135 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3136 next = TREE_CHAIN (decl);
3137 TREE_CHAIN (decl) = prev;
3138 prev = decl;
3140 return prev;
3143 /* Return a newly created TREE_LIST node whose
3144 purpose and value fields are PARM and VALUE. */
3146 tree
3147 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3149 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3150 TREE_PURPOSE (t) = parm;
3151 TREE_VALUE (t) = value;
3152 return t;
3155 /* Build a chain of TREE_LIST nodes from a vector. */
3157 tree
3158 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3160 tree ret = NULL_TREE;
3161 tree *pp = &ret;
3162 unsigned int i;
3163 tree t;
3164 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3166 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3167 pp = &TREE_CHAIN (*pp);
3169 return ret;
3172 /* Return a newly created TREE_LIST node whose
3173 purpose and value fields are PURPOSE and VALUE
3174 and whose TREE_CHAIN is CHAIN. */
3176 tree
3177 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3179 tree node;
3181 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3182 memset (node, 0, sizeof (struct tree_common));
3184 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3186 TREE_SET_CODE (node, TREE_LIST);
3187 TREE_CHAIN (node) = chain;
3188 TREE_PURPOSE (node) = purpose;
3189 TREE_VALUE (node) = value;
3190 return node;
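/* Illustrative sketch, not from the original source (A and B are invented
   trees): the TREE_LIST helpers above compose in the usual Lisp-like way:

     tree l = tree_cons (NULL_TREE, a, build_tree_list (NULL_TREE, b));
     gcc_checking_assert (list_length (l) == 2);
     l = nreverse (l);
     l = chainon (l, build_tree_list (NULL_TREE, a));

   After nreverse the element B comes first; after chainon the chain has
   length 3.  */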
3193 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3194 trees. */
3196 vec<tree, va_gc> *
3197 ctor_to_vec (tree ctor)
3199 vec<tree, va_gc> *vec;
3200 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3201 unsigned int ix;
3202 tree val;
3204 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3205 vec->quick_push (val);
3207 return vec;
3210 /* Return the size nominally occupied by an object of type TYPE
3211 when it resides in memory. The value is measured in units of bytes,
3212 and its data type is that normally used for type sizes
3213 (which is the first type created by make_signed_type or
3214 make_unsigned_type). */
3216 tree
3217 size_in_bytes_loc (location_t loc, const_tree type)
3219 tree t;
3221 if (type == error_mark_node)
3222 return integer_zero_node;
3224 type = TYPE_MAIN_VARIANT (type);
3225 t = TYPE_SIZE_UNIT (type);
3227 if (t == 0)
3229 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3230 return size_zero_node;
3233 return t;
3236 /* Return the size of TYPE (in bytes) as a wide integer
3237 or return -1 if the size can vary or is larger than an integer. */
3239 HOST_WIDE_INT
3240 int_size_in_bytes (const_tree type)
3242 tree t;
3244 if (type == error_mark_node)
3245 return 0;
3247 type = TYPE_MAIN_VARIANT (type);
3248 t = TYPE_SIZE_UNIT (type);
3250 if (t && tree_fits_uhwi_p (t))
3251 return TREE_INT_CST_LOW (t);
3252 else
3253 return -1;
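/* Illustrative sketch, not from the original source: for a complete
   fixed-size type the two queries agree, e.g.

     HOST_WIDE_INT n = int_size_in_bytes (integer_type_node);
     tree sz = size_in_bytes_loc (input_location, integer_type_node);

   where N equals tree_to_uhwi (SZ); for incomplete or variable-sized types
   int_size_in_bytes returns -1 instead.  */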
3256 /* Return the maximum size of TYPE (in bytes) as a wide integer
3257 or return -1 if the size can vary or is larger than an integer. */
3259 HOST_WIDE_INT
3260 max_int_size_in_bytes (const_tree type)
3262 HOST_WIDE_INT size = -1;
3263 tree size_tree;
3265 /* If this is an array type, check for a possible MAX_SIZE attached. */
3267 if (TREE_CODE (type) == ARRAY_TYPE)
3269 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3271 if (size_tree && tree_fits_uhwi_p (size_tree))
3272 size = tree_to_uhwi (size_tree);
3275 /* If we still haven't been able to get a size, see if the language
3276 can compute a maximum size. */
3278 if (size == -1)
3280 size_tree = lang_hooks.types.max_size (type);
3282 if (size_tree && tree_fits_uhwi_p (size_tree))
3283 size = tree_to_uhwi (size_tree);
3286 return size;
3289 /* Return the bit position of FIELD, in bits from the start of the record.
3290 This is a tree of type bitsizetype. */
3292 tree
3293 bit_position (const_tree field)
3295 return bit_from_pos (DECL_FIELD_OFFSET (field),
3296 DECL_FIELD_BIT_OFFSET (field));
3299 /* Return the byte position of FIELD, in bytes from the start of the record.
3300 This is a tree of type sizetype. */
3302 tree
3303 byte_position (const_tree field)
3305 return byte_from_pos (DECL_FIELD_OFFSET (field),
3306 DECL_FIELD_BIT_OFFSET (field));
3309 /* Likewise, but return as an integer. It must be representable in
3310 that way (since it could be a signed value, we don't have the
3311 option of returning -1 like int_size_in_bytes can). */
3313 HOST_WIDE_INT
3314 int_byte_position (const_tree field)
3316 return tree_to_shwi (byte_position (field));
3319 /* Return the strictest alignment, in bits, that T is known to have. */
3321 unsigned int
3322 expr_align (const_tree t)
3324 unsigned int align0, align1;
3326 switch (TREE_CODE (t))
3328 CASE_CONVERT: case NON_LVALUE_EXPR:
3329 /* If we have conversions, we know that the alignment of the
3330 object must meet each of the alignments of the types. */
3331 align0 = expr_align (TREE_OPERAND (t, 0));
3332 align1 = TYPE_ALIGN (TREE_TYPE (t));
3333 return MAX (align0, align1);
3335 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3336 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3337 case CLEANUP_POINT_EXPR:
3338 /* These don't change the alignment of an object. */
3339 return expr_align (TREE_OPERAND (t, 0));
3341 case COND_EXPR:
3342 /* The best we can do is say that the alignment is the least aligned
3343 of the two arms. */
3344 align0 = expr_align (TREE_OPERAND (t, 1));
3345 align1 = expr_align (TREE_OPERAND (t, 2));
3346 return MIN (align0, align1);
3348 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3349 meaningfully, it's always 1. */
3350 case LABEL_DECL: case CONST_DECL:
3351 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3352 case FUNCTION_DECL:
3353 gcc_assert (DECL_ALIGN (t) != 0);
3354 return DECL_ALIGN (t);
3356 default:
3357 break;
3360 /* Otherwise take the alignment from that of the type. */
3361 return TYPE_ALIGN (TREE_TYPE (t));
3364 /* Return, as a tree node, the number of elements for TYPE (which is an
3365 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3367 tree
3368 array_type_nelts (const_tree type)
3370 tree index_type, min, max;
3372 /* If they did it with unspecified bounds, then we should have already
3373 given an error about it before we got here. */
3374 if (! TYPE_DOMAIN (type))
3375 return error_mark_node;
3377 index_type = TYPE_DOMAIN (type);
3378 min = TYPE_MIN_VALUE (index_type);
3379 max = TYPE_MAX_VALUE (index_type);
3381 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3382 if (!max)
3383 return error_mark_node;
3385 return (integer_zerop (min)
3386 ? max
3387 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
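/* Illustrative sketch, not from the original source: for the C type
   int[10], whose index domain is [0, 9], array_type_nelts returns the
   INTEGER_CST 9 -- the element count minus one:

     tree arr = build_array_type (integer_type_node,
                                  build_index_type (size_int (9)));
     tree nelts_minus_1 = array_type_nelts (arr);  */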
3390 /* If arg is static -- a reference to an object in static storage -- then
3391 return the object. This is not the same as the C meaning of `static'.
3392 If arg isn't static, return NULL. */
3394 tree
3395 staticp (tree arg)
3397 switch (TREE_CODE (arg))
3399 case FUNCTION_DECL:
3400 /* Nested functions are static, even though taking their address will
3401 involve a trampoline as we unnest the nested function and create
3402 the trampoline on the tree level. */
3403 return arg;
3405 case VAR_DECL:
3406 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3407 && ! DECL_THREAD_LOCAL_P (arg)
3408 && ! DECL_DLLIMPORT_P (arg)
3409 ? arg : NULL);
3411 case CONST_DECL:
3412 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3413 ? arg : NULL);
3415 case CONSTRUCTOR:
3416 return TREE_STATIC (arg) ? arg : NULL;
3418 case LABEL_DECL:
3419 case STRING_CST:
3420 return arg;
3422 case COMPONENT_REF:
3423 /* If the thing being referenced is not a field, then it is
3424 something language specific. */
3425 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3427 /* If we are referencing a bitfield, we can't evaluate an
3428 ADDR_EXPR at compile time and so it isn't a constant. */
3429 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3430 return NULL;
3432 return staticp (TREE_OPERAND (arg, 0));
3434 case BIT_FIELD_REF:
3435 return NULL;
3437 case INDIRECT_REF:
3438 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3440 case ARRAY_REF:
3441 case ARRAY_RANGE_REF:
3442 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3443 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3444 return staticp (TREE_OPERAND (arg, 0));
3445 else
3446 return NULL;
3448 case COMPOUND_LITERAL_EXPR:
3449 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3451 default:
3452 return NULL;
3459 /* Return whether OP is a DECL whose address is function-invariant. */
3461 bool
3462 decl_address_invariant_p (const_tree op)
3464 /* The conditions below are slightly less strict than the one in
3465 staticp. */
3467 switch (TREE_CODE (op))
3469 case PARM_DECL:
3470 case RESULT_DECL:
3471 case LABEL_DECL:
3472 case FUNCTION_DECL:
3473 return true;
3475 case VAR_DECL:
3476 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3477 || DECL_THREAD_LOCAL_P (op)
3478 || DECL_CONTEXT (op) == current_function_decl
3479 || decl_function_context (op) == current_function_decl)
3480 return true;
3481 break;
3483 case CONST_DECL:
3484 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3485 || decl_function_context (op) == current_function_decl)
3486 return true;
3487 break;
3489 default:
3490 break;
3493 return false;
3496 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3498 bool
3499 decl_address_ip_invariant_p (const_tree op)
3501 /* The conditions below are slightly less strict than the one in
3502 staticp. */
3504 switch (TREE_CODE (op))
3506 case LABEL_DECL:
3507 case FUNCTION_DECL:
3508 case STRING_CST:
3509 return true;
3511 case VAR_DECL:
3512 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3513 && !DECL_DLLIMPORT_P (op))
3514 || DECL_THREAD_LOCAL_P (op))
3515 return true;
3516 break;
3518 case CONST_DECL:
3519 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3520 return true;
3521 break;
3523 default:
3524 break;
3527 return false;
3531 /* Return true if T is function-invariant (internal function, does
3532 not handle arithmetic; that's handled in skip_simple_arithmetic and
3533 tree_invariant_p). */
3535 static bool
3536 tree_invariant_p_1 (tree t)
3538 tree op;
3540 if (TREE_CONSTANT (t)
3541 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3542 return true;
3544 switch (TREE_CODE (t))
3546 case SAVE_EXPR:
3547 return true;
3549 case ADDR_EXPR:
3550 op = TREE_OPERAND (t, 0);
3551 while (handled_component_p (op))
3553 switch (TREE_CODE (op))
3555 case ARRAY_REF:
3556 case ARRAY_RANGE_REF:
3557 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3558 || TREE_OPERAND (op, 2) != NULL_TREE
3559 || TREE_OPERAND (op, 3) != NULL_TREE)
3560 return false;
3561 break;
3563 case COMPONENT_REF:
3564 if (TREE_OPERAND (op, 2) != NULL_TREE)
3565 return false;
3566 break;
3568 default:;
3570 op = TREE_OPERAND (op, 0);
3573 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3575 default:
3576 break;
3579 return false;
3582 /* Return true if T is function-invariant. */
3584 bool
3585 tree_invariant_p (tree t)
3587 tree inner = skip_simple_arithmetic (t);
3588 return tree_invariant_p_1 (inner);
3591 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3592 Do this to any expression which may be used in more than one place,
3593 but must be evaluated only once.
3595 Normally, expand_expr would reevaluate the expression each time.
3596 Calling save_expr produces something that is evaluated and recorded
3597 the first time expand_expr is called on it. Subsequent calls to
3598 expand_expr just reuse the recorded value.
3600 The call to expand_expr that generates code that actually computes
3601 the value is the first call *at compile time*. Subsequent calls
3602 *at compile time* generate code to use the saved value.
3603 This produces a correct result provided that *at run time* control
3604 always flows through the insns made by the first expand_expr
3605 before reaching the other places where the save_expr was evaluated.
3606 You, the caller of save_expr, must make sure this is so.
3608 Constants, and certain read-only nodes, are returned with no
3609 SAVE_EXPR because that is safe. Expressions containing placeholders
3610 are not touched; see tree.def for an explanation of what these
3611 are used for. */
3613 tree
3614 save_expr (tree expr)
3616 tree inner;
3618 /* If the tree evaluates to a constant, then we don't want to hide that
3619 fact (i.e. this allows further folding, and direct checks for constants).
3620 However, a read-only object that has side effects cannot be bypassed.
3621 Since it is no problem to reevaluate literals, we just return the
3622 literal node. */
3623 inner = skip_simple_arithmetic (expr);
3624 if (TREE_CODE (inner) == ERROR_MARK)
3625 return inner;
3627 if (tree_invariant_p_1 (inner))
3628 return expr;
3630 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3631 it means that the size or offset of some field of an object depends on
3632 the value within another field.
3634 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3635 and some variable since it would then need to be both evaluated once and
3636 evaluated more than once. Front-ends must assure this case cannot
3637 happen by surrounding any such subexpressions in their own SAVE_EXPR
3638 and forcing evaluation at the proper time. */
3639 if (contains_placeholder_p (inner))
3640 return expr;
3642 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3644 /* This expression might be placed ahead of a jump to ensure that the
3645 value was computed on both sides of the jump. So make sure it isn't
3646 eliminated as dead. */
3647 TREE_SIDE_EFFECTS (expr) = 1;
3648 return expr;
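/* Illustrative sketch, not from the original source (EXP is an invented
   expression that is expensive or has side effects): a caller that needs
   EXP twice evaluates it only once by wrapping it first:

     tree saved = save_expr (exp);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (saved), saved, saved);

   If EXP is already constant or otherwise invariant, save_expr returns it
   unchanged and no SAVE_EXPR node is created.  */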
3651 /* Look inside EXPR into any simple arithmetic operations. Return the
3652 outermost non-arithmetic or non-invariant node. */
3654 tree
3655 skip_simple_arithmetic (tree expr)
3657 /* We don't care about whether this can be used as an lvalue in this
3658 context. */
3659 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3660 expr = TREE_OPERAND (expr, 0);
3662 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3663 a constant, it will be more efficient to not make another SAVE_EXPR since
3664 it will allow better simplification and GCSE will be able to merge the
3665 computations if they actually occur. */
3666 while (true)
3668 if (UNARY_CLASS_P (expr))
3669 expr = TREE_OPERAND (expr, 0);
3670 else if (BINARY_CLASS_P (expr))
3672 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3673 expr = TREE_OPERAND (expr, 0);
3674 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3675 expr = TREE_OPERAND (expr, 1);
3676 else
3677 break;
3679 else
3680 break;
3683 return expr;
3686 /* Look inside EXPR into simple arithmetic operations involving constants.
3687 Return the outermost non-arithmetic or non-constant node. */
3689 tree
3690 skip_simple_constant_arithmetic (tree expr)
3692 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3693 expr = TREE_OPERAND (expr, 0);
3695 while (true)
3697 if (UNARY_CLASS_P (expr))
3698 expr = TREE_OPERAND (expr, 0);
3699 else if (BINARY_CLASS_P (expr))
3701 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3702 expr = TREE_OPERAND (expr, 0);
3703 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3704 expr = TREE_OPERAND (expr, 1);
3705 else
3706 break;
3708 else
3709 break;
3712 return expr;
3715 /* Return which tree structure is used by T. */
3717 enum tree_node_structure_enum
3718 tree_node_structure (const_tree t)
3720 const enum tree_code code = TREE_CODE (t);
3721 return tree_node_structure_for_code (code);
3724 /* Set various status flags when building a CALL_EXPR object T. */
3726 static void
3727 process_call_operands (tree t)
3729 bool side_effects = TREE_SIDE_EFFECTS (t);
3730 bool read_only = false;
3731 int i = call_expr_flags (t);
3733 /* Calls have side-effects, except those to const or pure functions. */
3734 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3735 side_effects = true;
3736 /* Propagate TREE_READONLY of arguments for const functions. */
3737 if (i & ECF_CONST)
3738 read_only = true;
3740 if (!side_effects || read_only)
3741 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3743 tree op = TREE_OPERAND (t, i);
3744 if (op && TREE_SIDE_EFFECTS (op))
3745 side_effects = true;
3746 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3747 read_only = false;
3750 TREE_SIDE_EFFECTS (t) = side_effects;
3751 TREE_READONLY (t) = read_only;
3754 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3755 size or offset that depends on a field within a record. */
3757 bool
3758 contains_placeholder_p (const_tree exp)
3760 enum tree_code code;
3762 if (!exp)
3763 return 0;
3765 code = TREE_CODE (exp);
3766 if (code == PLACEHOLDER_EXPR)
3767 return 1;
3769 switch (TREE_CODE_CLASS (code))
3771 case tcc_reference:
3772 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3773 position computations since they will be converted into a
3774 WITH_RECORD_EXPR involving the reference, which we assume
3775 here will be valid. */
3776 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3778 case tcc_exceptional:
3779 if (code == TREE_LIST)
3780 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3781 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3782 break;
3784 case tcc_unary:
3785 case tcc_binary:
3786 case tcc_comparison:
3787 case tcc_expression:
3788 switch (code)
3790 case COMPOUND_EXPR:
3791 /* Ignoring the first operand isn't quite right, but works best. */
3792 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3794 case COND_EXPR:
3795 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3796 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3797 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3799 case SAVE_EXPR:
3800 /* The save_expr function never wraps anything containing
3801 a PLACEHOLDER_EXPR. */
3802 return 0;
3804 default:
3805 break;
3808 switch (TREE_CODE_LENGTH (code))
3810 case 1:
3811 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3812 case 2:
3813 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3814 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3815 default:
3816 return 0;
3819 case tcc_vl_exp:
3820 switch (code)
3822 case CALL_EXPR:
3824 const_tree arg;
3825 const_call_expr_arg_iterator iter;
3826 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3827 if (CONTAINS_PLACEHOLDER_P (arg))
3828 return 1;
3829 return 0;
3831 default:
3832 return 0;
3835 default:
3836 return 0;
3838 return 0;
3841 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3842 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3843 field positions. */
3845 static bool
3846 type_contains_placeholder_1 (const_tree type)
3848 /* If the size contains a placeholder or the parent type (the component
3849 type in the case of arrays) involves a placeholder, this type does. */
3850 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3851 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3852 || (!POINTER_TYPE_P (type)
3853 && TREE_TYPE (type)
3854 && type_contains_placeholder_p (TREE_TYPE (type))))
3855 return true;
3857 /* Now do type-specific checks. Note that the last part of the check above
3858 greatly limits what we have to do below. */
3859 switch (TREE_CODE (type))
3861 case VOID_TYPE:
3862 case COMPLEX_TYPE:
3863 case ENUMERAL_TYPE:
3864 case BOOLEAN_TYPE:
3865 case POINTER_TYPE:
3866 case OFFSET_TYPE:
3867 case REFERENCE_TYPE:
3868 case METHOD_TYPE:
3869 case FUNCTION_TYPE:
3870 case VECTOR_TYPE:
3871 case NULLPTR_TYPE:
3872 return false;
3874 case INTEGER_TYPE:
3875 case REAL_TYPE:
3876 case FIXED_POINT_TYPE:
3877 /* Here we just check the bounds. */
3878 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3879 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3881 case ARRAY_TYPE:
3882 /* We have already checked the component type above, so just check
3883 the domain type. Flexible array members have a null domain. */
3884 return TYPE_DOMAIN (type) ?
3885 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3887 case RECORD_TYPE:
3888 case UNION_TYPE:
3889 case QUAL_UNION_TYPE:
3891 tree field;
3893 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3894 if (TREE_CODE (field) == FIELD_DECL
3895 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3896 || (TREE_CODE (type) == QUAL_UNION_TYPE
3897 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3898 || type_contains_placeholder_p (TREE_TYPE (field))))
3899 return true;
3901 return false;
3904 default:
3905 gcc_unreachable ();
3909 /* Wrapper around above function used to cache its result. */
3911 bool
3912 type_contains_placeholder_p (tree type)
3914 bool result;
3916 /* If the contains_placeholder_bits field has been initialized,
3917 then we know the answer. */
3918 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3919 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3921 /* Indicate that we've seen this type node, and the answer is false.
3922 This is what we want to return if we run into recursion via fields. */
3923 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3925 /* Compute the real value. */
3926 result = type_contains_placeholder_1 (type);
3928 /* Store the real value. */
3929 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3931 return result;
3934 /* Push tree EXP onto vector QUEUE if it is not already present. */
3936 static void
3937 push_without_duplicates (tree exp, vec<tree> *queue)
3939 unsigned int i;
3940 tree iter;
3942 FOR_EACH_VEC_ELT (*queue, i, iter)
3943 if (simple_cst_equal (iter, exp) == 1)
3944 break;
3946 if (!iter)
3947 queue->safe_push (exp);
3950 /* Given a tree EXP, find all occurrences of references to fields
3951 in a PLACEHOLDER_EXPR and place them in vector REFS without
3952 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3953 we assume here that EXP contains only arithmetic expressions
3954 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3955 argument list. */
3957 void
3958 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3960 enum tree_code code = TREE_CODE (exp);
3961 tree inner;
3962 int i;
3964 /* We handle TREE_LIST and COMPONENT_REF separately. */
3965 if (code == TREE_LIST)
3967 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3968 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3970 else if (code == COMPONENT_REF)
3972 for (inner = TREE_OPERAND (exp, 0);
3973 REFERENCE_CLASS_P (inner);
3974 inner = TREE_OPERAND (inner, 0))
3977 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3978 push_without_duplicates (exp, refs);
3979 else
3980 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3982 else
3983 switch (TREE_CODE_CLASS (code))
3985 case tcc_constant:
3986 break;
3988 case tcc_declaration:
3989 /* Variables allocated to static storage can stay. */
3990 if (!TREE_STATIC (exp))
3991 push_without_duplicates (exp, refs);
3992 break;
3994 case tcc_expression:
3995 /* This is the pattern built in ada/make_aligning_type. */
3996 if (code == ADDR_EXPR
3997 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3999 push_without_duplicates (exp, refs);
4000 break;
4003 /* Fall through. */
4005 case tcc_exceptional:
4006 case tcc_unary:
4007 case tcc_binary:
4008 case tcc_comparison:
4009 case tcc_reference:
4010 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4011 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4012 break;
4014 case tcc_vl_exp:
4015 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4016 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4017 break;
4019 default:
4020 gcc_unreachable ();
4024 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4025 return a tree with all occurrences of references to F in a
4026 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4027 CONST_DECLs. Note that we assume here that EXP contains only
4028 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4029 occurring only in their argument list. */
4031 tree
4032 substitute_in_expr (tree exp, tree f, tree r)
4034 enum tree_code code = TREE_CODE (exp);
4035 tree op0, op1, op2, op3;
4036 tree new_tree;
4038 /* We handle TREE_LIST and COMPONENT_REF separately. */
4039 if (code == TREE_LIST)
4041 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4042 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4043 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4044 return exp;
4046 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4048 else if (code == COMPONENT_REF)
4050 tree inner;
4052 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4053 and it is the right field, replace it with R. */
4054 for (inner = TREE_OPERAND (exp, 0);
4055 REFERENCE_CLASS_P (inner);
4056 inner = TREE_OPERAND (inner, 0))
4059 /* The field. */
4060 op1 = TREE_OPERAND (exp, 1);
4062 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4063 return r;
4065 /* If this expression hasn't been completed yet, leave it alone. */
4066 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4067 return exp;
4069 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4070 if (op0 == TREE_OPERAND (exp, 0))
4071 return exp;
4073 new_tree
4074 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4076 else
4077 switch (TREE_CODE_CLASS (code))
4079 case tcc_constant:
4080 return exp;
4082 case tcc_declaration:
4083 if (exp == f)
4084 return r;
4085 else
4086 return exp;
4088 case tcc_expression:
4089 if (exp == f)
4090 return r;
4092 /* Fall through. */
4094 case tcc_exceptional:
4095 case tcc_unary:
4096 case tcc_binary:
4097 case tcc_comparison:
4098 case tcc_reference:
4099 switch (TREE_CODE_LENGTH (code))
4101 case 0:
4102 return exp;
4104 case 1:
4105 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4106 if (op0 == TREE_OPERAND (exp, 0))
4107 return exp;
4109 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4110 break;
4112 case 2:
4113 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4114 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4116 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4117 return exp;
4119 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4120 break;
4122 case 3:
4123 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4124 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4125 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4127 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4128 && op2 == TREE_OPERAND (exp, 2))
4129 return exp;
4131 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4132 break;
4134 case 4:
4135 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4136 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4137 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4138 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4140 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4141 && op2 == TREE_OPERAND (exp, 2)
4142 && op3 == TREE_OPERAND (exp, 3))
4143 return exp;
4145 new_tree
4146 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4147 break;
4149 default:
4150 gcc_unreachable ();
4152 break;
4154 case tcc_vl_exp:
4156 int i;
4158 new_tree = NULL_TREE;
4160 /* If we are trying to replace F with a constant or with another
4161 instance of one of the arguments of the call, inline back
4162 functions that do nothing other than compute a value from
4163 the arguments they are passed. This makes it possible to
4164 fold the replacement expression partially or entirely. */
4165 if (code == CALL_EXPR)
4167 bool maybe_inline = false;
4168 if (CONSTANT_CLASS_P (r))
4169 maybe_inline = true;
4170 else
4171 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4172 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4174 maybe_inline = true;
4175 break;
4177 if (maybe_inline)
4179 tree t = maybe_inline_call_in_expr (exp);
4180 if (t)
4181 return SUBSTITUTE_IN_EXPR (t, f, r);
4185 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4187 tree op = TREE_OPERAND (exp, i);
4188 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4189 if (new_op != op)
4191 if (!new_tree)
4192 new_tree = copy_node (exp);
4193 TREE_OPERAND (new_tree, i) = new_op;
4197 if (new_tree)
4199 new_tree = fold (new_tree);
4200 if (TREE_CODE (new_tree) == CALL_EXPR)
4201 process_call_operands (new_tree);
4203 else
4204 return exp;
4206 break;
4208 default:
4209 gcc_unreachable ();
4212 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4214 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4215 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4217 return new_tree;
4220 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4221 for it within OBJ, a tree that is an object or a chain of references. */
4223 tree
4224 substitute_placeholder_in_expr (tree exp, tree obj)
4226 enum tree_code code = TREE_CODE (exp);
4227 tree op0, op1, op2, op3;
4228 tree new_tree;
4230 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4231 in the chain of OBJ. */
4232 if (code == PLACEHOLDER_EXPR)
4234 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4235 tree elt;
4237 for (elt = obj; elt != 0;
4238 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4239 || TREE_CODE (elt) == COND_EXPR)
4240 ? TREE_OPERAND (elt, 1)
4241 : (REFERENCE_CLASS_P (elt)
4242 || UNARY_CLASS_P (elt)
4243 || BINARY_CLASS_P (elt)
4244 || VL_EXP_CLASS_P (elt)
4245 || EXPRESSION_CLASS_P (elt))
4246 ? TREE_OPERAND (elt, 0) : 0))
4247 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4248 return elt;
4250 for (elt = obj; elt != 0;
4251 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4252 || TREE_CODE (elt) == COND_EXPR)
4253 ? TREE_OPERAND (elt, 1)
4254 : (REFERENCE_CLASS_P (elt)
4255 || UNARY_CLASS_P (elt)
4256 || BINARY_CLASS_P (elt)
4257 || VL_EXP_CLASS_P (elt)
4258 || EXPRESSION_CLASS_P (elt))
4259 ? TREE_OPERAND (elt, 0) : 0))
4260 if (POINTER_TYPE_P (TREE_TYPE (elt))
4261 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4262 == need_type))
4263 return fold_build1 (INDIRECT_REF, need_type, elt);
4265 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4266 survives until RTL generation, there will be an error. */
4267 return exp;
4270 /* TREE_LIST is special because we need to look at TREE_VALUE
4271 and TREE_CHAIN, not TREE_OPERANDS. */
4272 else if (code == TREE_LIST)
4274 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4275 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4276 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4277 return exp;
4279 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4281 else
4282 switch (TREE_CODE_CLASS (code))
4284 case tcc_constant:
4285 case tcc_declaration:
4286 return exp;
4288 case tcc_exceptional:
4289 case tcc_unary:
4290 case tcc_binary:
4291 case tcc_comparison:
4292 case tcc_expression:
4293 case tcc_reference:
4294 case tcc_statement:
4295 switch (TREE_CODE_LENGTH (code))
4297 case 0:
4298 return exp;
4300 case 1:
4301 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4302 if (op0 == TREE_OPERAND (exp, 0))
4303 return exp;
4305 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4306 break;
4308 case 2:
4309 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4310 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4312 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4313 return exp;
4315 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4316 break;
4318 case 3:
4319 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4320 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4321 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4323 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4324 && op2 == TREE_OPERAND (exp, 2))
4325 return exp;
4327 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4328 break;
4330 case 4:
4331 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4332 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4333 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4334 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4336 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4337 && op2 == TREE_OPERAND (exp, 2)
4338 && op3 == TREE_OPERAND (exp, 3))
4339 return exp;
4341 new_tree
4342 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4343 break;
4345 default:
4346 gcc_unreachable ();
4348 break;
4350 case tcc_vl_exp:
4352 int i;
4354 new_tree = NULL_TREE;
4356 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4358 tree op = TREE_OPERAND (exp, i);
4359 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4360 if (new_op != op)
4362 if (!new_tree)
4363 new_tree = copy_node (exp);
4364 TREE_OPERAND (new_tree, i) = new_op;
4368 if (new_tree)
4370 new_tree = fold (new_tree);
4371 if (TREE_CODE (new_tree) == CALL_EXPR)
4372 process_call_operands (new_tree);
4374 else
4375 return exp;
4377 break;
4379 default:
4380 gcc_unreachable ();
4383 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4385 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4386 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4388 return new_tree;
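/* Hedged example (identifiers are hypothetical): for a self-referential
   type whose size refers to the object itself via a PLACEHOLDER_EXPR,
   e.g. a record whose TYPE_SIZE is roughly

     <PLACEHOLDER_EXPR>.len * BITS_PER_UNIT

   a caller holding a concrete object OBJ of that type can compute the
   actual size with

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);

   which replaces the placeholder by OBJ (or by a suitable reference
   found in the chain of OBJ) and folds the result.  */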
4392 /* Subroutine of stabilize_reference; this is called for subtrees of
4393 references. Any expression with side-effects must be put in a SAVE_EXPR
4394 to ensure that it is only evaluated once.
4396 We don't put SAVE_EXPR nodes around everything, because assigning very
4397 simple expressions to temporaries causes us to miss good opportunities
4398 for optimizations. Among other things, the opportunity to fold in the
4399 addition of a constant into an addressing mode often gets lost, e.g.
4400 "y[i+1] += x;". In general, we take the approach that we should not make
4401 an assignment unless we are forced into it - i.e., that any non-side effect
4402 operator should be allowed, and that cse should take care of coalescing
4403 multiple utterances of the same expression should that prove fruitful. */
4405 static tree
4406 stabilize_reference_1 (tree e)
4408 tree result;
4409 enum tree_code code = TREE_CODE (e);
4411 /* We cannot ignore const expressions because it might be a reference
4412 to a const array whose index contains side-effects. But we can
4413 ignore things that are actual constants or that have already been
4414 handled by this function. */
4416 if (tree_invariant_p (e))
4417 return e;
4419 switch (TREE_CODE_CLASS (code))
4421 case tcc_exceptional:
4422 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4423 have side-effects. */
4424 if (code == STATEMENT_LIST)
4425 return save_expr (e);
4426 /* FALLTHRU */
4427 case tcc_type:
4428 case tcc_declaration:
4429 case tcc_comparison:
4430 case tcc_statement:
4431 case tcc_expression:
4432 case tcc_reference:
4433 case tcc_vl_exp:
4434 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4435 so that it will only be evaluated once. */
4436 /* The reference (r) and comparison (<) classes could be handled as
4437 below, but it is generally faster to only evaluate them once. */
4438 if (TREE_SIDE_EFFECTS (e))
4439 return save_expr (e);
4440 return e;
4442 case tcc_constant:
4443 /* Constants need no processing. In fact, we should never reach
4444 here. */
4445 return e;
4447 case tcc_binary:
4448 /* Division is slow and tends to be compiled with jumps,
4449 especially the division by powers of 2 that is often
4450 found inside of an array reference. So do it just once. */
4451 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4452 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4453 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4454 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4455 return save_expr (e);
4456 /* Recursively stabilize each operand. */
4457 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4458 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4459 break;
4461 case tcc_unary:
4462 /* Recursively stabilize each operand. */
4463 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4464 break;
4466 default:
4467 gcc_unreachable ();
4470 TREE_TYPE (result) = TREE_TYPE (e);
4471 TREE_READONLY (result) = TREE_READONLY (e);
4472 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4473 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4475 return result;
4478 /* Stabilize a reference so that we can use it any number of times
4479 without causing its operands to be evaluated more than once.
4480 Returns the stabilized reference. This works by means of save_expr,
4481 so see the caveats in the comments about save_expr.
4483 Also allows conversion expressions whose operands are references.
4484 Any other kind of expression is returned unchanged. */
4486 tree
4487 stabilize_reference (tree ref)
4489 tree result;
4490 enum tree_code code = TREE_CODE (ref);
4492 switch (code)
4494 case VAR_DECL:
4495 case PARM_DECL:
4496 case RESULT_DECL:
4497 /* No action is needed in this case. */
4498 return ref;
4500 CASE_CONVERT:
4501 case FLOAT_EXPR:
4502 case FIX_TRUNC_EXPR:
4503 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4504 break;
4506 case INDIRECT_REF:
4507 result = build_nt (INDIRECT_REF,
4508 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4509 break;
4511 case COMPONENT_REF:
4512 result = build_nt (COMPONENT_REF,
4513 stabilize_reference (TREE_OPERAND (ref, 0)),
4514 TREE_OPERAND (ref, 1), NULL_TREE);
4515 break;
4517 case BIT_FIELD_REF:
4518 result = build_nt (BIT_FIELD_REF,
4519 stabilize_reference (TREE_OPERAND (ref, 0)),
4520 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4521 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4522 break;
4524 case ARRAY_REF:
4525 result = build_nt (ARRAY_REF,
4526 stabilize_reference (TREE_OPERAND (ref, 0)),
4527 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4528 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4529 break;
4531 case ARRAY_RANGE_REF:
4532 result = build_nt (ARRAY_RANGE_REF,
4533 stabilize_reference (TREE_OPERAND (ref, 0)),
4534 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4535 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4536 break;
4538 case COMPOUND_EXPR:
4539 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4540 it wouldn't be ignored. This matters when dealing with
4541 volatiles. */
4542 return stabilize_reference_1 (ref);
4544 /* If arg isn't a kind of lvalue we recognize, make no change.
4545 Caller should recognize the error for an invalid lvalue. */
4546 default:
4547 return ref;
4549 case ERROR_MARK:
4550 return error_mark_node;
4553 TREE_TYPE (result) = TREE_TYPE (ref);
4554 TREE_READONLY (result) = TREE_READONLY (ref);
4555 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4556 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4558 return result;
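/* Rough usage sketch (the trees are hypothetical): given a reference
   such as a[i++].f, whose index has side effects, a front end can write

     tree ref = stabilize_reference (orig_ref);

   and then use REF several times (say, to build a read-modify-write);
   the side-effecting index ends up wrapped in a SAVE_EXPR by
   stabilize_reference_1, so i++ is evaluated exactly once.  */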
4561 /* Low-level constructors for expressions. */
4563 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4564 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4566 void
4567 recompute_tree_invariant_for_addr_expr (tree t)
4569 tree node;
4570 bool tc = true, se = false;
4572 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4574 /* We started out assuming this address is both invariant and constant, but
4575 does not have side effects. Now go down any handled components and see if
4576 any of them involve offsets that are either non-constant or non-invariant.
4577 Also check for side-effects.
4579 ??? Note that this code makes no attempt to deal with the case where
4580 taking the address of something causes a copy due to misalignment. */
4582 #define UPDATE_FLAGS(NODE) \
4583 do { tree _node = (NODE); \
4584 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4585 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4587 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4588 node = TREE_OPERAND (node, 0))
4590 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4591 array reference (probably made temporarily by the G++ front end),
4592 so ignore all the operands. */
4593 if ((TREE_CODE (node) == ARRAY_REF
4594 || TREE_CODE (node) == ARRAY_RANGE_REF)
4595 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4597 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4598 if (TREE_OPERAND (node, 2))
4599 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4600 if (TREE_OPERAND (node, 3))
4601 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4603 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4604 FIELD_DECL, apparently. The G++ front end can put something else
4605 there, at least temporarily. */
4606 else if (TREE_CODE (node) == COMPONENT_REF
4607 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4609 if (TREE_OPERAND (node, 2))
4610 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4614 node = lang_hooks.expr_to_decl (node, &tc, &se);
4616 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4617 the address, since &(*a)->b is a form of addition. If it's a constant, the
4618 address is constant too. If it's a decl, its address is constant if the
4619 decl is static. Everything else is not constant and, furthermore,
4620 taking the address of a volatile variable is not volatile. */
4621 if (TREE_CODE (node) == INDIRECT_REF
4622 || TREE_CODE (node) == MEM_REF)
4623 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4624 else if (CONSTANT_CLASS_P (node))
4626 else if (DECL_P (node))
4627 tc &= (staticp (node) != NULL_TREE);
4628 else
4630 tc = false;
4631 se |= TREE_SIDE_EFFECTS (node);
4635 TREE_CONSTANT (t) = tc;
4636 TREE_SIDE_EFFECTS (t) = se;
4637 #undef UPDATE_FLAGS
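/* Minimal illustration (the trees are hypothetical): build1 already
   calls this for a newly built ADDR_EXPR, but code that rewrites the
   operand in place must refresh the flags itself, e.g.

     TREE_OPERAND (addr, 0) = new_inner_ref;
     recompute_tree_invariant_for_addr_expr (addr);

   For something like &static_var.field the node becomes TREE_CONSTANT;
   for &a[i] with a non-constant index it does not, and side effects in
   the index propagate to the ADDR_EXPR.  */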
4640 /* Build an expression of code CODE, data type TYPE, and operands as
4641 specified. Expressions and reference nodes can be created this way.
4642 Constants, decls, types and misc nodes cannot be.
4644 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4645 enough for all extant tree codes. */
4647 tree
4648 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4650 tree t;
4652 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4654 t = make_node (code PASS_MEM_STAT);
4655 TREE_TYPE (t) = tt;
4657 return t;
4660 tree
4661 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4663 int length = sizeof (struct tree_exp);
4664 tree t;
4666 record_node_allocation_statistics (code, length);
4668 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4670 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4672 memset (t, 0, sizeof (struct tree_common));
4674 TREE_SET_CODE (t, code);
4676 TREE_TYPE (t) = type;
4677 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4678 TREE_OPERAND (t, 0) = node;
4679 if (node && !TYPE_P (node))
4681 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4682 TREE_READONLY (t) = TREE_READONLY (node);
4685 if (TREE_CODE_CLASS (code) == tcc_statement)
4687 if (code != DEBUG_BEGIN_STMT)
4688 TREE_SIDE_EFFECTS (t) = 1;
4690 else switch (code)
4692 case VA_ARG_EXPR:
4693 /* All of these have side-effects, no matter what their
4694 operands are. */
4695 TREE_SIDE_EFFECTS (t) = 1;
4696 TREE_READONLY (t) = 0;
4697 break;
4699 case INDIRECT_REF:
4700 /* Whether a dereference is readonly has nothing to do with whether
4701 its operand is readonly. */
4702 TREE_READONLY (t) = 0;
4703 break;
4705 case ADDR_EXPR:
4706 if (node)
4707 recompute_tree_invariant_for_addr_expr (t);
4708 break;
4710 default:
4711 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4712 && node && !TYPE_P (node)
4713 && TREE_CONSTANT (node))
4714 TREE_CONSTANT (t) = 1;
4715 if (TREE_CODE_CLASS (code) == tcc_reference
4716 && node && TREE_THIS_VOLATILE (node))
4717 TREE_THIS_VOLATILE (t) = 1;
4718 break;
4721 return t;
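/* Hedged example (type and operand are placeholders): a unary node is
   typically created as

     tree neg = build1 (NEGATE_EXPR, integer_type_node, op);

   TREE_SIDE_EFFECTS and TREE_READONLY are copied from OP, and for
   tcc_unary codes TREE_CONSTANT is set when OP is constant; ADDR_EXPR,
   INDIRECT_REF and VA_ARG_EXPR get the special handling shown in the
   switch above.  */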
4724 #define PROCESS_ARG(N) \
4725 do { \
4726 TREE_OPERAND (t, N) = arg##N; \
4727 if (arg##N &&!TYPE_P (arg##N)) \
4729 if (TREE_SIDE_EFFECTS (arg##N)) \
4730 side_effects = 1; \
4731 if (!TREE_READONLY (arg##N) \
4732 && !CONSTANT_CLASS_P (arg##N)) \
4733 (void) (read_only = 0); \
4734 if (!TREE_CONSTANT (arg##N)) \
4735 (void) (constant = 0); \
4737 } while (0)
4739 tree
4740 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4742 bool constant, read_only, side_effects, div_by_zero;
4743 tree t;
4745 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4747 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4748 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4749 /* When sizetype precision doesn't match that of pointers
4750 we need to be able to build explicit extensions or truncations
4751 of the offset argument. */
4752 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4753 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4754 && TREE_CODE (arg1) == INTEGER_CST);
4756 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4757 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4758 && ptrofftype_p (TREE_TYPE (arg1)));
4760 t = make_node (code PASS_MEM_STAT);
4761 TREE_TYPE (t) = tt;
4763 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4764 result based on those same flags for the arguments. But if the
4765 arguments aren't really even `tree' expressions, we shouldn't be trying
4766 to do this. */
4768 /* Expressions without side effects may be constant if their
4769 arguments are as well. */
4770 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4771 || TREE_CODE_CLASS (code) == tcc_binary);
4772 read_only = 1;
4773 side_effects = TREE_SIDE_EFFECTS (t);
4775 switch (code)
4777 case TRUNC_DIV_EXPR:
4778 case CEIL_DIV_EXPR:
4779 case FLOOR_DIV_EXPR:
4780 case ROUND_DIV_EXPR:
4781 case EXACT_DIV_EXPR:
4782 case CEIL_MOD_EXPR:
4783 case FLOOR_MOD_EXPR:
4784 case ROUND_MOD_EXPR:
4785 case TRUNC_MOD_EXPR:
4786 div_by_zero = integer_zerop (arg1);
4787 break;
4788 default:
4789 div_by_zero = false;
4792 PROCESS_ARG (0);
4793 PROCESS_ARG (1);
4795 TREE_SIDE_EFFECTS (t) = side_effects;
4796 if (code == MEM_REF)
4798 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4800 tree o = TREE_OPERAND (arg0, 0);
4801 TREE_READONLY (t) = TREE_READONLY (o);
4802 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4805 else
4807 TREE_READONLY (t) = read_only;
4808 /* Don't mark X / 0 as constant. */
4809 TREE_CONSTANT (t) = constant && !div_by_zero;
4810 TREE_THIS_VOLATILE (t)
4811 = (TREE_CODE_CLASS (code) == tcc_reference
4812 && arg0 && TREE_THIS_VOLATILE (arg0));
4815 return t;
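/* Sketch of typical use (operands are hypothetical): binary nodes are
   built as

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree div = build2 (TRUNC_DIV_EXPR, integer_type_node, a,
                        integer_zero_node);

   SUM is TREE_CONSTANT when both A and B are; DIV is never marked
   TREE_CONSTANT because of the division-by-zero check above, and a
   MEM_REF instead copies TREE_READONLY / TREE_THIS_VOLATILE from the
   object behind an ADDR_EXPR operand.  */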
4819 tree
4820 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4821 tree arg2 MEM_STAT_DECL)
4823 bool constant, read_only, side_effects;
4824 tree t;
4826 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4827 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4829 t = make_node (code PASS_MEM_STAT);
4830 TREE_TYPE (t) = tt;
4832 read_only = 1;
4834 /* As a special exception, if COND_EXPR has NULL branches, we
4835 assume that it is a gimple statement and always consider
4836 it to have side effects. */
4837 if (code == COND_EXPR
4838 && tt == void_type_node
4839 && arg1 == NULL_TREE
4840 && arg2 == NULL_TREE)
4841 side_effects = true;
4842 else
4843 side_effects = TREE_SIDE_EFFECTS (t);
4845 PROCESS_ARG (0);
4846 PROCESS_ARG (1);
4847 PROCESS_ARG (2);
4849 if (code == COND_EXPR)
4850 TREE_READONLY (t) = read_only;
4852 TREE_SIDE_EFFECTS (t) = side_effects;
4853 TREE_THIS_VOLATILE (t)
4854 = (TREE_CODE_CLASS (code) == tcc_reference
4855 && arg0 && TREE_THIS_VOLATILE (arg0));
4857 return t;
4860 tree
4861 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4862 tree arg2, tree arg3 MEM_STAT_DECL)
4864 bool constant, read_only, side_effects;
4865 tree t;
4867 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4869 t = make_node (code PASS_MEM_STAT);
4870 TREE_TYPE (t) = tt;
4872 side_effects = TREE_SIDE_EFFECTS (t);
4874 PROCESS_ARG (0);
4875 PROCESS_ARG (1);
4876 PROCESS_ARG (2);
4877 PROCESS_ARG (3);
4879 TREE_SIDE_EFFECTS (t) = side_effects;
4880 TREE_THIS_VOLATILE (t)
4881 = (TREE_CODE_CLASS (code) == tcc_reference
4882 && arg0 && TREE_THIS_VOLATILE (arg0));
4884 return t;
4887 tree
4888 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4889 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4891 bool constant, read_only, side_effects;
4892 tree t;
4894 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4896 t = make_node (code PASS_MEM_STAT);
4897 TREE_TYPE (t) = tt;
4899 side_effects = TREE_SIDE_EFFECTS (t);
4901 PROCESS_ARG (0);
4902 PROCESS_ARG (1);
4903 PROCESS_ARG (2);
4904 PROCESS_ARG (3);
4905 PROCESS_ARG (4);
4907 TREE_SIDE_EFFECTS (t) = side_effects;
4908 if (code == TARGET_MEM_REF)
4910 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4912 tree o = TREE_OPERAND (arg0, 0);
4913 TREE_READONLY (t) = TREE_READONLY (o);
4914 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4917 else
4918 TREE_THIS_VOLATILE (t)
4919 = (TREE_CODE_CLASS (code) == tcc_reference
4920 && arg0 && TREE_THIS_VOLATILE (arg0));
4922 return t;
4925 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4926 on the pointer PTR. */
4928 tree
4929 build_simple_mem_ref_loc (location_t loc, tree ptr)
4931 poly_int64 offset = 0;
4932 tree ptype = TREE_TYPE (ptr);
4933 tree tem;
4934 /* For convenience allow addresses that collapse to a simple base
4935 and offset. */
4936 if (TREE_CODE (ptr) == ADDR_EXPR
4937 && (handled_component_p (TREE_OPERAND (ptr, 0))
4938 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4940 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4941 gcc_assert (ptr);
4942 if (TREE_CODE (ptr) == MEM_REF)
4944 offset += mem_ref_offset (ptr).force_shwi ();
4945 ptr = TREE_OPERAND (ptr, 0);
4947 else
4948 ptr = build_fold_addr_expr (ptr);
4949 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4951 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4952 ptr, build_int_cst (ptype, offset));
4953 SET_EXPR_LOCATION (tem, loc);
4954 return tem;
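/* Hedged example (PTR stands for any pointer-valued tree): a plain
   dereference with INDIRECT_REF semantics is obtained with

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   i.e. a MEM_REF of the pointed-to type whose offset is zero or, for
   an ADDR_EXPR of a handled component, the folded-in constant offset
   computed above.  */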
4957 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4959 poly_offset_int
4960 mem_ref_offset (const_tree t)
4962 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4963 SIGNED);
4966 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4967 offsetted by OFFSET units. */
4969 tree
4970 build_invariant_address (tree type, tree base, poly_int64 offset)
4972 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4973 build_fold_addr_expr (base),
4974 build_int_cst (ptr_type_node, offset));
4975 tree addr = build1 (ADDR_EXPR, type, ref);
4976 recompute_tree_invariant_for_addr_expr (addr);
4977 return addr;
4980 /* Similar except don't specify the TREE_TYPE
4981 and leave the TREE_SIDE_EFFECTS as 0.
4982 It is permissible for arguments to be null,
4983 or even garbage if their values do not matter. */
4985 tree
4986 build_nt (enum tree_code code, ...)
4988 tree t;
4989 int length;
4990 int i;
4991 va_list p;
4993 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4995 va_start (p, code);
4997 t = make_node (code);
4998 length = TREE_CODE_LENGTH (code);
5000 for (i = 0; i < length; i++)
5001 TREE_OPERAND (t, i) = va_arg (p, tree);
5003 va_end (p);
5004 return t;
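/* Hedged example (operands are hypothetical): build_nt creates "naked"
   nodes whose type and flags are left for the caller to fill in, as
   stabilize_reference does above, e.g.

     tree ref = build_nt (ARRAY_REF, base, index, NULL_TREE, NULL_TREE);
     TREE_TYPE (ref) = elt_type;

   No TREE_SIDE_EFFECTS, TREE_READONLY or TREE_CONSTANT propagation is
   performed here, unlike in build1..build5.  */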
5007 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5008 tree vec. */
5010 tree
5011 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5013 tree ret, t;
5014 unsigned int ix;
5016 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5017 CALL_EXPR_FN (ret) = fn;
5018 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5019 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5020 CALL_EXPR_ARG (ret, ix) = t;
5021 return ret;
5024 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5025 and data type TYPE.
5026 We do NOT enter this node in any sort of symbol table.
5028 LOC is the location of the decl.
5030 layout_decl is used to set up the decl's storage layout.
5031 Other slots are initialized to 0 or null pointers. */
5033 tree
5034 build_decl (location_t loc, enum tree_code code, tree name,
5035 tree type MEM_STAT_DECL)
5037 tree t;
5039 t = make_node (code PASS_MEM_STAT);
5040 DECL_SOURCE_LOCATION (t) = loc;
5042 /* if (type == error_mark_node)
5043 type = integer_type_node; */
5044 /* That is not done, deliberately, so that having error_mark_node
5045 as the type can suppress useless errors in the use of this variable. */
5047 DECL_NAME (t) = name;
5048 TREE_TYPE (t) = type;
5050 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5051 layout_decl (t, 0);
5053 return t;
5056 /* Builds and returns a function declaration with NAME and TYPE. */
5058 tree
5059 build_fn_decl (const char *name, tree type)
5061 tree id = get_identifier (name);
5062 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5064 DECL_EXTERNAL (decl) = 1;
5065 TREE_PUBLIC (decl) = 1;
5066 DECL_ARTIFICIAL (decl) = 1;
5067 TREE_NOTHROW (decl) = 1;
5069 return decl;
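/* Illustrative sketch (the helper name is made up): a pass that needs a
   "void f (void)" declaration can write

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__my_helper", fntype);

   which yields an external, public, artificial, nothrow FUNCTION_DECL;
   plain build_decl is used the same way for VAR_DECLs, PARM_DECLs and
   RESULT_DECLs, for which layout_decl is run automatically.  */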
5072 vec<tree, va_gc> *all_translation_units;
5074 /* Builds a new translation-unit decl with name NAME, queues it in the
5075 global list of translation-unit decls and returns it. */
5077 tree
5078 build_translation_unit_decl (tree name)
5080 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5081 name, NULL_TREE);
5082 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5083 vec_safe_push (all_translation_units, tu);
5084 return tu;
5088 /* BLOCK nodes are used to represent the structure of binding contours
5089 and declarations, once those contours have been exited and their contents
5090 compiled. This information is used for outputting debugging info. */
5092 tree
5093 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5095 tree block = make_node (BLOCK);
5097 BLOCK_VARS (block) = vars;
5098 BLOCK_SUBBLOCKS (block) = subblocks;
5099 BLOCK_SUPERCONTEXT (block) = supercontext;
5100 BLOCK_CHAIN (block) = chain;
5101 return block;
5105 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5107 LOC is the location to use in tree T. */
5109 void
5110 protected_set_expr_location (tree t, location_t loc)
5112 if (CAN_HAVE_LOCATION_P (t))
5113 SET_EXPR_LOCATION (t, loc);
5116 /* Data used when collecting DECLs and TYPEs for language data removal. */
5118 class free_lang_data_d
5120 public:
5121 free_lang_data_d () : decls (100), types (100) {}
5123 /* Worklist to avoid excessive recursion. */
5124 auto_vec<tree> worklist;
5126 /* Set of traversed objects. Used to avoid duplicate visits. */
5127 hash_set<tree> pset;
5129 /* Array of symbols to process with free_lang_data_in_decl. */
5130 auto_vec<tree> decls;
5132 /* Array of types to process with free_lang_data_in_type. */
5133 auto_vec<tree> types;
5137 /* Add type or decl T to one of the lists of tree nodes that need their
5138 language data removed. The lists are held inside FLD. */
5140 static void
5141 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5143 if (DECL_P (t))
5144 fld->decls.safe_push (t);
5145 else if (TYPE_P (t))
5146 fld->types.safe_push (t);
5147 else
5148 gcc_unreachable ();
5151 /* Push tree node T into FLD->WORKLIST. */
5153 static inline void
5154 fld_worklist_push (tree t, class free_lang_data_d *fld)
5156 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5157 fld->worklist.safe_push ((t));
5162 /* Return simplified TYPE_NAME of TYPE. */
5164 static tree
5165 fld_simplified_type_name (tree type)
5167 if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5168 return TYPE_NAME (type);
5169 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5170 TYPE_DECL if the type doesn't have linkage.
5171 this must match fld_ */
5172 if (type != TYPE_MAIN_VARIANT (type)
5173 || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
5174 && (TREE_CODE (type) != RECORD_TYPE
5175 || !TYPE_BINFO (type)
5176 || !BINFO_VTABLE (TYPE_BINFO (type)))))
5177 return DECL_NAME (TYPE_NAME (type));
5178 return TYPE_NAME (type);
5181 /* Do the same comparison as check_qualified_type, skipping the lang part
5182 of the type and being more permissive about type names: we only care
5183 that names are the same (for diagnostics) and that ODR names are the same.
5184 If INNER_TYPE is non-NULL, be sure that TREE_TYPE matches it. */
5186 static bool
5187 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5189 if (TYPE_QUALS (t) != TYPE_QUALS (v)
5190 /* We want to match incomplete variants with complete types.
5191 In this case we need to ignore alignment. */
5192 || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5193 && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5194 || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5195 || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5196 || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5197 TYPE_ATTRIBUTES (v))
5198 || (inner_type && TREE_TYPE (v) != inner_type))
5199 return false;
5201 return true;
5204 /* Find the variant of FIRST that matches T and create a new one if necessary.
5205 Set TREE_TYPE to INNER_TYPE if non-NULL. */
5207 static tree
5208 fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
5209 tree inner_type = NULL)
5211 if (first == TYPE_MAIN_VARIANT (t))
5212 return t;
5213 for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5214 if (fld_type_variant_equal_p (t, v, inner_type))
5215 return v;
5216 tree v = build_variant_type_copy (first);
5217 TYPE_READONLY (v) = TYPE_READONLY (t);
5218 TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5219 TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5220 TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5221 TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5222 TYPE_NAME (v) = TYPE_NAME (t);
5223 TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5224 TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5225 /* Variants of incomplete types should have alignment
5226 set to BITS_PER_UNIT. Do not copy the actual alignment. */
5227 if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5229 SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5230 TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5232 if (inner_type)
5233 TREE_TYPE (v) = inner_type;
5234 gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
5235 if (!fld->pset.add (v))
5236 add_tree_to_fld_list (v, fld);
5237 return v;
5240 /* Map complete types to incomplete types. */
5242 static hash_map<tree, tree> *fld_incomplete_types;
5244 /* Map types to simplified types. */
5246 static hash_map<tree, tree> *fld_simplified_types;
5248 /* Produce a variant of T whose TREE_TYPE is T2. If it is the main variant,
5249 use MAP to prevent duplicates. */
5251 static tree
5252 fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
5253 class free_lang_data_d *fld)
5255 if (TREE_TYPE (t) == t2)
5256 return t;
5258 if (TYPE_MAIN_VARIANT (t) != t)
5260 return fld_type_variant
5261 (fld_process_array_type (TYPE_MAIN_VARIANT (t),
5262 TYPE_MAIN_VARIANT (t2), map, fld),
5263 t, fld, t2);
5266 bool existed;
5267 tree &array
5268 = map->get_or_insert (t, &existed);
5269 if (!existed)
5271 array = build_array_type_1 (t2, TYPE_DOMAIN (t),
5272 TYPE_TYPELESS_STORAGE (t), false);
5273 TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
5274 if (!fld->pset.add (array))
5275 add_tree_to_fld_list (array, fld);
5277 return array;
5280 /* Return CTX after removing contexts that are not relevant. */
5282 static tree
5283 fld_decl_context (tree ctx)
5285 /* Variably modified types are needed for tree_is_indexable to decide
5286 whether the type needs to go to local or global section.
5287 This code is semi-broken but for now it is easiest to keep contexts
5288 as expected. */
5289 if (ctx && TYPE_P (ctx)
5290 && !variably_modified_type_p (ctx, NULL_TREE))
5292 while (ctx && TYPE_P (ctx))
5293 ctx = TYPE_CONTEXT (ctx);
5295 return ctx;
5298 /* For T being an aggregate type, try to turn it into an incomplete variant.
5299 Return T if no simplification is possible. */
5301 static tree
5302 fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
5304 if (!t)
5305 return NULL;
5306 if (POINTER_TYPE_P (t))
5308 tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5309 if (t2 != TREE_TYPE (t))
5311 tree first;
5312 if (TREE_CODE (t) == POINTER_TYPE)
5313 first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5314 TYPE_REF_CAN_ALIAS_ALL (t));
5315 else
5316 first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5317 TYPE_REF_CAN_ALIAS_ALL (t));
5318 gcc_assert (TYPE_CANONICAL (t2) != t2
5319 && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5320 if (!fld->pset.add (first))
5321 add_tree_to_fld_list (first, fld);
5322 return fld_type_variant (first, t, fld);
5324 return t;
5326 if (TREE_CODE (t) == ARRAY_TYPE)
5327 return fld_process_array_type (t,
5328 fld_incomplete_type_of (TREE_TYPE (t), fld),
5329 fld_incomplete_types, fld);
5330 if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
5331 || !COMPLETE_TYPE_P (t))
5332 return t;
5333 if (TYPE_MAIN_VARIANT (t) == t)
5335 bool existed;
5336 tree &copy
5337 = fld_incomplete_types->get_or_insert (t, &existed);
5339 if (!existed)
5341 copy = build_distinct_type_copy (t);
5343 /* It is possible that type was not seen by free_lang_data yet. */
5344 if (!fld->pset.add (copy))
5345 add_tree_to_fld_list (copy, fld);
5346 TYPE_SIZE (copy) = NULL;
5347 TYPE_USER_ALIGN (copy) = 0;
5348 TYPE_SIZE_UNIT (copy) = NULL;
5349 TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5350 TREE_ADDRESSABLE (copy) = 0;
5351 if (AGGREGATE_TYPE_P (t))
5353 SET_TYPE_MODE (copy, VOIDmode);
5354 SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5355 TYPE_TYPELESS_STORAGE (copy) = 0;
5356 TYPE_FIELDS (copy) = NULL;
5357 TYPE_BINFO (copy) = NULL;
5359 else
5360 TYPE_VALUES (copy) = NULL;
5362 /* Build a copy of the TYPE_DECL in TYPE_NAME if necessary.
5363 This is needed for ODR violation warnings to come out right (we
5364 want duplicate TYPE_DECLs whenever the type is duplicated because
5365 of an ODR violation). Because lang data in the TYPE_DECL may not
5366 have been freed yet, rebuild it from scratch and copy relevant
5367 fields. */
5368 TYPE_NAME (copy) = fld_simplified_type_name (copy);
5369 tree name = TYPE_NAME (copy);
5371 if (name && TREE_CODE (name) == TYPE_DECL)
5373 gcc_checking_assert (TREE_TYPE (name) == t);
5374 tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
5375 DECL_NAME (name), copy);
5376 if (DECL_ASSEMBLER_NAME_SET_P (name))
5377 SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
5378 SET_DECL_ALIGN (name2, 0);
5379 DECL_CONTEXT (name2) = fld_decl_context
5380 (DECL_CONTEXT (name));
5381 TYPE_NAME (copy) = name2;
5384 return copy;
5386 return (fld_type_variant
5387 (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
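/* Conceptual example (C source level, not literal trees): for

     struct S { int a; double b; } *p;

   only the pointer is needed once language data is freed, so the type
   behind P can point to an incomplete variant of struct S, roughly what
   the forward declaration "struct S;" would give; size, fields, binfo
   and alignment are dropped by the code above.  */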
5390 /* Simplify type T for scenarios where we do not need complete pointer
5391 types. */
5393 static tree
5394 fld_simplified_type (tree t, class free_lang_data_d *fld)
5396 if (!t)
5397 return t;
5398 if (POINTER_TYPE_P (t))
5399 return fld_incomplete_type_of (t, fld);
5400 /* FIXME: This triggers verification error, see PR88140. */
5401 if (TREE_CODE (t) == ARRAY_TYPE && 0)
5402 return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
5403 fld_simplified_types, fld);
5404 return t;
5407 /* Reset the expression *EXPR_P, a size or position.
5409 ??? We could reset all non-constant sizes or positions. But it's cheap
5410 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5412 We need to reset self-referential sizes or positions because they cannot
5413 be gimplified and thus can contain a CALL_EXPR after the gimplification
5414 is finished, which will run afoul of LTO streaming. And they need to be
5415 reset to something essentially dummy but not constant, so as to preserve
5416 the properties of the object they are attached to. */
5418 static inline void
5419 free_lang_data_in_one_sizepos (tree *expr_p)
5421 tree expr = *expr_p;
5422 if (CONTAINS_PLACEHOLDER_P (expr))
5423 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5427 /* Reset all the fields in a binfo node BINFO. We only keep
5428 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5430 static void
5431 free_lang_data_in_binfo (tree binfo)
5433 unsigned i;
5434 tree t;
5436 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5438 BINFO_VIRTUALS (binfo) = NULL_TREE;
5439 BINFO_BASE_ACCESSES (binfo) = NULL;
5440 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5441 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5442 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5444 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5445 free_lang_data_in_binfo (t);
5449 /* Reset all language specific information still present in TYPE. */
5451 static void
5452 free_lang_data_in_type (tree type, class free_lang_data_d *fld)
5454 gcc_assert (TYPE_P (type));
5456 /* Give the FE a chance to remove its own data first. */
5457 lang_hooks.free_lang_data (type);
5459 TREE_LANG_FLAG_0 (type) = 0;
5460 TREE_LANG_FLAG_1 (type) = 0;
5461 TREE_LANG_FLAG_2 (type) = 0;
5462 TREE_LANG_FLAG_3 (type) = 0;
5463 TREE_LANG_FLAG_4 (type) = 0;
5464 TREE_LANG_FLAG_5 (type) = 0;
5465 TREE_LANG_FLAG_6 (type) = 0;
5467 TYPE_NEEDS_CONSTRUCTING (type) = 0;
5469 /* Purge non-marked variants from the variants chain, so that they
5470 don't reappear in the IL after free_lang_data. */
5471 while (TYPE_NEXT_VARIANT (type)
5472 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
5474 tree t = TYPE_NEXT_VARIANT (type);
5475 TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
5476 /* Turn the removed types into distinct types. */
5477 TYPE_MAIN_VARIANT (t) = t;
5478 TYPE_NEXT_VARIANT (t) = NULL_TREE;
5481 if (TREE_CODE (type) == FUNCTION_TYPE)
5483 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5484 /* Remove the const and volatile qualifiers from arguments. The
5485 C++ front end removes them, but the C front end does not,
5486 leading to false ODR violation errors when merging two
5487 instances of the same function signature compiled by
5488 different front ends. */
5489 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5491 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5492 tree arg_type = TREE_VALUE (p);
5494 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5496 int quals = TYPE_QUALS (arg_type)
5497 & ~TYPE_QUAL_CONST
5498 & ~TYPE_QUAL_VOLATILE;
5499 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5500 if (!fld->pset.add (TREE_VALUE (p)))
5501 free_lang_data_in_type (TREE_VALUE (p), fld);
5503 /* C++ FE uses TREE_PURPOSE to store initial values. */
5504 TREE_PURPOSE (p) = NULL;
5507 else if (TREE_CODE (type) == METHOD_TYPE)
5509 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5510 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5512 /* C++ FE uses TREE_PURPOSE to store initial values. */
5513 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5514 TREE_PURPOSE (p) = NULL;
5517 else if (RECORD_OR_UNION_TYPE_P (type))
5519 /* Remove members that are not FIELD_DECLs from the field list
5520 of an aggregate. These occur in C++. */
5521 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5522 if (TREE_CODE (member) == FIELD_DECL)
5523 prev = &DECL_CHAIN (member);
5524 else
5525 *prev = DECL_CHAIN (member);
5527 TYPE_VFIELD (type) = NULL_TREE;
5529 if (TYPE_BINFO (type))
5531 free_lang_data_in_binfo (TYPE_BINFO (type));
5532 /* We need to preserve the link to bases and the virtual table for all
5533 polymorphic types to make the devirtualization machinery work. */
5534 if (!BINFO_VTABLE (TYPE_BINFO (type))
5535 || !flag_devirtualize)
5536 TYPE_BINFO (type) = NULL;
5539 else if (INTEGRAL_TYPE_P (type)
5540 || SCALAR_FLOAT_TYPE_P (type)
5541 || FIXED_POINT_TYPE_P (type))
5543 if (TREE_CODE (type) == ENUMERAL_TYPE)
5545 /* Type values are used only for C++ ODR checking. Drop them
5546 for all type variants and non-ODR types.
5547 For ODR types the data is freed in free_odr_warning_data. */
5548 if (TYPE_MAIN_VARIANT (type) != type
5549 || !type_with_linkage_p (type))
5550 TYPE_VALUES (type) = NULL;
5551 else
5552 /* Simplify representation by recording only values rather
5553 than const decls. */
5554 for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
5555 if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
5556 TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
5558 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5559 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5562 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5564 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5565 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5567 if (TYPE_CONTEXT (type)
5568 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5570 tree ctx = TYPE_CONTEXT (type);
5573 ctx = BLOCK_SUPERCONTEXT (ctx);
5575 while (ctx && TREE_CODE (ctx) == BLOCK);
5576 TYPE_CONTEXT (type) = ctx;
5579 TYPE_STUB_DECL (type) = NULL;
5580 TYPE_NAME (type) = fld_simplified_type_name (type);
5584 /* Return true if DECL may need an assembler name to be set. */
5586 static inline bool
5587 need_assembler_name_p (tree decl)
5589 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5590 Rule merging. This makes type_odr_p return true on those types during
5591 LTO and, by comparing the mangled names, we can say which types are intended
5592 to be equivalent across compilation units.
5594 We do not store names of type_in_anonymous_namespace_p.
5596 Record, union and enumeration types have linkage that allows us
5597 to check type_in_anonymous_namespace_p. We do not mangle compound types
5598 that can always be compared structurally.
5600 Similarly for builtin types, we compare properties of their main variant.
5601 A special case is integer types, where mangling does make a difference
5602 between char/signed char/unsigned char etc. Storing the name for these makes
5603 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5604 See cp/mangle.c:write_builtin_type for details. */
5606 if (TREE_CODE (decl) == TYPE_DECL)
5608 if (DECL_NAME (decl)
5609 && decl == TYPE_NAME (TREE_TYPE (decl))
5610 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5611 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5612 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
5613 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
5614 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
5615 && (type_with_linkage_p (TREE_TYPE (decl))
5616 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5617 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5618 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5619 return false;
5621 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5622 if (!VAR_OR_FUNCTION_DECL_P (decl))
5623 return false;
5625 /* If DECL already has its assembler name set, it does not need a
5626 new one. */
5627 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5628 || DECL_ASSEMBLER_NAME_SET_P (decl))
5629 return false;
5631 /* Abstract decls do not need an assembler name. */
5632 if (DECL_ABSTRACT_P (decl))
5633 return false;
5635 /* For VAR_DECLs, only static, public and external symbols need an
5636 assembler name. */
5637 if (VAR_P (decl)
5638 && !TREE_STATIC (decl)
5639 && !TREE_PUBLIC (decl)
5640 && !DECL_EXTERNAL (decl))
5641 return false;
5643 if (TREE_CODE (decl) == FUNCTION_DECL)
5645 /* Do not set assembler name on builtins. Allow RTL expansion to
5646 decide whether to expand inline or via a regular call. */
5647 if (fndecl_built_in_p (decl)
5648 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5649 return false;
5651 /* Functions represented in the callgraph need an assembler name. */
5652 if (cgraph_node::get (decl) != NULL)
5653 return true;
5655 /* Unused and not public functions don't need an assembler name. */
5656 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5657 return false;
5660 return true;
5664 /* Reset all language specific information still present in symbol
5665 DECL. */
5667 static void
5668 free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
5670 gcc_assert (DECL_P (decl));
5672 /* Give the FE a chance to remove its own data first. */
5673 lang_hooks.free_lang_data (decl);
5675 TREE_LANG_FLAG_0 (decl) = 0;
5676 TREE_LANG_FLAG_1 (decl) = 0;
5677 TREE_LANG_FLAG_2 (decl) = 0;
5678 TREE_LANG_FLAG_3 (decl) = 0;
5679 TREE_LANG_FLAG_4 (decl) = 0;
5680 TREE_LANG_FLAG_5 (decl) = 0;
5681 TREE_LANG_FLAG_6 (decl) = 0;
5683 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5684 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5685 if (TREE_CODE (decl) == FIELD_DECL)
5687 DECL_FCONTEXT (decl) = NULL;
5688 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5689 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5690 DECL_QUALIFIER (decl) = NULL_TREE;
5693 if (TREE_CODE (decl) == FUNCTION_DECL)
5695 struct cgraph_node *node;
5696 /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5697 the address may be taken in another unit, so this flag has no practical
5698 use for the middle-end.
5700 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5701 for public objects that indeed cannot be addressed, but that is not
5702 the case. Set the flag to true so we do not get merge failures for
5703 e.g. virtual tables between units that take the address of them and
5704 units that don't. */
5705 if (TREE_PUBLIC (decl))
5706 TREE_ADDRESSABLE (decl) = true;
5707 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5708 if (!(node = cgraph_node::get (decl))
5709 || (!node->definition && !node->clones))
5711 if (node)
5712 node->release_body ();
5713 else
5715 release_function_body (decl);
5716 DECL_ARGUMENTS (decl) = NULL;
5717 DECL_RESULT (decl) = NULL;
5718 DECL_INITIAL (decl) = error_mark_node;
5721 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5723 tree t;
5725 /* If DECL has a gimple body, then the context for its
5726 arguments must be DECL. Otherwise, it doesn't really
5727 matter, as we will not be emitting any code for DECL. In
5728 general, there may be other instances of DECL created by
5729 the front end and since PARM_DECLs are generally shared,
5730 their DECL_CONTEXT changes as the replicas of DECL are
5731 created. The only time where DECL_CONTEXT is important
5732 is for the FUNCTION_DECLs that have a gimple body (since
5733 the PARM_DECL will be used in the function's body). */
5734 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5735 DECL_CONTEXT (t) = decl;
5736 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5737 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5738 = target_option_default_node;
5739 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5740 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5741 = optimization_default_node;
5744 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5745 At this point, it is not needed anymore. */
5746 DECL_SAVED_TREE (decl) = NULL_TREE;
5748 /* Clear the abstract origin if it refers to a method.
5749 Otherwise dwarf2out.c will ICE as we splice functions out of
5750 TYPE_FIELDS and thus the origin will not be output
5751 correctly. */
5752 if (DECL_ABSTRACT_ORIGIN (decl)
5753 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5754 && RECORD_OR_UNION_TYPE_P
5755 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5756 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5758 DECL_VINDEX (decl) = NULL_TREE;
5760 else if (VAR_P (decl))
5762 /* See the comment above for why we set the flag for functions. */
5763 if (TREE_PUBLIC (decl))
5764 TREE_ADDRESSABLE (decl) = true;
5765 if ((DECL_EXTERNAL (decl)
5766 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5767 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5768 DECL_INITIAL (decl) = NULL_TREE;
5770 else if (TREE_CODE (decl) == TYPE_DECL)
5772 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5773 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5774 TREE_PUBLIC (decl) = 0;
5775 TREE_PRIVATE (decl) = 0;
5776 DECL_ARTIFICIAL (decl) = 0;
5777 TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5778 DECL_INITIAL (decl) = NULL_TREE;
5779 DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5780 DECL_MODE (decl) = VOIDmode;
5781 SET_DECL_ALIGN (decl, 0);
5782 /* TREE_TYPE is cleared at WPA time in free_odr_warning_data. */
5784 else if (TREE_CODE (decl) == FIELD_DECL)
5786 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5787 DECL_INITIAL (decl) = NULL_TREE;
5789 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5790 && DECL_INITIAL (decl)
5791 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5793 /* Strip builtins from the translation-unit BLOCK. We still have targets
5794 without builtin_decl_explicit support and also builtins are shared
5795 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5796 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5797 while (*nextp)
5799 tree var = *nextp;
5800 if (fndecl_built_in_p (var))
5801 *nextp = TREE_CHAIN (var);
5802 else
5803 nextp = &TREE_CHAIN (var);
5806 /* We need to keep field decls associated with their trees. Otherwise tree
5807 merging may merge some fields and keep others disjoint, which in turn will
5808 not do well with TREE_CHAIN pointers linking them.
5810 Also do not drop containing types for virtual methods and tables because
5811 these are needed by devirtualization.
5812 C++ destructors are special because C++ frontends sometimes produce a
5813 virtual destructor as an alias of a non-virtual destructor. In
5814 devirtualization code we always walk through aliases and we need the
5815 context to be preserved too. See PR89335. */
5816 if (TREE_CODE (decl) != FIELD_DECL
5817 && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5818 || (!DECL_VIRTUAL_P (decl)
5819 && (TREE_CODE (decl) != FUNCTION_DECL
5820 || !DECL_CXX_DESTRUCTOR_P (decl)))))
5821 DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
5825 /* Operand callback helper for free_lang_data_in_node. *TP is the
5826 subtree operand being considered. */
5828 static tree
5829 find_decls_types_r (tree *tp, int *ws, void *data)
5831 tree t = *tp;
5832 class free_lang_data_d *fld = (class free_lang_data_d *) data;
5834 if (TREE_CODE (t) == TREE_LIST)
5835 return NULL_TREE;
5837 /* Language specific nodes will be removed, so there is no need
5838 to gather anything under them. */
5839 if (is_lang_specific (t))
5841 *ws = 0;
5842 return NULL_TREE;
5845 if (DECL_P (t))
5847 /* Note that walk_tree does not traverse every possible field in
5848 decls, so we have to do our own traversals here. */
5849 add_tree_to_fld_list (t, fld);
5851 fld_worklist_push (DECL_NAME (t), fld);
5852 fld_worklist_push (DECL_CONTEXT (t), fld);
5853 fld_worklist_push (DECL_SIZE (t), fld);
5854 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5856 /* We are going to remove everything under DECL_INITIAL for
5857 TYPE_DECLs. No point walking them. */
5858 if (TREE_CODE (t) != TYPE_DECL)
5859 fld_worklist_push (DECL_INITIAL (t), fld);
5861 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5862 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5864 if (TREE_CODE (t) == FUNCTION_DECL)
5866 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5867 fld_worklist_push (DECL_RESULT (t), fld);
5869 else if (TREE_CODE (t) == FIELD_DECL)
5871 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5872 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5873 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5874 fld_worklist_push (DECL_FCONTEXT (t), fld);
5877 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5878 && DECL_HAS_VALUE_EXPR_P (t))
5879 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5881 if (TREE_CODE (t) != FIELD_DECL
5882 && TREE_CODE (t) != TYPE_DECL)
5883 fld_worklist_push (TREE_CHAIN (t), fld);
5884 *ws = 0;
5886 else if (TYPE_P (t))
5888 /* Note that walk_tree does not traverse every possible field in
5889 types, so we have to do our own traversals here. */
5890 add_tree_to_fld_list (t, fld);
5892 if (!RECORD_OR_UNION_TYPE_P (t))
5893 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5894 fld_worklist_push (TYPE_SIZE (t), fld);
5895 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5896 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5897 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5898 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5899 fld_worklist_push (TYPE_NAME (t), fld);
5900 /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5901 lists, we may look types up in these lists and use them while
5902 optimizing the function body. Thus we need to free lang data
5903 in them. */
5904 if (TREE_CODE (t) == POINTER_TYPE)
5905 fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
5906 if (TREE_CODE (t) == REFERENCE_TYPE)
5907 fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
5908 if (!POINTER_TYPE_P (t))
5909 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5910 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5911 if (!RECORD_OR_UNION_TYPE_P (t))
5912 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5913 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5914 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5915 do not, and do not want to, reach unused variants this way. */
5916 if (TYPE_CONTEXT (t))
5918 tree ctx = TYPE_CONTEXT (t);
5919 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5920 So push that instead. */
5921 while (ctx && TREE_CODE (ctx) == BLOCK)
5922 ctx = BLOCK_SUPERCONTEXT (ctx);
5923 fld_worklist_push (ctx, fld);
5925 fld_worklist_push (TYPE_CANONICAL (t), fld);
5927 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5929 unsigned i;
5930 tree tem;
5931 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5932 fld_worklist_push (TREE_TYPE (tem), fld);
5933 fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
5934 fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
5936 if (RECORD_OR_UNION_TYPE_P (t))
5938 tree tem;
5939 /* Push all TYPE_FIELDS - interesting and non-interesting things
5940 can be interleaved. */
5941 tem = TYPE_FIELDS (t);
5942 while (tem)
5944 if (TREE_CODE (tem) == FIELD_DECL)
5945 fld_worklist_push (tem, fld);
5946 tem = TREE_CHAIN (tem);
5949 if (FUNC_OR_METHOD_TYPE_P (t))
5950 fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
5952 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5953 *ws = 0;
5955 else if (TREE_CODE (t) == BLOCK)
5957 for (tree *tem = &BLOCK_VARS (t); *tem; )
5959 if (TREE_CODE (*tem) != VAR_DECL
5960 || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem)))
5962 gcc_assert (TREE_CODE (*tem) != RESULT_DECL
5963 && TREE_CODE (*tem) != PARM_DECL);
5964 *tem = TREE_CHAIN (*tem);
5966 else
5968 fld_worklist_push (*tem, fld);
5969 tem = &TREE_CHAIN (*tem);
5972 for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5973 fld_worklist_push (tem, fld);
5974 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5977 if (TREE_CODE (t) != IDENTIFIER_NODE
5978 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5979 fld_worklist_push (TREE_TYPE (t), fld);
5981 return NULL_TREE;
5985 /* Find decls and types in T. */
5987 static void
5988 find_decls_types (tree t, class free_lang_data_d *fld)
5990 while (1)
5992 if (!fld->pset.contains (t))
5993 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5994 if (fld->worklist.is_empty ())
5995 break;
5996 t = fld->worklist.pop ();
6000 /* Translate all the types in LIST into the corresponding runtime
6001 types. */
6003 static tree
6004 get_eh_types_for_runtime (tree list)
6006 tree head, prev;
6008 if (list == NULL_TREE)
6009 return NULL_TREE;
6011 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6012 prev = head;
6013 list = TREE_CHAIN (list);
6014 while (list)
6016 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6017 TREE_CHAIN (prev) = n;
6018 prev = TREE_CHAIN (prev);
6019 list = TREE_CHAIN (list);
6022 return head;
6026 /* Find decls and types referenced in EH region R and store them in
6027 FLD->DECLS and FLD->TYPES. */
6029 static void
6030 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6032 switch (r->type)
6034 case ERT_CLEANUP:
6035 break;
6037 case ERT_TRY:
6039 eh_catch c;
6041 /* The types referenced in each catch must first be changed to the
6042 EH types used at runtime. This removes references to FE types
6043 in the region. */
6044 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6046 c->type_list = get_eh_types_for_runtime (c->type_list);
6047 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6050 break;
6052 case ERT_ALLOWED_EXCEPTIONS:
6053 r->u.allowed.type_list
6054 = get_eh_types_for_runtime (r->u.allowed.type_list);
6055 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6056 break;
6058 case ERT_MUST_NOT_THROW:
6059 walk_tree (&r->u.must_not_throw.failure_decl,
6060 find_decls_types_r, fld, &fld->pset);
6061 break;
6066 /* Find decls and types referenced in cgraph node N and store them in
6067 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6068 look for *every* kind of DECL and TYPE node reachable from N,
6069 including those embedded inside types and decls (i.e., TYPE_DECLs,
6070 NAMESPACE_DECLs, etc.). */
6072 static void
6073 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6075 basic_block bb;
6076 struct function *fn;
6077 unsigned ix;
6078 tree t;
6080 find_decls_types (n->decl, fld);
6082 if (!gimple_has_body_p (n->decl))
6083 return;
6085 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6087 fn = DECL_STRUCT_FUNCTION (n->decl);
6089 /* Traverse locals. */
6090 FOR_EACH_LOCAL_DECL (fn, ix, t)
6091 find_decls_types (t, fld);
6093 /* Traverse EH regions in FN. */
6095 eh_region r;
6096 FOR_ALL_EH_REGION_FN (r, fn)
6097 find_decls_types_in_eh_region (r, fld);
6100 /* Traverse every statement in FN. */
6101 FOR_EACH_BB_FN (bb, fn)
6103 gphi_iterator psi;
6104 gimple_stmt_iterator si;
6105 unsigned i;
6107 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6109 gphi *phi = psi.phi ();
6111 for (i = 0; i < gimple_phi_num_args (phi); i++)
6113 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6114 find_decls_types (*arg_p, fld);
6118 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6120 gimple *stmt = gsi_stmt (si);
6122 if (is_gimple_call (stmt))
6123 find_decls_types (gimple_call_fntype (stmt), fld);
6125 for (i = 0; i < gimple_num_ops (stmt); i++)
6127 tree arg = gimple_op (stmt, i);
6128 find_decls_types (arg, fld);
6135 /* Find decls and types referenced in varpool node N and store them in
6136 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6137 look for *every* kind of DECL and TYPE node reachable from N,
6138 including those embedded inside types and decls (i.e., TYPE_DECLs,
6139 NAMESPACE_DECLs, etc.). */
6141 static void
6142 find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
6144 find_decls_types (v->decl, fld);
6147 /* If T needs an assembler name, have one created for it. */
6149 void
6150 assign_assembler_name_if_needed (tree t)
6152 if (need_assembler_name_p (t))
6154 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6155 diagnostics that use input_location to show locus
6156 information. The problem here is that, at this point,
6157 input_location is generally anchored to the end of the file
6158 (since the parser is long gone), so we don't have a good
6159 position to pin it to.
6161 To alleviate this problem, this uses the location of T's
6162 declaration. Examples of this are
6163 testsuite/g++.dg/template/cond2.C and
6164 testsuite/g++.dg/template/pr35240.C. */
6165 location_t saved_location = input_location;
6166 input_location = DECL_SOURCE_LOCATION (t);
6168 decl_assembler_name (t);
6170 input_location = saved_location;
6175 /* Free language specific information for every operand and expression
6176 in every node of the call graph. This process operates in three stages:
6178 1- Every callgraph node and varpool node is traversed looking for
6179 decls and types embedded in them. This is a more exhaustive
6180 search than that done by find_referenced_vars, because it will
6181 also collect individual fields, decls embedded in types, etc.
6183 2- All the decls found are sent to free_lang_data_in_decl.
6185 3- All the types found are sent to free_lang_data_in_type.
6187 The ordering between decls and types is important because
6188 free_lang_data_in_decl sets assembler names, which includes
6189 mangling. So types cannot be freed up until assembler names have
6190 been set up. */
6192 static void
6193 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6195 struct cgraph_node *n;
6196 varpool_node *v;
6197 tree t;
6198 unsigned i;
6199 alias_pair *p;
6201 /* Find decls and types in the body of every function in the callgraph. */
6202 FOR_EACH_FUNCTION (n)
6203 find_decls_types_in_node (n, fld);
6205 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6206 find_decls_types (p->decl, fld);
6208 /* Find decls and types in every varpool symbol. */
6209 FOR_EACH_VARIABLE (v)
6210 find_decls_types_in_var (v, fld);
6212 /* Set the assembler name on every decl found. We need to do this
6213 now because free_lang_data_in_decl will invalidate data needed
6214 for mangling; doing it later would break mangling on interdependent decls. */
6215 FOR_EACH_VEC_ELT (fld->decls, i, t)
6216 assign_assembler_name_if_needed (t);
6218 /* Traverse every decl found freeing its language data. */
6219 FOR_EACH_VEC_ELT (fld->decls, i, t)
6220 free_lang_data_in_decl (t, fld);
6222 /* Traverse every type found freeing its language data. */
6223 FOR_EACH_VEC_ELT (fld->types, i, t)
6224 free_lang_data_in_type (t, fld);
6228 /* Free resources that are used by the front end but are not needed once it is done. */
6230 static unsigned
6231 free_lang_data (void)
6233 unsigned i;
6234 class free_lang_data_d fld;
6236 /* If we are the LTO frontend we have freed lang-specific data already. */
6237 if (in_lto_p
6238 || (!flag_generate_lto && !flag_generate_offload))
6240 /* Rebuild type inheritance graph even when not doing LTO to get
6241 consistent profile data. */
6242 rebuild_type_inheritance_graph ();
6243 return 0;
6246 fld_incomplete_types = new hash_map<tree, tree>;
6247 fld_simplified_types = new hash_map<tree, tree>;
6249 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
6250 if (vec_safe_is_empty (all_translation_units))
6251 build_translation_unit_decl (NULL_TREE);
6253 /* Allocate and assign alias sets to the standard integer types
6254 while the slots still hold the nodes the frontends generated. */
6255 for (i = 0; i < itk_none; ++i)
6256 if (integer_types[i])
6257 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6259 /* Traverse the IL resetting language specific information for
6260 operands, expressions, etc. */
6261 free_lang_data_in_cgraph (&fld);
6263 /* Create gimple variants for common types. */
6264 for (unsigned i = 0;
6265 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6266 ++i)
6267 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6269 /* Reset some langhooks. Do not reset types_compatible_p, it may
6270 still be used indirectly via the get_alias_set langhook. */
6271 lang_hooks.dwarf_name = lhd_dwarf_name;
6272 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6273 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6274 lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6275 lang_hooks.print_xnode = lhd_print_tree_nothing;
6276 lang_hooks.print_decl = lhd_print_tree_nothing;
6277 lang_hooks.print_type = lhd_print_tree_nothing;
6278 lang_hooks.print_identifier = lhd_print_tree_nothing;
6280 lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6282 if (flag_checking)
6284 int i;
6285 tree t;
6287 FOR_EACH_VEC_ELT (fld.types, i, t)
6288 verify_type (t);
6291 /* We do not want the default decl_assembler_name implementation;
6292 rather, once everything is fixed, we want a wrapper around it that
6293 asserts all non-local symbols already got their assembler names and
6294 only produces assembler names for local symbols.  Or, better, make
6295 sure we never call decl_assembler_name on local symbols and devise
6296 a separate, middle-end private scheme for it. */
6298 /* Reset diagnostic machinery. */
6299 tree_diagnostics_defaults (global_dc);
6301 rebuild_type_inheritance_graph ();
6303 delete fld_incomplete_types;
6304 delete fld_simplified_types;
6306 return 0;
6310 namespace {
6312 const pass_data pass_data_ipa_free_lang_data =
6314 SIMPLE_IPA_PASS, /* type */
6315 "*free_lang_data", /* name */
6316 OPTGROUP_NONE, /* optinfo_flags */
6317 TV_IPA_FREE_LANG_DATA, /* tv_id */
6318 0, /* properties_required */
6319 0, /* properties_provided */
6320 0, /* properties_destroyed */
6321 0, /* todo_flags_start */
6322 0, /* todo_flags_finish */
6325 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6327 public:
6328 pass_ipa_free_lang_data (gcc::context *ctxt)
6329 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6332 /* opt_pass methods: */
6333 virtual unsigned int execute (function *) { return free_lang_data (); }
6335 }; // class pass_ipa_free_lang_data
6337 } // anon namespace
6339 simple_ipa_opt_pass *
6340 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6342 return new pass_ipa_free_lang_data (ctxt);
6345 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6346 of the various TYPE_QUAL values. */
6348 static void
6349 set_type_quals (tree type, int type_quals)
6351 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6352 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6353 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6354 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6355 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6358 /* Returns true iff CAND and BASE have equivalent language-specific
6359 qualifiers. */
6361 bool
6362 check_lang_type (const_tree cand, const_tree base)
6364 if (lang_hooks.types.type_hash_eq == NULL)
6365 return true;
6366 /* type_hash_eq currently only applies to these types. */
6367 if (TREE_CODE (cand) != FUNCTION_TYPE
6368 && TREE_CODE (cand) != METHOD_TYPE)
6369 return true;
6370 return lang_hooks.types.type_hash_eq (cand, base);
6373 /* This function checks to see if TYPE matches the size of one of the built-in
6374 atomic types, and returns that core atomic type. */
6376 static tree
6377 find_atomic_core_type (const_tree type)
6379 tree base_atomic_type;
6381 /* Only handle complete types. */
6382 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6383 return NULL_TREE;
6385 switch (tree_to_uhwi (TYPE_SIZE (type)))
6387 case 8:
6388 base_atomic_type = atomicQI_type_node;
6389 break;
6391 case 16:
6392 base_atomic_type = atomicHI_type_node;
6393 break;
6395 case 32:
6396 base_atomic_type = atomicSI_type_node;
6397 break;
6399 case 64:
6400 base_atomic_type = atomicDI_type_node;
6401 break;
6403 case 128:
6404 base_atomic_type = atomicTI_type_node;
6405 break;
6407 default:
6408 base_atomic_type = NULL_TREE;
6411 return base_atomic_type;
6414 /* Returns true iff unqualified CAND and BASE are equivalent. */
6416 bool
6417 check_base_type (const_tree cand, const_tree base)
6419 if (TYPE_NAME (cand) != TYPE_NAME (base)
6420 /* Apparently this is needed for Objective-C. */
6421 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6422 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6423 TYPE_ATTRIBUTES (base)))
6424 return false;
6425 /* Check alignment. */
6426 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6427 return true;
6428 /* Atomic types increase minimal alignment.  We must do so as well
6429 or we get duplicated canonical types. See PR88686. */
6430 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6432 /* See if this object can map to a basic atomic type. */
6433 tree atomic_type = find_atomic_core_type (cand);
6434 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6435 return true;
6437 return false;
6440 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6442 bool
6443 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6445 return (TYPE_QUALS (cand) == type_quals
6446 && check_base_type (cand, base)
6447 && check_lang_type (cand, base));
6450 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6452 static bool
6453 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6455 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6456 && TYPE_NAME (cand) == TYPE_NAME (base)
6457 /* Apparently this is needed for Objective-C. */
6458 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6459 /* Check alignment. */
6460 && TYPE_ALIGN (cand) == align
6461 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6462 TYPE_ATTRIBUTES (base))
6463 && check_lang_type (cand, base));
6466 /* Return a version of the TYPE, qualified as indicated by the
6467 TYPE_QUALS, if one exists. If no qualified version exists yet,
6468 return NULL_TREE. */
6470 tree
6471 get_qualified_type (tree type, int type_quals)
6473 if (TYPE_QUALS (type) == type_quals)
6474 return type;
6476 tree mv = TYPE_MAIN_VARIANT (type);
6477 if (check_qualified_type (mv, type, type_quals))
6478 return mv;
6480 /* Search the chain of variants to see if there is already one there just
6481 like the one we need to have. If so, use that existing one. We must
6482 preserve the TYPE_NAME, since there is code that depends on this. */
6483 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6484 if (check_qualified_type (*tp, type, type_quals))
6486 /* Put the found variant at the head of the variant list so
6487 frequently searched variants get found faster. The C++ FE
6488 benefits greatly from this. */
6489 tree t = *tp;
6490 *tp = TYPE_NEXT_VARIANT (t);
6491 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6492 TYPE_NEXT_VARIANT (mv) = t;
6493 return t;
6496 return NULL_TREE;
6499 /* Like get_qualified_type, but creates the type if it does not
6500 exist. This function never returns NULL_TREE. */
6502 tree
6503 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6505 tree t;
6507 /* See if we already have the appropriate qualified variant. */
6508 t = get_qualified_type (type, type_quals);
6510 /* If not, build it. */
6511 if (!t)
6513 t = build_variant_type_copy (type PASS_MEM_STAT);
6514 set_type_quals (t, type_quals);
6516 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6518 /* See if this object can map to a basic atomic type. */
6519 tree atomic_type = find_atomic_core_type (type);
6520 if (atomic_type)
6522 /* Ensure the alignment of this type is compatible with
6523 the required alignment of the atomic type. */
6524 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6525 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6529 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6530 /* Propagate structural equality. */
6531 SET_TYPE_STRUCTURAL_EQUALITY (t);
6532 else if (TYPE_CANONICAL (type) != type)
6533 /* Build the underlying canonical type, since it is different
6534 from TYPE. */
6536 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6537 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6539 else
6540 /* T is its own canonical type. */
6541 TYPE_CANONICAL (t) = t;
6545 return t;
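/* Illustrative sketch, not part of the original sources: a typical use of
   build_qualified_type from a front end.  Requesting the same qualified
   variant twice yields the same node, because get_qualified_type finds the
   cached variant on the second call.

     tree ctype = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     gcc_assert (TYPE_READONLY (ctype));
     gcc_assert (TYPE_MAIN_VARIANT (ctype) == integer_type_node);
     gcc_assert (build_qualified_type (integer_type_node, TYPE_QUAL_CONST)
                 == ctype);  */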
6548 /* Create a variant of type T with alignment ALIGN. */
6550 tree
6551 build_aligned_type (tree type, unsigned int align)
6553 tree t;
6555 if (TYPE_PACKED (type)
6556 || TYPE_ALIGN (type) == align)
6557 return type;
6559 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6560 if (check_aligned_type (t, type, align))
6561 return t;
6563 t = build_variant_type_copy (type);
6564 SET_TYPE_ALIGN (t, align);
6565 TYPE_USER_ALIGN (t) = 1;
6567 return t;
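/* Illustrative sketch, not part of the original sources: requesting an
   over-aligned variant of a type.  ALIGN is measured in bits.

     tree aligned = build_aligned_type (char_type_node, 64);
     gcc_assert (TYPE_ALIGN (aligned) == 64);
     gcc_assert (TYPE_USER_ALIGN (aligned));
     gcc_assert (TYPE_MAIN_VARIANT (aligned) == char_type_node);  */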
6570 /* Create a new distinct copy of TYPE. The new type is made its own
6571 MAIN_VARIANT. If TYPE requires structural equality checks, the
6572 resulting type requires structural equality checks; otherwise, its
6573 TYPE_CANONICAL points to itself. */
6575 tree
6576 build_distinct_type_copy (tree type MEM_STAT_DECL)
6578 tree t = copy_node (type PASS_MEM_STAT);
6580 TYPE_POINTER_TO (t) = 0;
6581 TYPE_REFERENCE_TO (t) = 0;
6583 /* Set the canonical type either to a new equivalence class, or
6584 propagate the need for structural equality checks. */
6585 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6586 SET_TYPE_STRUCTURAL_EQUALITY (t);
6587 else
6588 TYPE_CANONICAL (t) = t;
6590 /* Make it its own variant. */
6591 TYPE_MAIN_VARIANT (t) = t;
6592 TYPE_NEXT_VARIANT (t) = 0;
6594 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6595 whose TREE_TYPE is not t. This can also happen in the Ada
6596 frontend when using subtypes. */
6598 return t;
6601 /* Create a new variant of TYPE, equivalent but distinct. This is so
6602 the caller can modify it. TYPE_CANONICAL for the return type will
6603 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6604 are considered equal by the language itself (or that both types
6605 require structural equality checks). */
6607 tree
6608 build_variant_type_copy (tree type MEM_STAT_DECL)
6610 tree t, m = TYPE_MAIN_VARIANT (type);
6612 t = build_distinct_type_copy (type PASS_MEM_STAT);
6614 /* Since we're building a variant, assume that it is a non-semantic
6615 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6616 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6617 /* Type variants have no alias set defined. */
6618 TYPE_ALIAS_SET (t) = -1;
6620 /* Add the new type to the chain of variants of TYPE. */
6621 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6622 TYPE_NEXT_VARIANT (m) = t;
6623 TYPE_MAIN_VARIANT (t) = m;
6625 return t;
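/* Illustrative sketch, not part of the original sources: contrasting the two
   copy primitives above.  A variant copy stays on the variant chain of the
   original, while a distinct copy starts a new main variant and a new
   canonical type.

     tree v = build_variant_type_copy (integer_type_node);
     gcc_assert (TYPE_MAIN_VARIANT (v) == integer_type_node);

     tree d = build_distinct_type_copy (integer_type_node);
     gcc_assert (TYPE_MAIN_VARIANT (d) == d);
     gcc_assert (TYPE_CANONICAL (d) == d);  */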
6628 /* Return true if the FROM trees in both tree maps are equal. */
6631 tree_map_base_eq (const void *va, const void *vb)
6633 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6634 *const b = (const struct tree_map_base *) vb;
6635 return (a->from == b->from);
6638 /* Hash a FROM tree in a tree_map_base. */
6640 unsigned int
6641 tree_map_base_hash (const void *item)
6643 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6646 /* Return true if this tree map structure is marked for garbage collection
6647 purposes. We simply return true if the from tree is marked, so that this
6648 structure goes away when the from tree goes away. */
6651 tree_map_base_marked_p (const void *p)
6653 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6656 /* Hash a from tree in a tree_map. */
6658 unsigned int
6659 tree_map_hash (const void *item)
6661 return (((const struct tree_map *) item)->hash);
6664 /* Hash a from tree in a tree_decl_map. */
6666 unsigned int
6667 tree_decl_map_hash (const void *item)
6669 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6672 /* Return the initialization priority for DECL. */
6674 priority_type
6675 decl_init_priority_lookup (tree decl)
6677 symtab_node *snode = symtab_node::get (decl);
6679 if (!snode)
6680 return DEFAULT_INIT_PRIORITY;
6681 return
6682 snode->get_init_priority ();
6685 /* Return the finalization priority for DECL. */
6687 priority_type
6688 decl_fini_priority_lookup (tree decl)
6690 cgraph_node *node = cgraph_node::get (decl);
6692 if (!node)
6693 return DEFAULT_INIT_PRIORITY;
6694 return
6695 node->get_fini_priority ();
6698 /* Set the initialization priority for DECL to PRIORITY. */
6700 void
6701 decl_init_priority_insert (tree decl, priority_type priority)
6703 struct symtab_node *snode;
6705 if (priority == DEFAULT_INIT_PRIORITY)
6707 snode = symtab_node::get (decl);
6708 if (!snode)
6709 return;
6711 else if (VAR_P (decl))
6712 snode = varpool_node::get_create (decl);
6713 else
6714 snode = cgraph_node::get_create (decl);
6715 snode->set_init_priority (priority);
6718 /* Set the finalization priority for DECL to PRIORITY. */
6720 void
6721 decl_fini_priority_insert (tree decl, priority_type priority)
6723 struct cgraph_node *node;
6725 if (priority == DEFAULT_INIT_PRIORITY)
6727 node = cgraph_node::get (decl);
6728 if (!node)
6729 return;
6731 else
6732 node = cgraph_node::get_create (decl);
6733 node->set_fini_priority (priority);
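/* Illustrative sketch, not part of the original sources: recording and
   querying a constructor priority for a hypothetical FUNCTION_DECL FNDECL.
   Inserting DEFAULT_INIT_PRIORITY does nothing unless a symtab node for the
   decl already exists.

     decl_init_priority_insert (fndecl, 1000);
     gcc_assert (decl_init_priority_lookup (fndecl) == 1000);  */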
6736 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6738 static void
6739 print_debug_expr_statistics (void)
6741 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6742 (long) debug_expr_for_decl->size (),
6743 (long) debug_expr_for_decl->elements (),
6744 debug_expr_for_decl->collisions ());
6747 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6749 static void
6750 print_value_expr_statistics (void)
6752 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6753 (long) value_expr_for_decl->size (),
6754 (long) value_expr_for_decl->elements (),
6755 value_expr_for_decl->collisions ());
6758 /* Lookup a debug expression for FROM, and return it if we find one. */
6760 tree
6761 decl_debug_expr_lookup (tree from)
6763 struct tree_decl_map *h, in;
6764 in.base.from = from;
6766 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6767 if (h)
6768 return h->to;
6769 return NULL_TREE;
6772 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6774 void
6775 decl_debug_expr_insert (tree from, tree to)
6777 struct tree_decl_map *h;
6779 h = ggc_alloc<tree_decl_map> ();
6780 h->base.from = from;
6781 h->to = to;
6782 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6785 /* Lookup a value expression for FROM, and return it if we find one. */
6787 tree
6788 decl_value_expr_lookup (tree from)
6790 struct tree_decl_map *h, in;
6791 in.base.from = from;
6793 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6794 if (h)
6795 return h->to;
6796 return NULL_TREE;
6799 /* Insert a mapping FROM->TO in the value expression hashtable. */
6801 void
6802 decl_value_expr_insert (tree from, tree to)
6804 struct tree_decl_map *h;
6806 h = ggc_alloc<tree_decl_map> ();
6807 h->base.from = from;
6808 h->to = to;
6809 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
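/* Illustrative sketch, not part of the original sources: the hash tables
   above back the DECL_VALUE_EXPR machinery.  For a hypothetical VAR_DECL
   VAR and replacement expression EXPR a caller would typically write:

     SET_DECL_VALUE_EXPR (var, expr);
     DECL_HAS_VALUE_EXPR_P (var) = 1;
     gcc_assert (DECL_VALUE_EXPR (var) == expr);  */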
6812 /* Lookup a vector of debug arguments for FROM, and return it if we
6813 find one. */
6815 vec<tree, va_gc> **
6816 decl_debug_args_lookup (tree from)
6818 struct tree_vec_map *h, in;
6820 if (!DECL_HAS_DEBUG_ARGS_P (from))
6821 return NULL;
6822 gcc_checking_assert (debug_args_for_decl != NULL);
6823 in.base.from = from;
6824 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6825 if (h)
6826 return &h->to;
6827 return NULL;
6830 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6831 arguments hashtable. */
6833 vec<tree, va_gc> **
6834 decl_debug_args_insert (tree from)
6836 struct tree_vec_map *h;
6837 tree_vec_map **loc;
6839 if (DECL_HAS_DEBUG_ARGS_P (from))
6840 return decl_debug_args_lookup (from);
6841 if (debug_args_for_decl == NULL)
6842 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6843 h = ggc_alloc<tree_vec_map> ();
6844 h->base.from = from;
6845 h->to = NULL;
6846 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6847 *loc = h;
6848 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6849 return &h->to;
6852 /* Hashing of types so that we don't make duplicates.
6853 The entry point is `type_hash_canon'. */
6855 /* Generate the default hash code for TYPE. This is designed for
6856 speed, rather than maximum entropy. */
6858 hashval_t
6859 type_hash_canon_hash (tree type)
6861 inchash::hash hstate;
6863 hstate.add_int (TREE_CODE (type));
6865 if (TREE_TYPE (type))
6866 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6868 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6869 /* Just the identifier is adequate to distinguish. */
6870 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6872 switch (TREE_CODE (type))
6874 case METHOD_TYPE:
6875 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6876 /* FALLTHROUGH. */
6877 case FUNCTION_TYPE:
6878 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6879 if (TREE_VALUE (t) != error_mark_node)
6880 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6881 break;
6883 case OFFSET_TYPE:
6884 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6885 break;
6887 case ARRAY_TYPE:
6889 if (TYPE_DOMAIN (type))
6890 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6891 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6893 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6894 hstate.add_object (typeless);
6897 break;
6899 case INTEGER_TYPE:
6901 tree t = TYPE_MAX_VALUE (type);
6902 if (!t)
6903 t = TYPE_MIN_VALUE (type);
6904 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6905 hstate.add_object (TREE_INT_CST_ELT (t, i));
6906 break;
6909 case REAL_TYPE:
6910 case FIXED_POINT_TYPE:
6912 unsigned prec = TYPE_PRECISION (type);
6913 hstate.add_object (prec);
6914 break;
6917 case VECTOR_TYPE:
6918 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6919 break;
6921 default:
6922 break;
6925 return hstate.end ();
6928 /* These are the Hashtable callback functions. */
6930 /* Returns true iff the types are equivalent. */
6932 bool
6933 type_cache_hasher::equal (type_hash *a, type_hash *b)
6935 /* First test the things that are the same for all types. */
6936 if (a->hash != b->hash
6937 || TREE_CODE (a->type) != TREE_CODE (b->type)
6938 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6939 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6940 TYPE_ATTRIBUTES (b->type))
6941 || (TREE_CODE (a->type) != COMPLEX_TYPE
6942 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6943 return 0;
6945 /* Be careful about comparing arrays before and after the element type
6946 has been completed; don't compare TYPE_ALIGN unless both types are
6947 complete. */
6948 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6949 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6950 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6951 return 0;
6953 switch (TREE_CODE (a->type))
6955 case VOID_TYPE:
6956 case COMPLEX_TYPE:
6957 case POINTER_TYPE:
6958 case REFERENCE_TYPE:
6959 case NULLPTR_TYPE:
6960 return 1;
6962 case VECTOR_TYPE:
6963 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6964 TYPE_VECTOR_SUBPARTS (b->type));
6966 case ENUMERAL_TYPE:
6967 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6968 && !(TYPE_VALUES (a->type)
6969 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6970 && TYPE_VALUES (b->type)
6971 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6972 && type_list_equal (TYPE_VALUES (a->type),
6973 TYPE_VALUES (b->type))))
6974 return 0;
6976 /* fall through */
6978 case INTEGER_TYPE:
6979 case REAL_TYPE:
6980 case BOOLEAN_TYPE:
6981 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6982 return false;
6983 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6984 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6985 TYPE_MAX_VALUE (b->type)))
6986 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6987 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6988 TYPE_MIN_VALUE (b->type))));
6990 case FIXED_POINT_TYPE:
6991 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6993 case OFFSET_TYPE:
6994 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6996 case METHOD_TYPE:
6997 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6998 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6999 || (TYPE_ARG_TYPES (a->type)
7000 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7001 && TYPE_ARG_TYPES (b->type)
7002 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7003 && type_list_equal (TYPE_ARG_TYPES (a->type),
7004 TYPE_ARG_TYPES (b->type)))))
7005 break;
7006 return 0;
7007 case ARRAY_TYPE:
7008 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7009 where the flag should be inherited from the element type
7010 and can change after ARRAY_TYPEs are created; on non-aggregates
7011 compare it and hash it, scalars will never have that flag set
7012 and we need to differentiate between arrays created by different
7013 front-ends or middle-end created arrays. */
7014 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
7015 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
7016 || (TYPE_TYPELESS_STORAGE (a->type)
7017 == TYPE_TYPELESS_STORAGE (b->type))));
7019 case RECORD_TYPE:
7020 case UNION_TYPE:
7021 case QUAL_UNION_TYPE:
7022 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7023 || (TYPE_FIELDS (a->type)
7024 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7025 && TYPE_FIELDS (b->type)
7026 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7027 && type_list_equal (TYPE_FIELDS (a->type),
7028 TYPE_FIELDS (b->type))));
7030 case FUNCTION_TYPE:
7031 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7032 || (TYPE_ARG_TYPES (a->type)
7033 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7034 && TYPE_ARG_TYPES (b->type)
7035 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7036 && type_list_equal (TYPE_ARG_TYPES (a->type),
7037 TYPE_ARG_TYPES (b->type))))
7038 break;
7039 return 0;
7041 default:
7042 return 0;
7045 if (lang_hooks.types.type_hash_eq != NULL)
7046 return lang_hooks.types.type_hash_eq (a->type, b->type);
7048 return 1;
7051 /* Given TYPE, and HASHCODE its hash code, return the canonical
7052 object for an identical type if one already exists.
7053 Otherwise, return TYPE, and record it as the canonical object.
7055 To use this function, first create a type of the sort you want.
7056 Then compute its hash code from the fields of the type that
7057 make it different from other similar types.
7058 Then call this function and use the value. */
7060 tree
7061 type_hash_canon (unsigned int hashcode, tree type)
7063 type_hash in;
7064 type_hash **loc;
7066 /* The hash table only contains main variants, so ensure that's what we're
7067 being passed. */
7068 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7070 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7071 must call that routine before comparing TYPE_ALIGNs. */
7072 layout_type (type);
7074 in.hash = hashcode;
7075 in.type = type;
7077 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7078 if (*loc)
7080 tree t1 = ((type_hash *) *loc)->type;
7081 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
7082 && t1 != type);
7083 if (TYPE_UID (type) + 1 == next_type_uid)
7084 --next_type_uid;
7085 /* Also free the min/max values and the cached values for integer
7086 types. This can't be done in free_node, as LTO frees
7087 those on its own. */
7088 if (TREE_CODE (type) == INTEGER_TYPE)
7090 if (TYPE_MIN_VALUE (type)
7091 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
7093 /* Zero is always in TYPE_CACHED_VALUES. */
7094 if (! TYPE_UNSIGNED (type))
7095 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
7096 ggc_free (TYPE_MIN_VALUE (type));
7098 if (TYPE_MAX_VALUE (type)
7099 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
7101 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
7102 ggc_free (TYPE_MAX_VALUE (type));
7104 if (TYPE_CACHED_VALUES_P (type))
7105 ggc_free (TYPE_CACHED_VALUES (type));
7107 free_node (type);
7108 return t1;
7110 else
7112 struct type_hash *h;
7114 h = ggc_alloc<type_hash> ();
7115 h->hash = hashcode;
7116 h->type = type;
7117 *loc = h;
7119 return type;
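/* Illustrative sketch, not part of the original sources: the calling
   protocol described above, as also used by build_nonstandard_integer_type
   later in this file.  The freshly built node is handed to type_hash_canon,
   which either interns it or frees it and returns the existing equivalent.

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 24;
     fixup_unsigned_type (t);
     inchash::hash hstate;
     inchash::add_expr (TYPE_MAX_VALUE (t), hstate);
     t = type_hash_canon (hstate.end (), t);  */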
7123 static void
7124 print_type_hash_statistics (void)
7126 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7127 (long) type_hash_table->size (),
7128 (long) type_hash_table->elements (),
7129 type_hash_table->collisions ());
7132 /* Given two lists of types
7133 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7134 return 1 if the lists contain the same types in the same order.
7135 Also, the TREE_PURPOSEs must match. */
7137 bool
7138 type_list_equal (const_tree l1, const_tree l2)
7140 const_tree t1, t2;
7142 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7143 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7144 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7145 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7146 && (TREE_TYPE (TREE_PURPOSE (t1))
7147 == TREE_TYPE (TREE_PURPOSE (t2))))))
7148 return false;
7150 return t1 == t2;
7153 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7154 given by FNTYPE.  If the argument list accepts variable arguments,
7155 then this function counts only the ordinary arguments. */
7158 type_num_arguments (const_tree fntype)
7160 int i = 0;
7162 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7163 /* If the function does not take a variable number of arguments,
7164 the last element in the list will have type `void'. */
7165 if (VOID_TYPE_P (TREE_VALUE (t)))
7166 break;
7167 else
7168 ++i;
7170 return i;
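/* Illustrative sketch, not part of the original sources: the terminating
   void entry of a non-variadic argument list is not counted.

     tree fntype = build_function_type_list (void_type_node,
                                             integer_type_node,
                                             ptr_type_node, NULL_TREE);
     gcc_assert (type_num_arguments (fntype) == 2);  */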
7173 /* Return the type of the function TYPE's argument ARGNO if known.
7174 For a vararg function where ARGNO refers to one of the variadic
7175 arguments, return null.  Otherwise, return void_type_node for
7176 out-of-bounds ARGNO. */
7178 tree
7179 type_argument_type (const_tree fntype, unsigned argno)
7181 /* Treat zero the same as an out-of-bounds argument number. */
7182 if (!argno)
7183 return void_type_node;
7185 function_args_iterator iter;
7187 tree argtype;
7188 unsigned i = 1;
7189 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7191 /* A vararg function's argument list ends in a null. Otherwise,
7192 an ordinary function's argument list ends with void. Return
7193 null if ARGNO refers to a vararg argument, void_type_node if
7194 it's out of bounds, and the formal argument type otherwise. */
7195 if (!argtype)
7196 break;
7198 if (i == argno || VOID_TYPE_P (argtype))
7199 return argtype;
7201 ++i;
7204 return NULL_TREE;
7207 /* Nonzero if integer constants T1 and T2
7208 represent the same constant value. */
7211 tree_int_cst_equal (const_tree t1, const_tree t2)
7213 if (t1 == t2)
7214 return 1;
7216 if (t1 == 0 || t2 == 0)
7217 return 0;
7219 STRIP_ANY_LOCATION_WRAPPER (t1);
7220 STRIP_ANY_LOCATION_WRAPPER (t2);
7222 if (TREE_CODE (t1) == INTEGER_CST
7223 && TREE_CODE (t2) == INTEGER_CST
7224 && wi::to_widest (t1) == wi::to_widest (t2))
7225 return 1;
7227 return 0;
7230 /* Return true if T is an INTEGER_CST whose numerical value (extended
7231 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7233 bool
7234 tree_fits_shwi_p (const_tree t)
7236 return (t != NULL_TREE
7237 && TREE_CODE (t) == INTEGER_CST
7238 && wi::fits_shwi_p (wi::to_widest (t)));
7241 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7242 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7244 bool
7245 tree_fits_poly_int64_p (const_tree t)
7247 if (t == NULL_TREE)
7248 return false;
7249 if (POLY_INT_CST_P (t))
7251 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7252 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7253 return false;
7254 return true;
7256 return (TREE_CODE (t) == INTEGER_CST
7257 && wi::fits_shwi_p (wi::to_widest (t)));
7260 /* Return true if T is an INTEGER_CST whose numerical value (extended
7261 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7263 bool
7264 tree_fits_uhwi_p (const_tree t)
7266 return (t != NULL_TREE
7267 && TREE_CODE (t) == INTEGER_CST
7268 && wi::fits_uhwi_p (wi::to_widest (t)));
7271 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7272 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7274 bool
7275 tree_fits_poly_uint64_p (const_tree t)
7277 if (t == NULL_TREE)
7278 return false;
7279 if (POLY_INT_CST_P (t))
7281 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7282 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7283 return false;
7284 return true;
7286 return (TREE_CODE (t) == INTEGER_CST
7287 && wi::fits_uhwi_p (wi::to_widest (t)));
7290 /* T is an INTEGER_CST whose numerical value (extended according to
7291 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7292 HOST_WIDE_INT. */
7294 HOST_WIDE_INT
7295 tree_to_shwi (const_tree t)
7297 gcc_assert (tree_fits_shwi_p (t));
7298 return TREE_INT_CST_LOW (t);
7301 /* T is an INTEGER_CST whose numerical value (extended according to
7302 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7303 HOST_WIDE_INT. */
7305 unsigned HOST_WIDE_INT
7306 tree_to_uhwi (const_tree t)
7308 gcc_assert (tree_fits_uhwi_p (t));
7309 return TREE_INT_CST_LOW (t);
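/* Illustrative sketch, not part of the original sources: the usual
   check-then-convert pattern for the accessors above, here for the byte
   size of a hypothetical complete type TYPE.

     if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
       {
         unsigned HOST_WIDE_INT bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
         ...
       }  */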
7312 /* Return the most significant (sign) bit of T. */
7315 tree_int_cst_sign_bit (const_tree t)
7317 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7319 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7322 /* Return an indication of the sign of the integer constant T.
7323 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7324 Note that -1 will never be returned if T's type is unsigned. */
7327 tree_int_cst_sgn (const_tree t)
7329 if (wi::to_wide (t) == 0)
7330 return 0;
7331 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7332 return 1;
7333 else if (wi::neg_p (wi::to_wide (t)))
7334 return -1;
7335 else
7336 return 1;
7339 /* Return the minimum number of bits needed to represent VALUE in a
7340 signed or unsigned type, as indicated by SGN. */
7342 unsigned int
7343 tree_int_cst_min_precision (tree value, signop sgn)
7345 /* If the value is negative, compute its negative minus 1. The latter
7346 adjustment is because the absolute value of the largest negative value
7347 is one larger than the largest positive value. This is equivalent to
7348 a bit-wise negation, so use that operation instead. */
7350 if (tree_int_cst_sgn (value) < 0)
7351 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7353 /* Return the number of bits needed, taking into account the fact
7354 that we need one more bit for a signed than unsigned type.
7355 If value is 0 or -1, the minimum precision is 1 no matter
7356 whether SGN is SIGNED or UNSIGNED. */
7358 if (integer_zerop (value))
7359 return 1;
7360 else
7361 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
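/* Illustrative sketch, not part of the original sources: the constant 255
   needs 8 bits as an unsigned value and 9 bits as a signed value, the
   extra bit being the sign.

     tree c = build_int_cst (integer_type_node, 255);
     gcc_assert (tree_int_cst_min_precision (c, UNSIGNED) == 8);
     gcc_assert (tree_int_cst_min_precision (c, SIGNED) == 9);  */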
7364 /* Return truthvalue of whether T1 is the same tree structure as T2.
7365 Return 1 if they are the same.
7366 Return 0 if they are understandably different.
7367 Return -1 if either contains tree structure not understood by
7368 this function. */
7371 simple_cst_equal (const_tree t1, const_tree t2)
7373 enum tree_code code1, code2;
7374 int cmp;
7375 int i;
7377 if (t1 == t2)
7378 return 1;
7379 if (t1 == 0 || t2 == 0)
7380 return 0;
7382 /* For location wrappers to be the same, they must be at the same
7383 source location (and wrap the same thing). */
7384 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7386 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7387 return 0;
7388 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7391 code1 = TREE_CODE (t1);
7392 code2 = TREE_CODE (t2);
7394 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7396 if (CONVERT_EXPR_CODE_P (code2)
7397 || code2 == NON_LVALUE_EXPR)
7398 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7399 else
7400 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7403 else if (CONVERT_EXPR_CODE_P (code2)
7404 || code2 == NON_LVALUE_EXPR)
7405 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7407 if (code1 != code2)
7408 return 0;
7410 switch (code1)
7412 case INTEGER_CST:
7413 return wi::to_widest (t1) == wi::to_widest (t2);
7415 case REAL_CST:
7416 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7418 case FIXED_CST:
7419 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7421 case STRING_CST:
7422 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7423 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7424 TREE_STRING_LENGTH (t1)));
7426 case CONSTRUCTOR:
7428 unsigned HOST_WIDE_INT idx;
7429 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7430 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7432 if (vec_safe_length (v1) != vec_safe_length (v2))
7433 return false;
7435 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7436 /* ??? Should we also handle fields here? */
7437 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7438 return false;
7439 return true;
7442 case SAVE_EXPR:
7443 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7445 case CALL_EXPR:
7446 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7447 if (cmp <= 0)
7448 return cmp;
7449 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7450 return 0;
7452 const_tree arg1, arg2;
7453 const_call_expr_arg_iterator iter1, iter2;
7454 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7455 arg2 = first_const_call_expr_arg (t2, &iter2);
7456 arg1 && arg2;
7457 arg1 = next_const_call_expr_arg (&iter1),
7458 arg2 = next_const_call_expr_arg (&iter2))
7460 cmp = simple_cst_equal (arg1, arg2);
7461 if (cmp <= 0)
7462 return cmp;
7464 return arg1 == arg2;
7467 case TARGET_EXPR:
7468 /* Special case: if either target is an unallocated VAR_DECL,
7469 it means that it's going to be unified with whatever the
7470 TARGET_EXPR is really supposed to initialize, so treat it
7471 as being equivalent to anything. */
7472 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7473 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7474 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7475 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7476 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7477 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7478 cmp = 1;
7479 else
7480 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7482 if (cmp <= 0)
7483 return cmp;
7485 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7487 case WITH_CLEANUP_EXPR:
7488 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7489 if (cmp <= 0)
7490 return cmp;
7492 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7494 case COMPONENT_REF:
7495 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7496 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7498 return 0;
7500 case VAR_DECL:
7501 case PARM_DECL:
7502 case CONST_DECL:
7503 case FUNCTION_DECL:
7504 return 0;
7506 default:
7507 if (POLY_INT_CST_P (t1))
7508 /* A false return means maybe_ne rather than known_ne. */
7509 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7510 TYPE_SIGN (TREE_TYPE (t1))),
7511 poly_widest_int::from (poly_int_cst_value (t2),
7512 TYPE_SIGN (TREE_TYPE (t2))));
7513 break;
7516 /* This general rule works for most tree codes. All exceptions should be
7517 handled above. If this is a language-specific tree code, we can't
7518 trust what might be in the operand, so say we don't know
7519 the situation. */
7520 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7521 return -1;
7523 switch (TREE_CODE_CLASS (code1))
7525 case tcc_unary:
7526 case tcc_binary:
7527 case tcc_comparison:
7528 case tcc_expression:
7529 case tcc_reference:
7530 case tcc_statement:
7531 cmp = 1;
7532 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7534 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7535 if (cmp <= 0)
7536 return cmp;
7539 return cmp;
7541 default:
7542 return -1;
7546 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7547 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7548 than U, respectively. */
7551 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7553 if (tree_int_cst_sgn (t) < 0)
7554 return -1;
7555 else if (!tree_fits_uhwi_p (t))
7556 return 1;
7557 else if (TREE_INT_CST_LOW (t) == u)
7558 return 0;
7559 else if (TREE_INT_CST_LOW (t) < u)
7560 return -1;
7561 else
7562 return 1;
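/* Illustrative sketch, not part of the original sources: consuming the
   three-way result of compare_tree_int.

     tree c = size_int (16);
     gcc_assert (compare_tree_int (c, 16) == 0);
     gcc_assert (compare_tree_int (c, 32) < 0);
     gcc_assert (compare_tree_int (c, 8) > 0);  */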
7565 /* Return true if SIZE represents a constant size that is in bounds of
7566 what the middle-end and the backend accept (covering not more than
7567 half of the address-space).
7568 When PERR is non-null, set *PERR on failure to the description of
7569 why SIZE is not valid. */
7571 bool
7572 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7574 if (POLY_INT_CST_P (size))
7576 if (TREE_OVERFLOW (size))
7577 return false;
7578 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7579 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7580 return false;
7581 return true;
7584 cst_size_error error;
7585 if (!perr)
7586 perr = &error;
7588 if (TREE_CODE (size) != INTEGER_CST)
7590 *perr = cst_size_not_constant;
7591 return false;
7594 if (TREE_OVERFLOW_P (size))
7596 *perr = cst_size_overflow;
7597 return false;
7600 if (tree_int_cst_sgn (size) < 0)
7602 *perr = cst_size_negative;
7603 return false;
7605 if (!tree_fits_uhwi_p (size)
7606 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7607 < wi::to_widest (size) * 2))
7609 *perr = cst_size_too_big;
7610 return false;
7613 return true;
7616 /* Return the precision of the type, or for a complex or vector type the
7617 precision of the type of its elements. */
7619 unsigned int
7620 element_precision (const_tree type)
7622 if (!TYPE_P (type))
7623 type = TREE_TYPE (type);
7624 enum tree_code code = TREE_CODE (type);
7625 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7626 type = TREE_TYPE (type);
7628 return TYPE_PRECISION (type);
7631 /* Return true if CODE represents an associative tree code. Otherwise
7632 return false. */
7633 bool
7634 associative_tree_code (enum tree_code code)
7636 switch (code)
7638 case BIT_IOR_EXPR:
7639 case BIT_AND_EXPR:
7640 case BIT_XOR_EXPR:
7641 case PLUS_EXPR:
7642 case MULT_EXPR:
7643 case MIN_EXPR:
7644 case MAX_EXPR:
7645 return true;
7647 default:
7648 break;
7650 return false;
7653 /* Return true if CODE represents a commutative tree code. Otherwise
7654 return false. */
7655 bool
7656 commutative_tree_code (enum tree_code code)
7658 switch (code)
7660 case PLUS_EXPR:
7661 case MULT_EXPR:
7662 case MULT_HIGHPART_EXPR:
7663 case MIN_EXPR:
7664 case MAX_EXPR:
7665 case BIT_IOR_EXPR:
7666 case BIT_XOR_EXPR:
7667 case BIT_AND_EXPR:
7668 case NE_EXPR:
7669 case EQ_EXPR:
7670 case UNORDERED_EXPR:
7671 case ORDERED_EXPR:
7672 case UNEQ_EXPR:
7673 case LTGT_EXPR:
7674 case TRUTH_AND_EXPR:
7675 case TRUTH_XOR_EXPR:
7676 case TRUTH_OR_EXPR:
7677 case WIDEN_MULT_EXPR:
7678 case VEC_WIDEN_MULT_HI_EXPR:
7679 case VEC_WIDEN_MULT_LO_EXPR:
7680 case VEC_WIDEN_MULT_EVEN_EXPR:
7681 case VEC_WIDEN_MULT_ODD_EXPR:
7682 return true;
7684 default:
7685 break;
7687 return false;
7690 /* Return true if CODE represents a ternary tree code for which the
7691 first two operands are commutative. Otherwise return false. */
7692 bool
7693 commutative_ternary_tree_code (enum tree_code code)
7695 switch (code)
7697 case WIDEN_MULT_PLUS_EXPR:
7698 case WIDEN_MULT_MINUS_EXPR:
7699 case DOT_PROD_EXPR:
7700 return true;
7702 default:
7703 break;
7705 return false;
7708 /* Returns true if CODE can overflow. */
7710 bool
7711 operation_can_overflow (enum tree_code code)
7713 switch (code)
7715 case PLUS_EXPR:
7716 case MINUS_EXPR:
7717 case MULT_EXPR:
7718 case LSHIFT_EXPR:
7719 /* Can overflow in various ways. */
7720 return true;
7721 case TRUNC_DIV_EXPR:
7722 case EXACT_DIV_EXPR:
7723 case FLOOR_DIV_EXPR:
7724 case CEIL_DIV_EXPR:
7725 /* For INT_MIN / -1. */
7726 return true;
7727 case NEGATE_EXPR:
7728 case ABS_EXPR:
7729 /* For -INT_MIN. */
7730 return true;
7731 default:
7732 /* These operators cannot overflow. */
7733 return false;
7737 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7738 -ftrapv doesn't generate trapping insns for CODE. */
7740 bool
7741 operation_no_trapping_overflow (tree type, enum tree_code code)
7743 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7745 /* We don't generate instructions that trap on overflow for complex or vector
7746 types. */
7747 if (!INTEGRAL_TYPE_P (type))
7748 return true;
7750 if (!TYPE_OVERFLOW_TRAPS (type))
7751 return true;
7753 switch (code)
7755 case PLUS_EXPR:
7756 case MINUS_EXPR:
7757 case MULT_EXPR:
7758 case NEGATE_EXPR:
7759 case ABS_EXPR:
7760 /* These operators can overflow, and -ftrapv generates trapping code for
7761 these. */
7762 return false;
7763 case TRUNC_DIV_EXPR:
7764 case EXACT_DIV_EXPR:
7765 case FLOOR_DIV_EXPR:
7766 case CEIL_DIV_EXPR:
7767 case LSHIFT_EXPR:
7768 /* These operators can overflow, but -ftrapv does not generate trapping
7769 code for these. */
7770 return true;
7771 default:
7772 /* These operators cannot overflow. */
7773 return true;
7777 namespace inchash
7780 /* Generate a hash value for an expression. This can be used iteratively
7781 by passing a previous result as the HSTATE argument.
7783 This function is intended to produce the same hash for expressions which
7784 would compare equal using operand_equal_p. */
7785 void
7786 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7788 int i;
7789 enum tree_code code;
7790 enum tree_code_class tclass;
7792 if (t == NULL_TREE || t == error_mark_node)
7794 hstate.merge_hash (0);
7795 return;
7798 STRIP_ANY_LOCATION_WRAPPER (t);
7800 if (!(flags & OEP_ADDRESS_OF))
7801 STRIP_NOPS (t);
7803 code = TREE_CODE (t);
7805 switch (code)
7807 /* Alas, constants aren't shared, so we can't rely on pointer
7808 identity. */
7809 case VOID_CST:
7810 hstate.merge_hash (0);
7811 return;
7812 case INTEGER_CST:
7813 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7814 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7815 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7816 return;
7817 case REAL_CST:
7819 unsigned int val2;
7820 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7821 val2 = rvc_zero;
7822 else
7823 val2 = real_hash (TREE_REAL_CST_PTR (t));
7824 hstate.merge_hash (val2);
7825 return;
7827 case FIXED_CST:
7829 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7830 hstate.merge_hash (val2);
7831 return;
7833 case STRING_CST:
7834 hstate.add ((const void *) TREE_STRING_POINTER (t),
7835 TREE_STRING_LENGTH (t));
7836 return;
7837 case COMPLEX_CST:
7838 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7839 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7840 return;
7841 case VECTOR_CST:
7843 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7844 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7845 unsigned int count = vector_cst_encoded_nelts (t);
7846 for (unsigned int i = 0; i < count; ++i)
7847 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7848 return;
7850 case SSA_NAME:
7851 /* We can just compare by pointer. */
7852 hstate.add_hwi (SSA_NAME_VERSION (t));
7853 return;
7854 case PLACEHOLDER_EXPR:
7855 /* The node itself doesn't matter. */
7856 return;
7857 case BLOCK:
7858 case OMP_CLAUSE:
7859 /* Ignore. */
7860 return;
7861 case TREE_LIST:
7862 /* A list of expressions, for a CALL_EXPR or as the elements of a
7863 VECTOR_CST. */
7864 for (; t; t = TREE_CHAIN (t))
7865 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7866 return;
7867 case CONSTRUCTOR:
7869 unsigned HOST_WIDE_INT idx;
7870 tree field, value;
7871 flags &= ~OEP_ADDRESS_OF;
7872 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7874 inchash::add_expr (field, hstate, flags);
7875 inchash::add_expr (value, hstate, flags);
7877 return;
7879 case STATEMENT_LIST:
7881 tree_stmt_iterator i;
7882 for (i = tsi_start (CONST_CAST_TREE (t));
7883 !tsi_end_p (i); tsi_next (&i))
7884 inchash::add_expr (tsi_stmt (i), hstate, flags);
7885 return;
7887 case TREE_VEC:
7888 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7889 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7890 return;
7891 case IDENTIFIER_NODE:
7892 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
7893 return;
7894 case FUNCTION_DECL:
7895 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7896 Otherwise nodes that compare equal according to operand_equal_p might
7897 get different hash codes. However, don't do this for machine specific
7898 or front end builtins, since the function code is overloaded in those
7899 cases. */
7900 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7901 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7903 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7904 code = TREE_CODE (t);
7906 /* FALL THROUGH */
7907 default:
7908 if (POLY_INT_CST_P (t))
7910 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7911 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
7912 return;
7914 tclass = TREE_CODE_CLASS (code);
7916 if (tclass == tcc_declaration)
7918 /* DECLs have a unique ID. */
7919 hstate.add_hwi (DECL_UID (t));
7921 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7923 /* For comparisons that can be swapped, use the lower
7924 tree code. */
7925 enum tree_code ccode = swap_tree_comparison (code);
7926 if (code < ccode)
7927 ccode = code;
7928 hstate.add_object (ccode);
7929 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7930 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7932 else if (CONVERT_EXPR_CODE_P (code))
7934 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7935 operand_equal_p. */
7936 enum tree_code ccode = NOP_EXPR;
7937 hstate.add_object (ccode);
7939 /* Don't hash the type; that can lead to having nodes which
7940 compare equal according to operand_equal_p, but which
7941 have different hash codes. Make sure to include signedness
7942 in the hash computation. */
7943 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7944 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7946 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7947 else if (code == MEM_REF
7948 && (flags & OEP_ADDRESS_OF) != 0
7949 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7950 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7951 && integer_zerop (TREE_OPERAND (t, 1)))
7952 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7953 hstate, flags);
7954 /* Don't ICE on FE specific trees, or their arguments etc.
7955 during operand_equal_p hash verification. */
7956 else if (!IS_EXPR_CODE_CLASS (tclass))
7957 gcc_assert (flags & OEP_HASH_CHECK);
7958 else
7960 unsigned int sflags = flags;
7962 hstate.add_object (code);
7964 switch (code)
7966 case ADDR_EXPR:
7967 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7968 flags |= OEP_ADDRESS_OF;
7969 sflags = flags;
7970 break;
7972 case INDIRECT_REF:
7973 case MEM_REF:
7974 case TARGET_MEM_REF:
7975 flags &= ~OEP_ADDRESS_OF;
7976 sflags = flags;
7977 break;
7979 case ARRAY_REF:
7980 case ARRAY_RANGE_REF:
7981 case COMPONENT_REF:
7982 case BIT_FIELD_REF:
7983 sflags &= ~OEP_ADDRESS_OF;
7984 break;
7986 case COND_EXPR:
7987 flags &= ~OEP_ADDRESS_OF;
7988 break;
7990 case WIDEN_MULT_PLUS_EXPR:
7991 case WIDEN_MULT_MINUS_EXPR:
7993 /* The multiplication operands are commutative. */
7994 inchash::hash one, two;
7995 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7996 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7997 hstate.add_commutative (one, two);
7998 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7999 return;
8002 case CALL_EXPR:
8003 if (CALL_EXPR_FN (t) == NULL_TREE)
8004 hstate.add_int (CALL_EXPR_IFN (t));
8005 break;
8007 case TARGET_EXPR:
8008 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
8009 Usually different TARGET_EXPRs should just use
8010 different temporaries in their slots. */
8011 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
8012 return;
8014 default:
8015 break;
8018 /* Don't hash the type; that can lead to having nodes which
8019 compare equal according to operand_equal_p, but which
8020 have different hash codes. */
8021 if (code == NON_LVALUE_EXPR)
8023 /* Make sure to include signedness in the hash computation. */
8024 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
8025 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
8028 else if (commutative_tree_code (code))
8030 /* It's a commutative expression. We want to hash it the same
8031 however it appears. We do this by first hashing both operands
8032 and then rehashing based on the order of their independent
8033 hashes. */
8034 inchash::hash one, two;
8035 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8036 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8037 hstate.add_commutative (one, two);
8039 else
8040 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
8041 inchash::add_expr (TREE_OPERAND (t, i), hstate,
8042 i == 0 ? flags : sflags);
8044 return;
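/* Illustrative sketch, not part of the original sources: trees that compare
   equal under operand_equal_p hash identically, including commutative
   expressions with swapped operands.  A and B stand for hypothetical
   integer-typed operand trees.

     inchash::hash h1, h2;
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, a, b), h1);
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, b, a), h2);
     gcc_assert (h1.end () == h2.end ());  */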
8050 /* Constructors for pointer, array and function types.
8051 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8052 constructed by language-dependent code, not here.) */
8054 /* Construct, lay out and return the type of pointers to TO_TYPE with
8055 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8056 reference all of memory. If such a type has already been
8057 constructed, reuse it. */
8059 tree
8060 build_pointer_type_for_mode (tree to_type, machine_mode mode,
8061 bool can_alias_all)
8063 tree t;
8064 bool could_alias = can_alias_all;
8066 if (to_type == error_mark_node)
8067 return error_mark_node;
8069 /* If the pointed-to type has the may_alias attribute set, force
8070 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8071 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8072 can_alias_all = true;
8074 /* In some cases, languages will have things that aren't a POINTER_TYPE
8075 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
8076 In that case, return that type without regard to the rest of our
8077 operands.
8079 ??? This is a kludge, but consistent with the way this function has
8080 always operated and there doesn't seem to be a good way to avoid this
8081 at the moment. */
8082 if (TYPE_POINTER_TO (to_type) != 0
8083 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
8084 return TYPE_POINTER_TO (to_type);
8086 /* First, if we already have a type for pointers to TO_TYPE and it's
8087 the proper mode, use it. */
8088 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
8089 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8090 return t;
8092 t = make_node (POINTER_TYPE);
8094 TREE_TYPE (t) = to_type;
8095 SET_TYPE_MODE (t, mode);
8096 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8097 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
8098 TYPE_POINTER_TO (to_type) = t;
8100 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8101 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8102 SET_TYPE_STRUCTURAL_EQUALITY (t);
8103 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8104 TYPE_CANONICAL (t)
8105 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
8106 mode, false);
8108 /* Lay out the type. This function has many callers that are concerned
8109 with expression-construction, and this simplifies them all. */
8110 layout_type (t);
8112 return t;
8115 /* By default build pointers in ptr_mode. */
8117 tree
8118 build_pointer_type (tree to_type)
8120 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8121 : TYPE_ADDR_SPACE (to_type);
8122 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8123 return build_pointer_type_for_mode (to_type, pointer_mode, false);
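/* A minimal usage sketch: building the type `int *'. The result is
   recorded on TYPE_POINTER_TO of the pointed-to type, so repeating the
   call returns the same node.

     tree iptr = build_pointer_type (integer_type_node);

     gcc_checking_assert (TREE_CODE (iptr) == POINTER_TYPE
                          && TREE_TYPE (iptr) == integer_type_node
                          && iptr == build_pointer_type (integer_type_node)); */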
8126 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8128 tree
8129 build_reference_type_for_mode (tree to_type, machine_mode mode,
8130 bool can_alias_all)
8132 tree t;
8133 bool could_alias = can_alias_all;
8135 if (to_type == error_mark_node)
8136 return error_mark_node;
8138 /* If the pointed-to type has the may_alias attribute set, force
8139 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8140 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8141 can_alias_all = true;
8143 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8144 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8145 In that case, return that type without regard to the rest of our
8146 operands.
8148 ??? This is a kludge, but consistent with the way this function has
8149 always operated and there doesn't seem to be a good way to avoid this
8150 at the moment. */
8151 if (TYPE_REFERENCE_TO (to_type) != 0
8152 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8153 return TYPE_REFERENCE_TO (to_type);
8155 /* First, if we already have a type for references to TO_TYPE and it's
8156 the proper mode, use it. */
8157 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8158 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8159 return t;
8161 t = make_node (REFERENCE_TYPE);
8163 TREE_TYPE (t) = to_type;
8164 SET_TYPE_MODE (t, mode);
8165 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8166 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8167 TYPE_REFERENCE_TO (to_type) = t;
8169 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8170 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8171 SET_TYPE_STRUCTURAL_EQUALITY (t);
8172 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8173 TYPE_CANONICAL (t)
8174 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8175 mode, false);
8177 layout_type (t);
8179 return t;
8183 /* Build the node for the type of references-to-TO_TYPE by default
8184 in ptr_mode. */
8186 tree
8187 build_reference_type (tree to_type)
8189 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8190 : TYPE_ADDR_SPACE (to_type);
8191 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8192 return build_reference_type_for_mode (to_type, pointer_mode, false);
8195 #define MAX_INT_CACHED_PREC \
8196 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8197 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8199 /* Builds a signed or unsigned integer type of precision PRECISION.
8200 Used for C bitfields whose precision does not match that of
8201 built-in target types. */
8202 tree
8203 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8204 int unsignedp)
8206 tree itype, ret;
8208 if (unsignedp)
8209 unsignedp = MAX_INT_CACHED_PREC + 1;
8211 if (precision <= MAX_INT_CACHED_PREC)
8213 itype = nonstandard_integer_type_cache[precision + unsignedp];
8214 if (itype)
8215 return itype;
8218 itype = make_node (INTEGER_TYPE);
8219 TYPE_PRECISION (itype) = precision;
8221 if (unsignedp)
8222 fixup_unsigned_type (itype);
8223 else
8224 fixup_signed_type (itype);
8226 inchash::hash hstate;
8227 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8228 ret = type_hash_canon (hstate.end (), itype);
8229 if (precision <= MAX_INT_CACHED_PREC)
8230 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8232 return ret;
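/* A minimal usage sketch: a 24-bit unsigned integer type, as needed for
   e.g. a bit-field of that width.

     tree u24 = build_nonstandard_integer_type (24, 1);

     gcc_checking_assert (TREE_CODE (u24) == INTEGER_TYPE
                          && TYPE_PRECISION (u24) == 24
                          && TYPE_UNSIGNED (u24)); */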
8235 #define MAX_BOOL_CACHED_PREC \
8236 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8237 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8239 /* Builds a boolean type of precision PRECISION.
8240 Used for boolean vectors to choose proper vector element size. */
8241 tree
8242 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8244 tree type;
8246 if (precision <= MAX_BOOL_CACHED_PREC)
8248 type = nonstandard_boolean_type_cache[precision];
8249 if (type)
8250 return type;
8253 type = make_node (BOOLEAN_TYPE);
8254 TYPE_PRECISION (type) = precision;
8255 fixup_signed_type (type);
8257 if (precision <= MAX_BOOL_CACHED_PREC)
8258 nonstandard_boolean_type_cache[precision] = type;
8260 return type;
8263 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8264 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8265 is true, reuse such a type that has already been constructed. */
8267 static tree
8268 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8270 tree itype = make_node (INTEGER_TYPE);
8272 TREE_TYPE (itype) = type;
8274 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8275 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8277 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8278 SET_TYPE_MODE (itype, TYPE_MODE (type));
8279 TYPE_SIZE (itype) = TYPE_SIZE (type);
8280 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8281 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8282 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8283 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8285 if (!shared)
8286 return itype;
8288 if ((TYPE_MIN_VALUE (itype)
8289 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8290 || (TYPE_MAX_VALUE (itype)
8291 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8293 /* Since we cannot reliably merge this type, we need to compare it using
8294 structural equality checks. */
8295 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8296 return itype;
8299 hashval_t hash = type_hash_canon_hash (itype);
8300 itype = type_hash_canon (hash, itype);
8302 return itype;
8305 /* Wrapper around build_range_type_1 with SHARED set to true. */
8307 tree
8308 build_range_type (tree type, tree lowval, tree highval)
8310 return build_range_type_1 (type, lowval, highval, true);
8313 /* Wrapper around build_range_type_1 with SHARED set to false. */
8315 tree
8316 build_nonshared_range_type (tree type, tree lowval, tree highval)
8318 return build_range_type_1 (type, lowval, highval, false);
8321 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8322 MAXVAL should be the maximum value in the domain
8323 (one less than the length of the array).
8325 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8326 We don't enforce this limit, that is up to caller (e.g. language front end).
8327 The limit exists because the result is a signed type and we don't handle
8328 sizes that use more than one HOST_WIDE_INT. */
8330 tree
8331 build_index_type (tree maxval)
8333 return build_range_type (sizetype, size_zero_node, maxval);
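/* A minimal usage sketch: the index type 0..9, suitable as the
   TYPE_DOMAIN of a ten-element array.

     tree domain = build_index_type (size_int (9));

     gcc_checking_assert (TREE_CODE (domain) == INTEGER_TYPE
                          && integer_zerop (TYPE_MIN_VALUE (domain))
                          && tree_to_uhwi (TYPE_MAX_VALUE (domain)) == 9); */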
8336 /* Return true if the debug information for TYPE, a subtype, should be emitted
8337 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8338 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8339 debug info and doesn't reflect the source code. */
8341 bool
8342 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8344 tree base_type = TREE_TYPE (type), low, high;
8346 /* Subrange types have a base type which is an integral type. */
8347 if (!INTEGRAL_TYPE_P (base_type))
8348 return false;
8350 /* Get the real bounds of the subtype. */
8351 if (lang_hooks.types.get_subrange_bounds)
8352 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8353 else
8355 low = TYPE_MIN_VALUE (type);
8356 high = TYPE_MAX_VALUE (type);
8359 /* If the type and its base type have the same representation and the same
8360 name, then the type is not a subrange but a copy of the base type. */
8361 if ((TREE_CODE (base_type) == INTEGER_TYPE
8362 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8363 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8364 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8365 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8366 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8367 return false;
8369 if (lowval)
8370 *lowval = low;
8371 if (highval)
8372 *highval = high;
8373 return true;
8376 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8377 and number of elements specified by the range of values of INDEX_TYPE.
8378 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8379 If SHARED is true, reuse such a type that has already been constructed. */
8381 static tree
8382 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8383 bool shared)
8385 tree t;
8387 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8389 error ("arrays of functions are not meaningful");
8390 elt_type = integer_type_node;
8393 t = make_node (ARRAY_TYPE);
8394 TREE_TYPE (t) = elt_type;
8395 TYPE_DOMAIN (t) = index_type;
8396 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8397 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8398 layout_type (t);
8400 /* If the element type is incomplete at this point we get marked for
8401 structural equality. Do not record these types in the canonical
8402 type hashtable. */
8403 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8404 return t;
8406 if (shared)
8408 hashval_t hash = type_hash_canon_hash (t);
8409 t = type_hash_canon (hash, t);
8412 if (TYPE_CANONICAL (t) == t)
8414 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8415 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8416 || in_lto_p)
8417 SET_TYPE_STRUCTURAL_EQUALITY (t);
8418 else if (TYPE_CANONICAL (elt_type) != elt_type
8419 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8420 TYPE_CANONICAL (t)
8421 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8422 index_type
8423 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8424 typeless_storage, shared);
8427 return t;
8430 /* Wrapper around build_array_type_1 with SHARED set to true. */
8432 tree
8433 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8435 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
8438 /* Wrapper around build_array_type_1 with SHARED set to false. */
8440 tree
8441 build_nonshared_array_type (tree elt_type, tree index_type)
8443 return build_array_type_1 (elt_type, index_type, false, false);
8446 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8447 sizetype. */
8449 tree
8450 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8452 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
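/* A minimal usage sketch: the type `char[10]', built either from an
   explicit domain or through the nelts convenience wrapper; both forms
   hash to the same shared node.

     tree a1 = build_array_type (char_type_node,
                                 build_index_type (size_int (9)), false);
     tree a2 = build_array_type_nelts (char_type_node, 10);

     gcc_checking_assert (TREE_CODE (a2) == ARRAY_TYPE
                          && TREE_TYPE (a2) == char_type_node
                          && a1 == a2); */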
8455 /* Recursively examines the array elements of TYPE, until a non-array
8456 element type is found. */
8458 tree
8459 strip_array_types (tree type)
8461 while (TREE_CODE (type) == ARRAY_TYPE)
8462 type = TREE_TYPE (type);
8464 return type;
8467 /* Computes the canonical argument types from the argument type list
8468 ARGTYPES.
8470 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8471 on entry to this function, or if any of the ARGTYPES are
8472 structural.
8474 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8475 true on entry to this function, or if any of the ARGTYPES are
8476 non-canonical.
8478 Returns a canonical argument list, which may be ARGTYPES when the
8479 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8480 true) or would not differ from ARGTYPES. */
8482 static tree
8483 maybe_canonicalize_argtypes (tree argtypes,
8484 bool *any_structural_p,
8485 bool *any_noncanonical_p)
8487 tree arg;
8488 bool any_noncanonical_argtypes_p = false;
8490 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8492 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8493 /* Fail gracefully by stating that the type is structural. */
8494 *any_structural_p = true;
8495 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8496 *any_structural_p = true;
8497 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8498 || TREE_PURPOSE (arg))
8499 /* If the argument has a default argument, we consider it
8500 non-canonical even though the type itself is canonical.
8501 That way, different variants of function and method types
8502 with default arguments will all point to the variant with
8503 no defaults as their canonical type. */
8504 any_noncanonical_argtypes_p = true;
8507 if (*any_structural_p)
8508 return argtypes;
8510 if (any_noncanonical_argtypes_p)
8512 /* Build the canonical list of argument types. */
8513 tree canon_argtypes = NULL_TREE;
8514 bool is_void = false;
8516 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8518 if (arg == void_list_node)
8519 is_void = true;
8520 else
8521 canon_argtypes = tree_cons (NULL_TREE,
8522 TYPE_CANONICAL (TREE_VALUE (arg)),
8523 canon_argtypes);
8526 canon_argtypes = nreverse (canon_argtypes);
8527 if (is_void)
8528 canon_argtypes = chainon (canon_argtypes, void_list_node);
8530 /* There is a non-canonical type. */
8531 *any_noncanonical_p = true;
8532 return canon_argtypes;
8535 /* The canonical argument types are the same as ARGTYPES. */
8536 return argtypes;
8539 /* Construct, lay out and return
8540 the type of functions returning type VALUE_TYPE
8541 given arguments of types ARG_TYPES.
8542 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8543 are data type nodes for the arguments of the function.
8544 If such a type has already been constructed, reuse it. */
8546 tree
8547 build_function_type (tree value_type, tree arg_types)
8549 tree t;
8550 inchash::hash hstate;
8551 bool any_structural_p, any_noncanonical_p;
8552 tree canon_argtypes;
8554 gcc_assert (arg_types != error_mark_node);
8556 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8558 error ("function return type cannot be function");
8559 value_type = integer_type_node;
8562 /* Make a node of the sort we want. */
8563 t = make_node (FUNCTION_TYPE);
8564 TREE_TYPE (t) = value_type;
8565 TYPE_ARG_TYPES (t) = arg_types;
8567 /* If we already have such a type, use the old one. */
8568 hashval_t hash = type_hash_canon_hash (t);
8569 t = type_hash_canon (hash, t);
8571 /* Set up the canonical type. */
8572 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8573 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8574 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8575 &any_structural_p,
8576 &any_noncanonical_p);
8577 if (any_structural_p)
8578 SET_TYPE_STRUCTURAL_EQUALITY (t);
8579 else if (any_noncanonical_p)
8580 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8581 canon_argtypes);
8583 if (!COMPLETE_TYPE_P (t))
8584 layout_type (t);
8585 return t;
8588 /* Build a function type. The RETURN_TYPE is the type returned by the
8589 function. If VAARGS is set, no void_type_node is appended to the
8590 list. ARGP must always be terminated by a NULL_TREE. */
8592 static tree
8593 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8595 tree t, args, last;
8597 t = va_arg (argp, tree);
8598 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8599 args = tree_cons (NULL_TREE, t, args);
8601 if (vaargs)
8603 last = args;
8604 if (args != NULL_TREE)
8605 args = nreverse (args);
8606 gcc_assert (last != void_list_node);
8608 else if (args == NULL_TREE)
8609 args = void_list_node;
8610 else
8612 last = args;
8613 args = nreverse (args);
8614 TREE_CHAIN (last) = void_list_node;
8616 args = build_function_type (return_type, args);
8618 return args;
8621 /* Build a function type. The RETURN_TYPE is the type returned by the
8622 function. If additional arguments are provided, they are
8623 additional argument types. The list of argument types must always
8624 be terminated by NULL_TREE. */
8626 tree
8627 build_function_type_list (tree return_type, ...)
8629 tree args;
8630 va_list p;
8632 va_start (p, return_type);
8633 args = build_function_type_list_1 (false, return_type, p);
8634 va_end (p);
8635 return args;
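/* A minimal usage sketch: the type `int (double, double)'. The argument
   list is implicitly terminated with void_list_node, so the resulting
   type is prototyped.

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node,
                                             double_type_node,
                                             NULL_TREE);

     gcc_checking_assert (TREE_CODE (fntype) == FUNCTION_TYPE
                          && TREE_TYPE (fntype) == integer_type_node); */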
8638 /* Build a variable argument function type. The RETURN_TYPE is the
8639 type returned by the function. If additional arguments are provided,
8640 they are additional argument types. The list of argument types must
8641 always be terminated by NULL_TREE. */
8643 tree
8644 build_varargs_function_type_list (tree return_type, ...)
8646 tree args;
8647 va_list p;
8649 va_start (p, return_type);
8650 args = build_function_type_list_1 (true, return_type, p);
8651 va_end (p);
8653 return args;
8656 /* Build a function type. RETURN_TYPE is the type returned by the
8657 function; VAARGS indicates whether the function takes varargs. The
8658 function takes N named arguments, the types of which are provided in
8659 ARG_TYPES. */
8661 static tree
8662 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8663 tree *arg_types)
8665 int i;
8666 tree t = vaargs ? NULL_TREE : void_list_node;
8668 for (i = n - 1; i >= 0; i--)
8669 t = tree_cons (NULL_TREE, arg_types[i], t);
8671 return build_function_type (return_type, t);
8674 /* Build a function type. RETURN_TYPE is the type returned by the
8675 function. The function takes N named arguments, the types of which
8676 are provided in ARG_TYPES. */
8678 tree
8679 build_function_type_array (tree return_type, int n, tree *arg_types)
8681 return build_function_type_array_1 (false, return_type, n, arg_types);
8684 /* Build a variable argument function type. RETURN_TYPE is the type
8685 returned by the function. The function takes N named arguments, the
8686 types of which are provided in ARG_TYPES. */
8688 tree
8689 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8691 return build_function_type_array_1 (true, return_type, n, arg_types);
8694 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8695 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8696 for the method. An implicit additional parameter (of type
8697 pointer-to-BASETYPE) is added to the ARGTYPES. */
8699 tree
8700 build_method_type_directly (tree basetype,
8701 tree rettype,
8702 tree argtypes)
8704 tree t;
8705 tree ptype;
8706 bool any_structural_p, any_noncanonical_p;
8707 tree canon_argtypes;
8709 /* Make a node of the sort we want. */
8710 t = make_node (METHOD_TYPE);
8712 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8713 TREE_TYPE (t) = rettype;
8714 ptype = build_pointer_type (basetype);
8716 /* The actual arglist for this function includes a "hidden" argument
8717 which is "this". Put it into the list of argument types. */
8718 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8719 TYPE_ARG_TYPES (t) = argtypes;
8721 /* If we already have such a type, use the old one. */
8722 hashval_t hash = type_hash_canon_hash (t);
8723 t = type_hash_canon (hash, t);
8725 /* Set up the canonical type. */
8726 any_structural_p
8727 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8728 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8729 any_noncanonical_p
8730 = (TYPE_CANONICAL (basetype) != basetype
8731 || TYPE_CANONICAL (rettype) != rettype);
8732 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8733 &any_structural_p,
8734 &any_noncanonical_p);
8735 if (any_structural_p)
8736 SET_TYPE_STRUCTURAL_EQUALITY (t);
8737 else if (any_noncanonical_p)
8738 TYPE_CANONICAL (t)
8739 = build_method_type_directly (TYPE_CANONICAL (basetype),
8740 TYPE_CANONICAL (rettype),
8741 canon_argtypes);
8742 if (!COMPLETE_TYPE_P (t))
8743 layout_type (t);
8745 return t;
8748 /* Construct, lay out and return the type of methods belonging to class
8749 BASETYPE and whose arguments and values are described by TYPE.
8750 If that type exists already, reuse it.
8751 TYPE must be a FUNCTION_TYPE node. */
8753 tree
8754 build_method_type (tree basetype, tree type)
8756 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8758 return build_method_type_directly (basetype,
8759 TREE_TYPE (type),
8760 TYPE_ARG_TYPES (type));
8763 /* Construct, lay out and return the type of offsets to a value
8764 of type TYPE, within an object of type BASETYPE.
8765 If a suitable offset type exists already, reuse it. */
8767 tree
8768 build_offset_type (tree basetype, tree type)
8770 tree t;
8772 /* Make a node of the sort we want. */
8773 t = make_node (OFFSET_TYPE);
8775 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8776 TREE_TYPE (t) = type;
8778 /* If we already have such a type, use the old one. */
8779 hashval_t hash = type_hash_canon_hash (t);
8780 t = type_hash_canon (hash, t);
8782 if (!COMPLETE_TYPE_P (t))
8783 layout_type (t);
8785 if (TYPE_CANONICAL (t) == t)
8787 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8788 || TYPE_STRUCTURAL_EQUALITY_P (type))
8789 SET_TYPE_STRUCTURAL_EQUALITY (t);
8790 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8791 || TYPE_CANONICAL (type) != type)
8792 TYPE_CANONICAL (t)
8793 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8794 TYPE_CANONICAL (type));
8797 return t;
8800 /* Create a complex type whose components are COMPONENT_TYPE.
8802 If NAMED is true, the type is given a TYPE_NAME. We do not always
8803 do so because this creates a DECL node and thus make the DECL_UIDs
8804 dependent on the type canonicalization hashtable, which is GC-ed,
8805 so the DECL_UIDs would not be stable wrt garbage collection. */
8807 tree
8808 build_complex_type (tree component_type, bool named)
8810 gcc_assert (INTEGRAL_TYPE_P (component_type)
8811 || SCALAR_FLOAT_TYPE_P (component_type)
8812 || FIXED_POINT_TYPE_P (component_type));
8814 /* Make a node of the sort we want. */
8815 tree probe = make_node (COMPLEX_TYPE);
8817 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8819 /* If we already have such a type, use the old one. */
8820 hashval_t hash = type_hash_canon_hash (probe);
8821 tree t = type_hash_canon (hash, probe);
8823 if (t == probe)
8825 /* We created a new type. The hash insertion will have laid
8826 out the type. We need to check the canonicalization and
8827 maybe set the name. */
8828 gcc_checking_assert (COMPLETE_TYPE_P (t)
8829 && !TYPE_NAME (t)
8830 && TYPE_CANONICAL (t) == t);
8832 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8833 SET_TYPE_STRUCTURAL_EQUALITY (t);
8834 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8835 TYPE_CANONICAL (t)
8836 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8838 /* We need to create a name, since complex is a fundamental type. */
8839 if (named)
8841 const char *name = NULL;
8843 if (TREE_TYPE (t) == char_type_node)
8844 name = "complex char";
8845 else if (TREE_TYPE (t) == signed_char_type_node)
8846 name = "complex signed char";
8847 else if (TREE_TYPE (t) == unsigned_char_type_node)
8848 name = "complex unsigned char";
8849 else if (TREE_TYPE (t) == short_integer_type_node)
8850 name = "complex short int";
8851 else if (TREE_TYPE (t) == short_unsigned_type_node)
8852 name = "complex short unsigned int";
8853 else if (TREE_TYPE (t) == integer_type_node)
8854 name = "complex int";
8855 else if (TREE_TYPE (t) == unsigned_type_node)
8856 name = "complex unsigned int";
8857 else if (TREE_TYPE (t) == long_integer_type_node)
8858 name = "complex long int";
8859 else if (TREE_TYPE (t) == long_unsigned_type_node)
8860 name = "complex long unsigned int";
8861 else if (TREE_TYPE (t) == long_long_integer_type_node)
8862 name = "complex long long int";
8863 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8864 name = "complex long long unsigned int";
8866 if (name != NULL)
8867 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8868 get_identifier (name), t);
8872 return build_qualified_type (t, TYPE_QUALS (component_type));
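/* A minimal usage sketch: the type `_Complex double'. For an unqualified
   component type this normally yields the shared node also reachable as
   complex_double_type_node.

     tree cd = build_complex_type (double_type_node, false);

     gcc_checking_assert (TREE_CODE (cd) == COMPLEX_TYPE
                          && TREE_TYPE (cd) == double_type_node); */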
8875 /* If TYPE is a real or complex floating-point type and the target
8876 does not directly support arithmetic on TYPE then return the wider
8877 type to be used for arithmetic on TYPE. Otherwise, return
8878 NULL_TREE. */
8880 tree
8881 excess_precision_type (tree type)
8883 /* The target can give two different responses to the question of
8884 which excess precision mode it would like depending on whether we
8885 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8887 enum excess_precision_type requested_type
8888 = (flag_excess_precision == EXCESS_PRECISION_FAST
8889 ? EXCESS_PRECISION_TYPE_FAST
8890 : EXCESS_PRECISION_TYPE_STANDARD);
8892 enum flt_eval_method target_flt_eval_method
8893 = targetm.c.excess_precision (requested_type);
8895 /* The target should not ask for unpredictable float evaluation (though
8896 it might implicitly advertise that the evaluation is unpredictable,
8897 but we don't care about that here, as it will have been reported
8898 elsewhere). If it does ask for unpredictable evaluation, we have
8899 nothing to do here. */
8900 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8902 /* Nothing to do. The target has asked for all types we know about
8903 to be computed with their native precision and range. */
8904 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8905 return NULL_TREE;
8907 /* The target will promote this type in a target-dependent way, so excess
8908 precision ought to leave it alone. */
8909 if (targetm.promoted_type (type) != NULL_TREE)
8910 return NULL_TREE;
8912 machine_mode float16_type_mode = (float16_type_node
8913 ? TYPE_MODE (float16_type_node)
8914 : VOIDmode);
8915 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8916 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8918 switch (TREE_CODE (type))
8920 case REAL_TYPE:
8922 machine_mode type_mode = TYPE_MODE (type);
8923 switch (target_flt_eval_method)
8925 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8926 if (type_mode == float16_type_mode)
8927 return float_type_node;
8928 break;
8929 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8930 if (type_mode == float16_type_mode
8931 || type_mode == float_type_mode)
8932 return double_type_node;
8933 break;
8934 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8935 if (type_mode == float16_type_mode
8936 || type_mode == float_type_mode
8937 || type_mode == double_type_mode)
8938 return long_double_type_node;
8939 break;
8940 default:
8941 gcc_unreachable ();
8943 break;
8945 case COMPLEX_TYPE:
8947 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8948 return NULL_TREE;
8949 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8950 switch (target_flt_eval_method)
8952 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8953 if (type_mode == float16_type_mode)
8954 return complex_float_type_node;
8955 break;
8956 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8957 if (type_mode == float16_type_mode
8958 || type_mode == float_type_mode)
8959 return complex_double_type_node;
8960 break;
8961 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8962 if (type_mode == float16_type_mode
8963 || type_mode == float_type_mode
8964 || type_mode == double_type_mode)
8965 return complex_long_double_type_node;
8966 break;
8967 default:
8968 gcc_unreachable ();
8970 break;
8972 default:
8973 break;
8976 return NULL_TREE;
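/* A minimal usage sketch: asking whether float arithmetic should be
   carried out in a wider type. The answer depends on the target and on
   -fexcess-precision; x87-style targets may return long_double_type_node,
   while most others return NULL_TREE.

     tree wide = excess_precision_type (float_type_node);
     if (wide)
       ... perform the arithmetic in WIDE and narrow the result ... */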
8979 /* Return OP, stripped of any conversions to wider types as much as is safe.
8980 Converting the value back to OP's type makes a value equivalent to OP.
8982 If FOR_TYPE is nonzero, we return a value which, if converted to
8983 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8985 OP must have integer, real or enumeral type. Pointers are not allowed!
8987 There are some cases where the obvious value we could return
8988 would regenerate to OP if converted to OP's type,
8989 but would not extend like OP to wider types.
8990 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8991 For example, if OP is (unsigned short)(signed char)-1,
8992 we avoid returning (signed char)-1 if FOR_TYPE is int,
8993 even though extending that to an unsigned short would regenerate OP,
8994 since the result of extending (signed char)-1 to (int)
8995 is different from (int) OP. */
8997 tree
8998 get_unwidened (tree op, tree for_type)
9000 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
9001 tree type = TREE_TYPE (op);
9002 unsigned final_prec
9003 = TYPE_PRECISION (for_type != 0 ? for_type : type);
9004 int uns
9005 = (for_type != 0 && for_type != type
9006 && final_prec > TYPE_PRECISION (type)
9007 && TYPE_UNSIGNED (type));
9008 tree win = op;
9010 while (CONVERT_EXPR_P (op))
9012 int bitschange;
9014 /* TYPE_PRECISION on vector types has different meaning
9015 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
9016 so avoid them here. */
9017 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
9018 break;
9020 bitschange = TYPE_PRECISION (TREE_TYPE (op))
9021 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
9023 /* Truncations are many-one so cannot be removed.
9024 Unless we are later going to truncate down even farther. */
9025 if (bitschange < 0
9026 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
9027 break;
9029 /* See what's inside this conversion. If we decide to strip it,
9030 we will set WIN. */
9031 op = TREE_OPERAND (op, 0);
9033 /* If we have not stripped any zero-extensions (uns is 0),
9034 we can strip any kind of extension.
9035 If we have previously stripped a zero-extension,
9036 only zero-extensions can safely be stripped.
9037 Any extension can be stripped if the bits it would produce
9038 are all going to be discarded later by truncating to FOR_TYPE. */
9040 if (bitschange > 0)
9042 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
9043 win = op;
9044 /* TYPE_UNSIGNED says whether this is a zero-extension.
9045 Let's avoid computing it if it does not affect WIN
9046 and if UNS will not be needed again. */
9047 if ((uns
9048 || CONVERT_EXPR_P (op))
9049 && TYPE_UNSIGNED (TREE_TYPE (op)))
9051 uns = 1;
9052 win = op;
9057 /* If we finally reach a constant see if it fits in sth smaller and
9058 in that case convert it. */
9059 if (TREE_CODE (win) == INTEGER_CST)
9061 tree wtype = TREE_TYPE (win);
9062 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
9063 if (for_type)
9064 prec = MAX (prec, final_prec);
9065 if (prec < TYPE_PRECISION (wtype))
9067 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
9068 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
9069 win = fold_convert (t, win);
9073 return win;
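/* A minimal usage sketch, assuming a hypothetical SHORT_VAR of type
   `short int': stripping a widening conversion.

     tree widened = fold_convert (integer_type_node, short_var);
     tree stripped = get_unwidened (widened, NULL_TREE);

   Here STRIPPED is SHORT_VAR again; converting it back to `int'
   regenerates a value equivalent to WIDENED. */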
9076 /* Return OP or a simpler expression for a narrower value
9077 which can be sign-extended or zero-extended to give back OP.
9078 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
9079 or 0 if the value should be sign-extended. */
9081 tree
9082 get_narrower (tree op, int *unsignedp_ptr)
9084 int uns = 0;
9085 int first = 1;
9086 tree win = op;
9087 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
9089 while (TREE_CODE (op) == NOP_EXPR)
9091 int bitschange
9092 = (TYPE_PRECISION (TREE_TYPE (op))
9093 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
9095 /* Truncations are many-one so cannot be removed. */
9096 if (bitschange < 0)
9097 break;
9099 /* See what's inside this conversion. If we decide to strip it,
9100 we will set WIN. */
9102 if (bitschange > 0)
9104 op = TREE_OPERAND (op, 0);
9105 /* An extension: the outermost one can be stripped,
9106 but remember whether it is zero or sign extension. */
9107 if (first)
9108 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9109 /* Otherwise, if a sign extension has been stripped,
9110 only sign extensions can now be stripped;
9111 if a zero extension has been stripped, only zero-extensions. */
9112 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9113 break;
9114 first = 0;
9116 else /* bitschange == 0 */
9118 /* A change in nominal type can always be stripped, but we must
9119 preserve the unsignedness. */
9120 if (first)
9121 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9122 first = 0;
9123 op = TREE_OPERAND (op, 0);
9124 /* Keep trying to narrow, but don't assign op to win if it
9125 would turn an integral type into something else. */
9126 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9127 continue;
9130 win = op;
9133 if (TREE_CODE (op) == COMPONENT_REF
9134 /* Since type_for_size always gives an integer type. */
9135 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9136 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9137 /* Ensure field is laid out already. */
9138 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9139 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9141 unsigned HOST_WIDE_INT innerprec
9142 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9143 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9144 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9145 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9147 /* We can get this structure field in a narrower type that fits it,
9148 but the resulting extension to its nominal type (a fullword type)
9149 must satisfy the same conditions as for other extensions.
9151 Do this only for fields that are aligned (not bit-fields),
9152 because when bit-field insns are used there is no
9153 advantage in doing this. */
9155 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9156 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9157 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9158 && type != 0)
9160 if (first)
9161 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9162 win = fold_convert (type, op);
9166 *unsignedp_ptr = uns;
9167 return win;
9170 /* Return true if integer constant C has a value that is permissible
9171 for TYPE, an integral type. */
9173 bool
9174 int_fits_type_p (const_tree c, const_tree type)
9176 tree type_low_bound, type_high_bound;
9177 bool ok_for_low_bound, ok_for_high_bound;
9178 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9180 /* Non-standard boolean types can have arbitrary precision but various
9181 transformations assume that they can only take values 0 and +/-1. */
9182 if (TREE_CODE (type) == BOOLEAN_TYPE)
9183 return wi::fits_to_boolean_p (wi::to_wide (c), type);
9185 retry:
9186 type_low_bound = TYPE_MIN_VALUE (type);
9187 type_high_bound = TYPE_MAX_VALUE (type);
9189 /* If at least one bound of the type is a constant integer, we can check
9190 ourselves and maybe make a decision. If no such decision is possible, but
9191 this type is a subtype, try checking against that. Otherwise, use
9192 fits_to_tree_p, which checks against the precision.
9194 Compute the status for each possibly constant bound, and return if we see
9195 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
9196 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
9197 for "constant known to fit". */
9199 /* Check if c >= type_low_bound. */
9200 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9202 if (tree_int_cst_lt (c, type_low_bound))
9203 return false;
9204 ok_for_low_bound = true;
9206 else
9207 ok_for_low_bound = false;
9209 /* Check if c <= type_high_bound. */
9210 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9212 if (tree_int_cst_lt (type_high_bound, c))
9213 return false;
9214 ok_for_high_bound = true;
9216 else
9217 ok_for_high_bound = false;
9219 /* If the constant fits both bounds, the result is known. */
9220 if (ok_for_low_bound && ok_for_high_bound)
9221 return true;
9223 /* Perform some generic filtering which may allow making a decision
9224 even if the bounds are not constant. First, negative integers
9225 never fit in unsigned types. */
9226 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9227 return false;
9229 /* Second, narrower types always fit in wider ones. */
9230 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9231 return true;
9233 /* Third, unsigned integers with top bit set never fit signed types. */
9234 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9236 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9237 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9239 /* When a tree_cst is converted to a wide-int, the precision
9240 is taken from the type. However, if the precision of the
9241 mode underneath the type is smaller than that, it is
9242 possible that the value will not fit. The test below
9243 fails if any bit is set between the sign bit of the
9244 underlying mode and the top bit of the type. */
9245 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9246 return false;
9248 else if (wi::neg_p (wi::to_wide (c)))
9249 return false;
9252 /* If we haven't been able to decide at this point, there is nothing more we
9253 can check ourselves here. Look at the base type if we have one and it
9254 has the same precision. */
9255 if (TREE_CODE (type) == INTEGER_TYPE
9256 && TREE_TYPE (type) != 0
9257 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9259 type = TREE_TYPE (type);
9260 goto retry;
9263 /* Or to fits_to_tree_p, if nothing else. */
9264 return wi::fits_to_tree_p (wi::to_wide (c), type);
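/* A minimal usage sketch (assuming the usual 8-bit unsigned char):
   checking whether integer constants fit in `unsigned char'.

     tree small = build_int_cst (integer_type_node, 200);
     tree big = build_int_cst (integer_type_node, 300);

     gcc_checking_assert (int_fits_type_p (small, unsigned_char_type_node)
                          && !int_fits_type_p (big, unsigned_char_type_node)); */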
9267 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9268 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9269 represented (assuming two's-complement arithmetic) within the bit
9270 precision of the type are returned instead. */
9272 void
9273 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9275 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9276 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9277 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9278 else
9280 if (TYPE_UNSIGNED (type))
9281 mpz_set_ui (min, 0);
9282 else
9284 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9285 wi::to_mpz (mn, min, SIGNED);
9289 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9290 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9291 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9292 else
9294 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9295 wi::to_mpz (mn, max, TYPE_SIGN (type));
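/* A minimal usage sketch: fetching the bounds of `signed char' into GMP
   integers; the caller owns and must clear the mpz_t values.

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (signed_char_type_node, lo, hi);
     ... LO is -128 and HI is 127 on the usual 8-bit targets ...
     mpz_clear (lo);
     mpz_clear (hi); */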
9299 /* Return true if VAR is an automatic variable. */
9301 bool
9302 auto_var_p (const_tree var)
9304 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9305 || TREE_CODE (var) == PARM_DECL)
9306 && ! TREE_STATIC (var))
9307 || TREE_CODE (var) == RESULT_DECL);
9310 /* Return true if VAR is an automatic variable defined in function FN. */
9312 bool
9313 auto_var_in_fn_p (const_tree var, const_tree fn)
9315 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9316 && (auto_var_p (var)
9317 || TREE_CODE (var) == LABEL_DECL));
9320 /* Subprogram of following function. Called by walk_tree.
9322 Return *TP if it is an automatic variable or parameter of the
9323 function passed in as DATA. */
9325 static tree
9326 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9328 tree fn = (tree) data;
9330 if (TYPE_P (*tp))
9331 *walk_subtrees = 0;
9333 else if (DECL_P (*tp)
9334 && auto_var_in_fn_p (*tp, fn))
9335 return *tp;
9337 return NULL_TREE;
9340 /* Returns true if T is, contains, or refers to a type with variable
9341 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9342 arguments, but not the return type. If FN is nonzero, only return
9343 true if a modifier of the type or position of FN is a variable or
9344 parameter inside FN.
9346 This concept is more general than that of C99 'variably modified types':
9347 in C99, a struct type is never variably modified because a VLA may not
9348 appear as a structure member. However, in GNU C code like:
9350 struct S { int i[f()]; };
9352 is valid, and other languages may define similar constructs. */
9354 bool
9355 variably_modified_type_p (tree type, tree fn)
9357 tree t;
9359 /* Test if T is either variable (if FN is zero) or an expression containing
9360 a variable in FN. If TYPE isn't gimplified, return true also if
9361 gimplify_one_sizepos would gimplify the expression into a local
9362 variable. */
9363 #define RETURN_TRUE_IF_VAR(T) \
9364 do { tree _t = (T); \
9365 if (_t != NULL_TREE \
9366 && _t != error_mark_node \
9367 && !CONSTANT_CLASS_P (_t) \
9368 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9369 && (!fn \
9370 || (!TYPE_SIZES_GIMPLIFIED (type) \
9371 && (TREE_CODE (_t) != VAR_DECL \
9372 && !CONTAINS_PLACEHOLDER_P (_t))) \
9373 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9374 return true; } while (0)
9376 if (type == error_mark_node)
9377 return false;
9379 /* If TYPE itself has variable size, it is variably modified. */
9380 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9381 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9383 switch (TREE_CODE (type))
9385 case POINTER_TYPE:
9386 case REFERENCE_TYPE:
9387 case VECTOR_TYPE:
9388 /* Ada can have pointer types referring to themselves indirectly. */
9389 if (TREE_VISITED (type))
9390 return false;
9391 TREE_VISITED (type) = true;
9392 if (variably_modified_type_p (TREE_TYPE (type), fn))
9394 TREE_VISITED (type) = false;
9395 return true;
9397 TREE_VISITED (type) = false;
9398 break;
9400 case FUNCTION_TYPE:
9401 case METHOD_TYPE:
9402 /* If TYPE is a function type, it is variably modified if the
9403 return type is variably modified. */
9404 if (variably_modified_type_p (TREE_TYPE (type), fn))
9405 return true;
9406 break;
9408 case INTEGER_TYPE:
9409 case REAL_TYPE:
9410 case FIXED_POINT_TYPE:
9411 case ENUMERAL_TYPE:
9412 case BOOLEAN_TYPE:
9413 /* Scalar types are variably modified if their end points
9414 aren't constant. */
9415 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9416 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9417 break;
9419 case RECORD_TYPE:
9420 case UNION_TYPE:
9421 case QUAL_UNION_TYPE:
9422 /* We can't see if any of the fields are variably-modified by the
9423 definition we normally use, since that would produce infinite
9424 recursion via pointers. */
9425 /* This is variably modified if some field's type is. */
9426 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9427 if (TREE_CODE (t) == FIELD_DECL)
9429 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9430 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9431 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9433 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9434 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9436 break;
9438 case ARRAY_TYPE:
9439 /* Do not call ourselves to avoid infinite recursion. This is
9440 variably modified if the element type is. */
9441 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9442 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9443 break;
9445 default:
9446 break;
9449 /* The current language may have other cases to check, but in general,
9450 all other types are not variably modified. */
9451 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9453 #undef RETURN_TRUE_IF_VAR
9456 /* Given a DECL or TYPE, return the scope in which it was declared, or
9457 NULL_TREE if there is no containing scope. */
9459 tree
9460 get_containing_scope (const_tree t)
9462 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9465 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9467 const_tree
9468 get_ultimate_context (const_tree decl)
9470 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9472 if (TREE_CODE (decl) == BLOCK)
9473 decl = BLOCK_SUPERCONTEXT (decl);
9474 else
9475 decl = get_containing_scope (decl);
9477 return decl;
9480 /* Return the innermost context enclosing DECL that is
9481 a FUNCTION_DECL, or zero if none. */
9483 tree
9484 decl_function_context (const_tree decl)
9486 tree context;
9488 if (TREE_CODE (decl) == ERROR_MARK)
9489 return 0;
9491 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9492 where we look up the function at runtime. Such functions always take
9493 a first argument of type 'pointer to real context'.
9495 C++ should really be fixed to use DECL_CONTEXT for the real context,
9496 and use something else for the "virtual context". */
9497 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9498 context
9499 = TYPE_MAIN_VARIANT
9500 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9501 else
9502 context = DECL_CONTEXT (decl);
9504 while (context && TREE_CODE (context) != FUNCTION_DECL)
9506 if (TREE_CODE (context) == BLOCK)
9507 context = BLOCK_SUPERCONTEXT (context);
9508 else
9509 context = get_containing_scope (context);
9512 return context;
9515 /* Return the innermost context enclosing DECL that is
9516 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9517 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9519 tree
9520 decl_type_context (const_tree decl)
9522 tree context = DECL_CONTEXT (decl);
9524 while (context)
9525 switch (TREE_CODE (context))
9527 case NAMESPACE_DECL:
9528 case TRANSLATION_UNIT_DECL:
9529 return NULL_TREE;
9531 case RECORD_TYPE:
9532 case UNION_TYPE:
9533 case QUAL_UNION_TYPE:
9534 return context;
9536 case TYPE_DECL:
9537 case FUNCTION_DECL:
9538 context = DECL_CONTEXT (context);
9539 break;
9541 case BLOCK:
9542 context = BLOCK_SUPERCONTEXT (context);
9543 break;
9545 default:
9546 gcc_unreachable ();
9549 return NULL_TREE;
9552 /* CALL is a CALL_EXPR. Return the declaration for the function
9553 called, or NULL_TREE if the called function cannot be
9554 determined. */
9556 tree
9557 get_callee_fndecl (const_tree call)
9559 tree addr;
9561 if (call == error_mark_node)
9562 return error_mark_node;
9564 /* It's invalid to call this function with anything but a
9565 CALL_EXPR. */
9566 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9568 /* The first operand to the CALL is the address of the function
9569 called. */
9570 addr = CALL_EXPR_FN (call);
9572 /* If there is no function, return early. */
9573 if (addr == NULL_TREE)
9574 return NULL_TREE;
9576 STRIP_NOPS (addr);
9578 /* If this is a readonly function pointer, extract its initial value. */
9579 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9580 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9581 && DECL_INITIAL (addr))
9582 addr = DECL_INITIAL (addr);
9584 /* If the address is just `&f' for some function `f', then we know
9585 that `f' is being called. */
9586 if (TREE_CODE (addr) == ADDR_EXPR
9587 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9588 return TREE_OPERAND (addr, 0);
9590 /* We couldn't figure out what was being called. */
9591 return NULL_TREE;
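/* A minimal usage sketch, assuming a hypothetical FUNCTION_DECL FNDECL
   taking no arguments: a CALL_EXPR built directly for it hands the decl
   straight back.

     tree call = build_call_expr (fndecl, 0);

     gcc_checking_assert (get_callee_fndecl (call) == fndecl); */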
9594 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9595 return the associated function code, otherwise return CFN_LAST. */
9597 combined_fn
9598 get_call_combined_fn (const_tree call)
9600 /* It's invalid to call this function with anything but a CALL_EXPR. */
9601 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9603 if (!CALL_EXPR_FN (call))
9604 return as_combined_fn (CALL_EXPR_IFN (call));
9606 tree fndecl = get_callee_fndecl (call);
9607 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9608 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9610 return CFN_LAST;
9613 /* Comparator of indices based on tree_node_counts. */
9615 static int
9616 tree_nodes_cmp (const void *p1, const void *p2)
9618 const unsigned *n1 = (const unsigned *)p1;
9619 const unsigned *n2 = (const unsigned *)p2;
9621 return tree_node_counts[*n1] - tree_node_counts[*n2];
9624 /* Comparator of indices based on tree_code_counts. */
9626 static int
9627 tree_codes_cmp (const void *p1, const void *p2)
9629 const unsigned *n1 = (const unsigned *)p1;
9630 const unsigned *n2 = (const unsigned *)p2;
9632 return tree_code_counts[*n1] - tree_code_counts[*n2];
9635 #define TREE_MEM_USAGE_SPACES 40
9637 /* Print debugging information about tree nodes generated during the compile,
9638 and any language-specific information. */
9640 void
9641 dump_tree_statistics (void)
9643 if (GATHER_STATISTICS)
9645 uint64_t total_nodes, total_bytes;
9646 fprintf (stderr, "\nKind Nodes Bytes\n");
9647 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9648 total_nodes = total_bytes = 0;
9651 auto_vec<unsigned> indices (all_kinds);
9652 for (unsigned i = 0; i < all_kinds; i++)
9653 indices.quick_push (i);
9654 indices.qsort (tree_nodes_cmp);
9656 for (unsigned i = 0; i < (int) all_kinds; i++)
9658 unsigned j = indices[i];
9659 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9660 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9661 SIZE_AMOUNT (tree_node_sizes[j]));
9662 total_nodes += tree_node_counts[j];
9663 total_bytes += tree_node_sizes[j];
9665 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9666 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9667 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9668 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9672 fprintf (stderr, "Code Nodes\n");
9673 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9675 auto_vec<unsigned> indices (MAX_TREE_CODES);
9676 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9677 indices.quick_push (i);
9678 indices.qsort (tree_codes_cmp);
9680 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9682 unsigned j = indices[i];
9683 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9684 get_tree_code_name ((enum tree_code) j),
9685 SIZE_AMOUNT (tree_code_counts[j]));
9687 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9688 fprintf (stderr, "\n");
9689 ssanames_print_statistics ();
9690 fprintf (stderr, "\n");
9691 phinodes_print_statistics ();
9692 fprintf (stderr, "\n");
9695 else
9696 fprintf (stderr, "(No per-node statistics)\n");
9698 print_type_hash_statistics ();
9699 print_debug_expr_statistics ();
9700 print_value_expr_statistics ();
9701 lang_hooks.print_statistics ();
9704 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9706 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9708 unsigned
9709 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9711 /* This relies on the raw feedback's top 4 bits being zero. */
9712 #define FEEDBACK(X) ((X) * 0x04c11db7)
9713 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9714 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9715 static const unsigned syndromes[16] =
9717 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9718 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9719 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9720 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9722 #undef FEEDBACK
9723 #undef SYNDROME
9725 value <<= (32 - bytes * 8);
9726 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9728 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9730 chksum = (chksum << 4) ^ feedback;
9733 return chksum;
9736 /* Generate a crc32 of a string. */
9738 unsigned
9739 crc32_string (unsigned chksum, const char *string)
9742 chksum = crc32_byte (chksum, *string);
9743 while (*string++);
9744 return chksum;
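/* A minimal usage sketch: seeding a checksum from a string (as
   get_file_function_name does below) and then folding in a further
   32-bit value.

     unsigned chk = crc32_string (0, "some_translation_unit.c");
     chk = crc32_unsigned_n (chk, 0xdeadbeef, 4); */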
9747 /* P is a string that will be used in a symbol. Mask out any characters
9748 that are not valid in that context. */
9750 void
9751 clean_symbol_name (char *p)
9753 for (; *p; p++)
9754 if (! (ISALNUM (*p)
9755 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9756 || *p == '$'
9757 #endif
9758 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9759 || *p == '.'
9760 #endif
9762 *p = '_';
9765 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
9767 /* Create a unique anonymous identifier. The identifier is still a
9768 valid assembly label. */
9770 tree
9771 make_anon_name ()
9773 const char *fmt =
9774 #if !defined (NO_DOT_IN_LABEL)
9776 #elif !defined (NO_DOLLAR_IN_LABEL)
9778 #else
9780 #endif
9781 "_anon_%d";
9783 char buf[24];
9784 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9785 gcc_checking_assert (len < int (sizeof (buf)));
9787 tree id = get_identifier_with_length (buf, len);
9788 IDENTIFIER_ANON_P (id) = true;
9790 return id;
9793 /* Generate a name for a special-purpose function.
9794 The generated name may need to be unique across the whole link.
9795 Changes to this function may also require corresponding changes to
9796 xstrdup_mask_random.
9797 TYPE is some string to identify the purpose of this function to the
9798 linker or collect2; it must start with an uppercase letter,
9799 one of:
9800 I - for constructors
9801 D - for destructors
9802 N - for C++ anonymous namespaces
9803 F - for DWARF unwind frame information. */
9805 tree
9806 get_file_function_name (const char *type)
9808 char *buf;
9809 const char *p;
9810 char *q;
9812 /* If we already have a name we know to be unique, just use that. */
9813 if (first_global_object_name)
9814 p = q = ASTRDUP (first_global_object_name);
9815 /* If the target is handling the constructors/destructors, they
9816 will be local to this file and the name is only necessary for
9817 debugging purposes.
9818 We also assign sub_I and sub_D suffixes to constructors called from
9819 the global static constructors. These are always local. */
9820 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9821 || (strncmp (type, "sub_", 4) == 0
9822 && (type[4] == 'I' || type[4] == 'D')))
9824 const char *file = main_input_filename;
9825 if (! file)
9826 file = LOCATION_FILE (input_location);
9827 /* Just use the file's basename, because the full pathname
9828 might be quite long. */
9829 p = q = ASTRDUP (lbasename (file));
9831 else
9833 /* Otherwise, the name must be unique across the entire link.
9834 We don't have anything that we know to be unique to this translation
9835 unit, so use what we do have and throw in some randomness. */
9836 unsigned len;
9837 const char *name = weak_global_object_name;
9838 const char *file = main_input_filename;
9840 if (! name)
9841 name = "";
9842 if (! file)
9843 file = LOCATION_FILE (input_location);
9845 len = strlen (file);
9846 q = (char *) alloca (9 + 19 + len + 1);
9847 memcpy (q, file, len + 1);
9849 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9850 crc32_string (0, name), get_random_seed (false));
9852 p = q;
9855 clean_symbol_name (q);
9856 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9857 + strlen (type));
9859 /* Set up the name of the file-level functions we may need.
9860 Use a global object (which is already required to be unique over
9861 the program) rather than the file name (which imposes extra
9862 constraints). */
9863 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9865 return get_identifier (buf);
9868 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9870 /* Complain that the tree code of NODE does not match the expected 0
9871 terminated list of trailing codes. The trailing code list can be
9872 empty, for a more vague error message. FILE, LINE, and FUNCTION
9873 are of the caller. */
9875 void
9876 tree_check_failed (const_tree node, const char *file,
9877 int line, const char *function, ...)
9879 va_list args;
9880 const char *buffer;
9881 unsigned length = 0;
9882 enum tree_code code;
9884 va_start (args, function);
9885 while ((code = (enum tree_code) va_arg (args, int)))
9886 length += 4 + strlen (get_tree_code_name (code));
9887 va_end (args);
9888 if (length)
9890 char *tmp;
9891 va_start (args, function);
9892 length += strlen ("expected ");
9893 buffer = tmp = (char *) alloca (length);
9894 length = 0;
9895 while ((code = (enum tree_code) va_arg (args, int)))
9897 const char *prefix = length ? " or " : "expected ";
9899 strcpy (tmp + length, prefix);
9900 length += strlen (prefix);
9901 strcpy (tmp + length, get_tree_code_name (code));
9902 length += strlen (get_tree_code_name (code));
9904 va_end (args);
9906 else
9907 buffer = "unexpected node";
9909 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9910 buffer, get_tree_code_name (TREE_CODE (node)),
9911 function, trim_filename (file), line);
9914 /* Complain that the tree code of NODE does match the expected 0
9915 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9916 the caller. */
9918 void
9919 tree_not_check_failed (const_tree node, const char *file,
9920 int line, const char *function, ...)
9922 va_list args;
9923 char *buffer;
9924 unsigned length = 0;
9925 enum tree_code code;
9927 va_start (args, function);
9928 while ((code = (enum tree_code) va_arg (args, int)))
9929 length += 4 + strlen (get_tree_code_name (code));
9930 va_end (args);
9931 va_start (args, function);
9932 buffer = (char *) alloca (length);
9933 length = 0;
9934 while ((code = (enum tree_code) va_arg (args, int)))
9936 if (length)
9938 strcpy (buffer + length, " or ");
9939 length += 4;
9941 strcpy (buffer + length, get_tree_code_name (code));
9942 length += strlen (get_tree_code_name (code));
9944 va_end (args);
9946 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9947 buffer, get_tree_code_name (TREE_CODE (node)),
9948 function, trim_filename (file), line);
9951 /* Similar to tree_check_failed, except that we check for a class of tree
9952 code, given in CL. */
9954 void
9955 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9956 const char *file, int line, const char *function)
9958 internal_error
9959 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9960 TREE_CODE_CLASS_STRING (cl),
9961 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9962 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9965 /* Similar to tree_check_failed, except that instead of specifying a
9966 dozen codes, use the knowledge that they're all sequential. */
9968 void
9969 tree_range_check_failed (const_tree node, const char *file, int line,
9970 const char *function, enum tree_code c1,
9971 enum tree_code c2)
9973 char *buffer;
9974 unsigned length = 0;
9975 unsigned int c;
9977 for (c = c1; c <= c2; ++c)
9978 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9980 length += strlen ("expected ");
9981 buffer = (char *) alloca (length);
9982 length = 0;
9984 for (c = c1; c <= c2; ++c)
9986 const char *prefix = length ? " or " : "expected ";
9988 strcpy (buffer + length, prefix);
9989 length += strlen (prefix);
9990 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9991 length += strlen (get_tree_code_name ((enum tree_code) c));
9994 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9995 buffer, get_tree_code_name (TREE_CODE (node)),
9996 function, trim_filename (file), line);
10000 /* Similar to tree_check_failed, except that we check that a tree does
10001 not belong to the specified tree code class, given in CL. */
10003 void
10004 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
10005 const char *file, int line, const char *function)
10007 internal_error
10008 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
10009 TREE_CODE_CLASS_STRING (cl),
10010 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
10011 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
10015 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
10017 void
10018 omp_clause_check_failed (const_tree node, const char *file, int line,
10019 const char *function, enum omp_clause_code code)
10021 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
10022 "in %s, at %s:%d",
10023 omp_clause_code_name[code],
10024 get_tree_code_name (TREE_CODE (node)),
10025 function, trim_filename (file), line);
10029 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
10031 void
10032 omp_clause_range_check_failed (const_tree node, const char *file, int line,
10033 const char *function, enum omp_clause_code c1,
10034 enum omp_clause_code c2)
10036 char *buffer;
10037 unsigned length = 0;
10038 unsigned int c;
10040 for (c = c1; c <= c2; ++c)
10041 length += 4 + strlen (omp_clause_code_name[c]);
10043 length += strlen ("expected ");
10044 buffer = (char *) alloca (length);
10045 length = 0;
10047 for (c = c1; c <= c2; ++c)
10049 const char *prefix = length ? " or " : "expected ";
10051 strcpy (buffer + length, prefix);
10052 length += strlen (prefix);
10053 strcpy (buffer + length, omp_clause_code_name[c]);
10054 length += strlen (omp_clause_code_name[c]);
10057 internal_error ("tree check: %s, have %s in %s, at %s:%d",
10058 buffer, omp_clause_code_name[TREE_CODE (node)],
10059 function, trim_filename (file), line);
10063 #undef DEFTREESTRUCT
10064 #define DEFTREESTRUCT(VAL, NAME) NAME,
10066 static const char *ts_enum_names[] = {
10067 #include "treestruct.def"
10069 #undef DEFTREESTRUCT
10071 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
10073 /* Similar to tree_class_check_failed, except that we check
10074 whether CODE contains the tree structure identified by EN. */
10076 void
10077 tree_contains_struct_check_failed (const_tree node,
10078 const enum tree_node_structure_enum en,
10079 const char *file, int line,
10080 const char *function)
10082 internal_error
10083 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
10084 TS_ENUM_NAME (en),
10085 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
10089 /* Similar to above, except that the check is for the bounds of a
10090 TREE_INT_CST's (dynamically sized) vector of elements. */
10092 void
10093 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
10094 const char *function)
10096 internal_error
10097 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
10098 "at %s:%d",
10099 idx + 1, len, function, trim_filename (file), line);
10102 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
10103 (dynamically sized) vector. */
10105 void
10106 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
10107 const char *function)
10109 internal_error
10110 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
10111 idx + 1, len, function, trim_filename (file), line);
10114 /* Similar to above, except that the check is for the bounds of the operand
10115 vector of an expression node EXP. */
10117 void
10118 tree_operand_check_failed (int idx, const_tree exp, const char *file,
10119 int line, const char *function)
10121 enum tree_code code = TREE_CODE (exp);
10122 internal_error
10123 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
10124 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
10125 function, trim_filename (file), line);
10128 /* Similar to above, except that the check is for the number of
10129 operands of an OMP_CLAUSE node. */
10131 void
10132 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
10133 int line, const char *function)
10135 internal_error
10136 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
10137 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
10138 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
10139 trim_filename (file), line);
10141 #endif /* ENABLE_TREE_CHECKING */
10143 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
10144 and mapped to the machine mode MODE. Initialize its fields and build
10145 the information necessary for debugging output. */
10147 static tree
10148 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
10150 tree t;
10151 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
10153 t = make_node (VECTOR_TYPE);
10154 TREE_TYPE (t) = mv_innertype;
10155 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
10156 SET_TYPE_MODE (t, mode);
10158 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
10159 SET_TYPE_STRUCTURAL_EQUALITY (t);
10160 else if ((TYPE_CANONICAL (mv_innertype) != innertype
10161 || mode != VOIDmode)
10162 && !VECTOR_BOOLEAN_TYPE_P (t))
10163 TYPE_CANONICAL (t)
10164 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
10166 layout_type (t);
10168 hashval_t hash = type_hash_canon_hash (t);
10169 t = type_hash_canon (hash, t);
10171 /* We have built a main variant, based on the main variant of the
10172 inner type. Use it to build the variant we return. */
10173 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10174 && TREE_TYPE (t) != innertype)
10175 return build_type_attribute_qual_variant (t,
10176 TYPE_ATTRIBUTES (innertype),
10177 TYPE_QUALS (innertype));
10179 return t;
10182 static tree
10183 make_or_reuse_type (unsigned size, int unsignedp)
10185 int i;
10187 if (size == INT_TYPE_SIZE)
10188 return unsignedp ? unsigned_type_node : integer_type_node;
10189 if (size == CHAR_TYPE_SIZE)
10190 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10191 if (size == SHORT_TYPE_SIZE)
10192 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10193 if (size == LONG_TYPE_SIZE)
10194 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10195 if (size == LONG_LONG_TYPE_SIZE)
10196 return (unsignedp ? long_long_unsigned_type_node
10197 : long_long_integer_type_node);
10199 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10200 if (size == int_n_data[i].bitsize
10201 && int_n_enabled_p[i])
10202 return (unsignedp ? int_n_trees[i].unsigned_type
10203 : int_n_trees[i].signed_type);
10205 if (unsignedp)
10206 return make_unsigned_type (size);
10207 else
10208 return make_signed_type (size);
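/* For example, on a typical LP64 target where INT_TYPE_SIZE is 32 and
   LONG_TYPE_SIZE is 64 (an assumption; both are target macros):

     make_or_reuse_type (32, 1)   returns unsigned_type_node
     make_or_reuse_type (64, 0)   returns long_integer_type_node
     make_or_reuse_type (24, 0)   falls through to make_signed_type (24)

   This is what lets the intQI/intHI/... nodes created below share the
   standard C type nodes whenever the sizes line up.  */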
10211 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10213 static tree
10214 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10216 if (satp)
10218 if (size == SHORT_FRACT_TYPE_SIZE)
10219 return unsignedp ? sat_unsigned_short_fract_type_node
10220 : sat_short_fract_type_node;
10221 if (size == FRACT_TYPE_SIZE)
10222 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10223 if (size == LONG_FRACT_TYPE_SIZE)
10224 return unsignedp ? sat_unsigned_long_fract_type_node
10225 : sat_long_fract_type_node;
10226 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10227 return unsignedp ? sat_unsigned_long_long_fract_type_node
10228 : sat_long_long_fract_type_node;
10230 else
10232 if (size == SHORT_FRACT_TYPE_SIZE)
10233 return unsignedp ? unsigned_short_fract_type_node
10234 : short_fract_type_node;
10235 if (size == FRACT_TYPE_SIZE)
10236 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10237 if (size == LONG_FRACT_TYPE_SIZE)
10238 return unsignedp ? unsigned_long_fract_type_node
10239 : long_fract_type_node;
10240 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10241 return unsignedp ? unsigned_long_long_fract_type_node
10242 : long_long_fract_type_node;
10245 return make_fract_type (size, unsignedp, satp);
10248 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10250 static tree
10251 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10253 if (satp)
10255 if (size == SHORT_ACCUM_TYPE_SIZE)
10256 return unsignedp ? sat_unsigned_short_accum_type_node
10257 : sat_short_accum_type_node;
10258 if (size == ACCUM_TYPE_SIZE)
10259 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10260 if (size == LONG_ACCUM_TYPE_SIZE)
10261 return unsignedp ? sat_unsigned_long_accum_type_node
10262 : sat_long_accum_type_node;
10263 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10264 return unsignedp ? sat_unsigned_long_long_accum_type_node
10265 : sat_long_long_accum_type_node;
10267 else
10269 if (size == SHORT_ACCUM_TYPE_SIZE)
10270 return unsignedp ? unsigned_short_accum_type_node
10271 : short_accum_type_node;
10272 if (size == ACCUM_TYPE_SIZE)
10273 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10274 if (size == LONG_ACCUM_TYPE_SIZE)
10275 return unsignedp ? unsigned_long_accum_type_node
10276 : long_accum_type_node;
10277 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10278 return unsignedp ? unsigned_long_long_accum_type_node
10279 : long_long_accum_type_node;
10282 return make_accum_type (size, unsignedp, satp);
10286 /* Create an atomic variant node for TYPE. This routine is called
10287 during initialization of data types to create the 5 basic atomic
10288 types. The generic build_variant_type function requires these to
10289 already be set up in order to function properly, so cannot be
10290 called from there. If ALIGN is non-zero, then ensure alignment is
10291 overridden to this value. */
10293 static tree
10294 build_atomic_base (tree type, unsigned int align)
10296 tree t;
10298 /* Make sure it's not already registered. */
10299 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10300 return t;
10302 t = build_variant_type_copy (type);
10303 set_type_quals (t, TYPE_QUAL_ATOMIC);
10305 if (align)
10306 SET_TYPE_ALIGN (t, align);
10308 return t;
10311 /* Information about the _FloatN and _FloatNx types. This must be in
10312 the same order as the corresponding TI_* enum values. */
10313 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10315 { 16, false },
10316 { 32, false },
10317 { 64, false },
10318 { 128, false },
10319 { 32, true },
10320 { 64, true },
10321 { 128, true },
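/* Reading the table: each entry is { n, extended }, so { 32, false } above
   describes _Float32, { 128, false } describes _Float128, and { 64, true }
   describes _Float64x.  FLOATN_NX_TYPE_NODE (i) is filled in below in the
   same order, and targetm.floatn_mode (n, extended) decides which of these
   the target actually provides.  */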
10325 /* Create nodes for all integer types (and error_mark_node) using the sizes
10326 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10328 void
10329 build_common_tree_nodes (bool signed_char)
10331 int i;
10333 error_mark_node = make_node (ERROR_MARK);
10334 TREE_TYPE (error_mark_node) = error_mark_node;
10336 initialize_sizetypes ();
10338 /* Define both `signed char' and `unsigned char'. */
10339 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10340 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10341 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10342 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10344 /* Define `char', which is like either `signed char' or `unsigned char'
10345 but not the same as either. */
10346 char_type_node
10347 = (signed_char
10348 ? make_signed_type (CHAR_TYPE_SIZE)
10349 : make_unsigned_type (CHAR_TYPE_SIZE));
10350 TYPE_STRING_FLAG (char_type_node) = 1;
10352 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10353 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10354 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10355 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10356 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10357 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10358 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10359 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10361 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10363 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10364 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10366 if (int_n_enabled_p[i])
10368 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10369 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10373 /* Define a boolean type. This type only represents boolean values but
10374 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10375 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10376 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10377 TYPE_PRECISION (boolean_type_node) = 1;
10378 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10380 /* Define what type to use for size_t. */
10381 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10382 size_type_node = unsigned_type_node;
10383 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10384 size_type_node = long_unsigned_type_node;
10385 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10386 size_type_node = long_long_unsigned_type_node;
10387 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10388 size_type_node = short_unsigned_type_node;
10389 else
10391 int i;
10393 size_type_node = NULL_TREE;
10394 for (i = 0; i < NUM_INT_N_ENTS; i++)
10395 if (int_n_enabled_p[i])
10397 char name[50], altname[50];
10398 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10399 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10401 if (strcmp (name, SIZE_TYPE) == 0
10402 || strcmp (altname, SIZE_TYPE) == 0)
10404 size_type_node = int_n_trees[i].unsigned_type;
10407 if (size_type_node == NULL_TREE)
10408 gcc_unreachable ();
10411 /* Define what type to use for ptrdiff_t. */
10412 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10413 ptrdiff_type_node = integer_type_node;
10414 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10415 ptrdiff_type_node = long_integer_type_node;
10416 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10417 ptrdiff_type_node = long_long_integer_type_node;
10418 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10419 ptrdiff_type_node = short_integer_type_node;
10420 else
10422 ptrdiff_type_node = NULL_TREE;
10423 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10424 if (int_n_enabled_p[i])
10426 char name[50], altname[50];
10427 sprintf (name, "__int%d", int_n_data[i].bitsize);
10428 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10430 if (strcmp (name, PTRDIFF_TYPE) == 0
10431 || strcmp (altname, PTRDIFF_TYPE) == 0)
10432 ptrdiff_type_node = int_n_trees[i].signed_type;
10434 if (ptrdiff_type_node == NULL_TREE)
10435 gcc_unreachable ();
10438 /* Fill in the rest of the sized types. Reuse existing type nodes
10439 when possible. */
10440 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10441 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10442 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10443 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10444 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10446 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10447 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10448 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10449 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10450 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10452 /* Don't call build_qualified_type for atomics. That routine does
10453 special processing for atomics, and until they are initialized
10454 it's better not to make that call.
10456 Check to see if there is a target override for atomic types. */
10458 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10459 targetm.atomic_align_for_mode (QImode));
10460 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10461 targetm.atomic_align_for_mode (HImode));
10462 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10463 targetm.atomic_align_for_mode (SImode));
10464 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10465 targetm.atomic_align_for_mode (DImode));
10466 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10467 targetm.atomic_align_for_mode (TImode));
10469 access_public_node = get_identifier ("public");
10470 access_protected_node = get_identifier ("protected");
10471 access_private_node = get_identifier ("private");
10473 /* Define these next since types below may use them. */
10474 integer_zero_node = build_int_cst (integer_type_node, 0);
10475 integer_one_node = build_int_cst (integer_type_node, 1);
10476 integer_three_node = build_int_cst (integer_type_node, 3);
10477 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10479 size_zero_node = size_int (0);
10480 size_one_node = size_int (1);
10481 bitsize_zero_node = bitsize_int (0);
10482 bitsize_one_node = bitsize_int (1);
10483 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10485 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10486 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10488 void_type_node = make_node (VOID_TYPE);
10489 layout_type (void_type_node);
10491 /* We are not going to have real types in C with less than byte alignment,
10492 so we might as well not have any types that claim to have it. */
10493 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10494 TYPE_USER_ALIGN (void_type_node) = 0;
10496 void_node = make_node (VOID_CST);
10497 TREE_TYPE (void_node) = void_type_node;
10499 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10500 layout_type (TREE_TYPE (null_pointer_node));
10502 ptr_type_node = build_pointer_type (void_type_node);
10503 const_ptr_type_node
10504 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10505 for (unsigned i = 0;
10506 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10507 ++i)
10508 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10510 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10512 float_type_node = make_node (REAL_TYPE);
10513 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10514 layout_type (float_type_node);
10516 double_type_node = make_node (REAL_TYPE);
10517 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10518 layout_type (double_type_node);
10520 long_double_type_node = make_node (REAL_TYPE);
10521 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10522 layout_type (long_double_type_node);
10524 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10526 int n = floatn_nx_types[i].n;
10527 bool extended = floatn_nx_types[i].extended;
10528 scalar_float_mode mode;
10529 if (!targetm.floatn_mode (n, extended).exists (&mode))
10530 continue;
10531 int precision = GET_MODE_PRECISION (mode);
10532 /* Work around the rs6000 KFmode having precision 113 not
10533 128. */
10534 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10535 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10536 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10537 if (!extended)
10538 gcc_assert (min_precision == n);
10539 if (precision < min_precision)
10540 precision = min_precision;
10541 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10542 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10543 layout_type (FLOATN_NX_TYPE_NODE (i));
10544 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10547 float_ptr_type_node = build_pointer_type (float_type_node);
10548 double_ptr_type_node = build_pointer_type (double_type_node);
10549 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10550 integer_ptr_type_node = build_pointer_type (integer_type_node);
10552 /* Fixed size integer types. */
10553 uint16_type_node = make_or_reuse_type (16, 1);
10554 uint32_type_node = make_or_reuse_type (32, 1);
10555 uint64_type_node = make_or_reuse_type (64, 1);
10557 /* Decimal float types. */
10558 dfloat32_type_node = make_node (REAL_TYPE);
10559 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10560 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10561 layout_type (dfloat32_type_node);
10562 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10564 dfloat64_type_node = make_node (REAL_TYPE);
10565 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10566 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10567 layout_type (dfloat64_type_node);
10568 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10570 dfloat128_type_node = make_node (REAL_TYPE);
10571 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10572 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10573 layout_type (dfloat128_type_node);
10574 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10576 complex_integer_type_node = build_complex_type (integer_type_node, true);
10577 complex_float_type_node = build_complex_type (float_type_node, true);
10578 complex_double_type_node = build_complex_type (double_type_node, true);
10579 complex_long_double_type_node = build_complex_type (long_double_type_node,
10580 true);
10582 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10584 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10585 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10586 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10589 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10590 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10591 sat_ ## KIND ## _type_node = \
10592 make_sat_signed_ ## KIND ## _type (SIZE); \
10593 sat_unsigned_ ## KIND ## _type_node = \
10594 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10595 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10596 unsigned_ ## KIND ## _type_node = \
10597 make_unsigned_ ## KIND ## _type (SIZE);
10599 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10600 sat_ ## WIDTH ## KIND ## _type_node = \
10601 make_sat_signed_ ## KIND ## _type (SIZE); \
10602 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10603 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10604 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10605 unsigned_ ## WIDTH ## KIND ## _type_node = \
10606 make_unsigned_ ## KIND ## _type (SIZE);
10608 /* Make fixed-point type nodes based on four different widths. */
10609 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10610 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10611 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10612 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10613 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10615 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10616 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10617 NAME ## _type_node = \
10618 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10619 u ## NAME ## _type_node = \
10620 make_or_reuse_unsigned_ ## KIND ## _type \
10621 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10622 sat_ ## NAME ## _type_node = \
10623 make_or_reuse_sat_signed_ ## KIND ## _type \
10624 (GET_MODE_BITSIZE (MODE ## mode)); \
10625 sat_u ## NAME ## _type_node = \
10626 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10627 (GET_MODE_BITSIZE (U ## MODE ## mode));
10629 /* Fixed-point type and mode nodes. */
10630 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10631 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10632 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10633 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10634 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10635 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10636 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10637 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10638 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10639 MAKE_FIXED_MODE_NODE (accum, da, DA)
10640 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10643 tree t = targetm.build_builtin_va_list ();
10645 /* Many back-ends define record types without setting TYPE_NAME.
10646 If we copied the record type here, we'd keep the original
10647 record type without a name. This breaks name mangling. So,
10648 don't copy record types and let c_common_nodes_and_builtins()
10649 declare the type to be __builtin_va_list. */
10650 if (TREE_CODE (t) != RECORD_TYPE)
10651 t = build_variant_type_copy (t);
10653 va_list_type_node = t;
10656 /* SCEV analyzer global shared trees. */
10657 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10658 TREE_TYPE (chrec_dont_know) = void_type_node;
10659 chrec_known = make_node (SCEV_KNOWN);
10660 TREE_TYPE (chrec_known) = void_type_node;
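/* Typical use (a sketch, not tied to any particular front end): this is
   called very early in front-end initialization, before any language
   specific types exist, along the lines of

     build_common_tree_nodes (flag_signed_char);
     ... create front-end specific types and builtins ...
     build_common_builtin_nodes ();

   after which integer_type_node, size_type_node, ptr_type_node and the other
   common nodes are valid for the rest of the compilation.  */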
10663 /* Modify DECL for given flags.
10664 TM_PURE attribute is set only on types, so the function will modify
10665 DECL's type when ECF_TM_PURE is used. */
10667 void
10668 set_call_expr_flags (tree decl, int flags)
10670 if (flags & ECF_NOTHROW)
10671 TREE_NOTHROW (decl) = 1;
10672 if (flags & ECF_CONST)
10673 TREE_READONLY (decl) = 1;
10674 if (flags & ECF_PURE)
10675 DECL_PURE_P (decl) = 1;
10676 if (flags & ECF_LOOPING_CONST_OR_PURE)
10677 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10678 if (flags & ECF_NOVOPS)
10679 DECL_IS_NOVOPS (decl) = 1;
10680 if (flags & ECF_NORETURN)
10681 TREE_THIS_VOLATILE (decl) = 1;
10682 if (flags & ECF_MALLOC)
10683 DECL_IS_MALLOC (decl) = 1;
10684 if (flags & ECF_RETURNS_TWICE)
10685 DECL_IS_RETURNS_TWICE (decl) = 1;
10686 if (flags & ECF_LEAF)
10687 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10688 NULL, DECL_ATTRIBUTES (decl));
10689 if (flags & ECF_COLD)
10690 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10691 NULL, DECL_ATTRIBUTES (decl));
10692 if (flags & ECF_RET1)
10693 DECL_ATTRIBUTES (decl)
10694 = tree_cons (get_identifier ("fn spec"),
10695 build_tree_list (NULL_TREE, build_string (1, "1")),
10696 DECL_ATTRIBUTES (decl));
10697 if ((flags & ECF_TM_PURE) && flag_tm)
10698 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10699 /* Looping const or pure is implied by noreturn.
10700 There is currently no way to declare looping const or looping pure alone. */
10701 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10702 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
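/* For example, a call such as

     set_call_expr_flags (decl, ECF_MALLOC | ECF_LEAF | ECF_NOTHROW);

   sets TREE_NOTHROW and DECL_IS_MALLOC directly on DECL and chains a "leaf"
   attribute onto DECL_ATTRIBUTES; this is exactly the combination
   local_define_builtin applies to __builtin_alloca below when stack checking
   is disabled (see alloca_flags).  */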
10706 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10708 static void
10709 local_define_builtin (const char *name, tree type, enum built_in_function code,
10710 const char *library_name, int ecf_flags)
10712 tree decl;
10714 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10715 library_name, NULL_TREE);
10716 set_call_expr_flags (decl, ecf_flags);
10718 set_builtin_decl (code, decl, true);
10721 /* Call this function after instantiating all builtins that the language
10722 front end cares about. This will build the rest of the builtins
10723 and internal functions that are relied upon by the tree optimizers and
10724 the middle-end. */
10726 void
10727 build_common_builtin_nodes (void)
10729 tree tmp, ftype;
10730 int ecf_flags;
10732 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10733 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10735 ftype = build_function_type (void_type_node, void_list_node);
10736 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10737 local_define_builtin ("__builtin_unreachable", ftype,
10738 BUILT_IN_UNREACHABLE,
10739 "__builtin_unreachable",
10740 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10741 | ECF_CONST | ECF_COLD);
10742 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10743 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10744 "abort",
10745 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10748 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10749 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10751 ftype = build_function_type_list (ptr_type_node,
10752 ptr_type_node, const_ptr_type_node,
10753 size_type_node, NULL_TREE);
10755 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10756 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10757 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10758 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10759 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10760 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10763 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10765 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10766 const_ptr_type_node, size_type_node,
10767 NULL_TREE);
10768 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10769 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10772 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10774 ftype = build_function_type_list (ptr_type_node,
10775 ptr_type_node, integer_type_node,
10776 size_type_node, NULL_TREE);
10777 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10778 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10781 /* If we're checking the stack, `alloca' can throw. */
10782 const int alloca_flags
10783 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10785 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10787 ftype = build_function_type_list (ptr_type_node,
10788 size_type_node, NULL_TREE);
10789 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10790 "alloca", alloca_flags);
10793 ftype = build_function_type_list (ptr_type_node, size_type_node,
10794 size_type_node, NULL_TREE);
10795 local_define_builtin ("__builtin_alloca_with_align", ftype,
10796 BUILT_IN_ALLOCA_WITH_ALIGN,
10797 "__builtin_alloca_with_align",
10798 alloca_flags);
10800 ftype = build_function_type_list (ptr_type_node, size_type_node,
10801 size_type_node, size_type_node, NULL_TREE);
10802 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10803 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10804 "__builtin_alloca_with_align_and_max",
10805 alloca_flags);
10807 ftype = build_function_type_list (void_type_node,
10808 ptr_type_node, ptr_type_node,
10809 ptr_type_node, NULL_TREE);
10810 local_define_builtin ("__builtin_init_trampoline", ftype,
10811 BUILT_IN_INIT_TRAMPOLINE,
10812 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10813 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10814 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10815 "__builtin_init_heap_trampoline",
10816 ECF_NOTHROW | ECF_LEAF);
10817 local_define_builtin ("__builtin_init_descriptor", ftype,
10818 BUILT_IN_INIT_DESCRIPTOR,
10819 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10821 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10822 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10823 BUILT_IN_ADJUST_TRAMPOLINE,
10824 "__builtin_adjust_trampoline",
10825 ECF_CONST | ECF_NOTHROW);
10826 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10827 BUILT_IN_ADJUST_DESCRIPTOR,
10828 "__builtin_adjust_descriptor",
10829 ECF_CONST | ECF_NOTHROW);
10831 ftype = build_function_type_list (void_type_node,
10832 ptr_type_node, ptr_type_node, NULL_TREE);
10833 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10834 BUILT_IN_NONLOCAL_GOTO,
10835 "__builtin_nonlocal_goto",
10836 ECF_NORETURN | ECF_NOTHROW);
10838 ftype = build_function_type_list (void_type_node,
10839 ptr_type_node, ptr_type_node, NULL_TREE);
10840 local_define_builtin ("__builtin_setjmp_setup", ftype,
10841 BUILT_IN_SETJMP_SETUP,
10842 "__builtin_setjmp_setup", ECF_NOTHROW);
10844 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10845 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10846 BUILT_IN_SETJMP_RECEIVER,
10847 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10849 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10850 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10851 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10853 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10854 local_define_builtin ("__builtin_stack_restore", ftype,
10855 BUILT_IN_STACK_RESTORE,
10856 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10858 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10859 const_ptr_type_node, size_type_node,
10860 NULL_TREE);
10861 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10862 "__builtin_memcmp_eq",
10863 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10865 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10866 "__builtin_strncmp_eq",
10867 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10869 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10870 "__builtin_strcmp_eq",
10871 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10873 /* If there's a possibility that we might use the ARM EABI, build the
10874 alternate __cxa_end_cleanup node used to resume from C++. */
10875 if (targetm.arm_eabi_unwinder)
10877 ftype = build_function_type_list (void_type_node, NULL_TREE);
10878 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10879 BUILT_IN_CXA_END_CLEANUP,
10880 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10883 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10884 local_define_builtin ("__builtin_unwind_resume", ftype,
10885 BUILT_IN_UNWIND_RESUME,
10886 ((targetm_common.except_unwind_info (&global_options)
10887 == UI_SJLJ)
10888 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10889 ECF_NORETURN);
10891 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10893 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10894 NULL_TREE);
10895 local_define_builtin ("__builtin_return_address", ftype,
10896 BUILT_IN_RETURN_ADDRESS,
10897 "__builtin_return_address",
10898 ECF_NOTHROW);
10901 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10902 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10904 ftype = build_function_type_list (void_type_node, ptr_type_node,
10905 ptr_type_node, NULL_TREE);
10906 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10907 local_define_builtin ("__cyg_profile_func_enter", ftype,
10908 BUILT_IN_PROFILE_FUNC_ENTER,
10909 "__cyg_profile_func_enter", 0);
10910 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10911 local_define_builtin ("__cyg_profile_func_exit", ftype,
10912 BUILT_IN_PROFILE_FUNC_EXIT,
10913 "__cyg_profile_func_exit", 0);
10916 /* The exception object and filter values from the runtime. The argument
10917 must be zero before exception lowering, i.e. from the front end. After
10918 exception lowering, it will be the region number for the exception
10919 landing pad. These functions are PURE instead of CONST to prevent
10920 them from being hoisted past the exception edge that will initialize
10921 its value in the landing pad. */
10922 ftype = build_function_type_list (ptr_type_node,
10923 integer_type_node, NULL_TREE);
10924 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10925 /* Only use TM_PURE if we have TM language support. */
10926 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10927 ecf_flags |= ECF_TM_PURE;
10928 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10929 "__builtin_eh_pointer", ecf_flags);
10931 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10932 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10933 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10934 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10936 ftype = build_function_type_list (void_type_node,
10937 integer_type_node, integer_type_node,
10938 NULL_TREE);
10939 local_define_builtin ("__builtin_eh_copy_values", ftype,
10940 BUILT_IN_EH_COPY_VALUES,
10941 "__builtin_eh_copy_values", ECF_NOTHROW);
10943 /* Complex multiplication and division. These are handled as builtins
10944 rather than optabs because emit_library_call_value doesn't support
10945 complex. Further, we can do slightly better with folding these
10946 beasties if the real and imaginary parts of the arguments are separate. */
10948 int mode;
10950 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10952 char mode_name_buf[4], *q;
10953 const char *p;
10954 enum built_in_function mcode, dcode;
10955 tree type, inner_type;
10956 const char *prefix = "__";
10958 if (targetm.libfunc_gnu_prefix)
10959 prefix = "__gnu_";
10961 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10962 if (type == NULL)
10963 continue;
10964 inner_type = TREE_TYPE (type);
10966 ftype = build_function_type_list (type, inner_type, inner_type,
10967 inner_type, inner_type, NULL_TREE);
10969 mcode = ((enum built_in_function)
10970 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10971 dcode = ((enum built_in_function)
10972 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10974 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10975 *q = TOLOWER (*p);
10976 *q = '\0';
10978 /* For -ftrapping-math these should throw from a former
10979 -fnon-call-exception stmt. */
10980 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10981 NULL);
10982 local_define_builtin (built_in_names[mcode], ftype, mcode,
10983 built_in_names[mcode],
10984 ECF_CONST | ECF_LEAF);
10986 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10987 NULL);
10988 local_define_builtin (built_in_names[dcode], ftype, dcode,
10989 built_in_names[dcode],
10990 ECF_CONST | ECF_LEAF);
10994 init_internal_fns ();
10997 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10998 better way.
11000 If we requested a pointer to a vector, build up the pointers that
11001 we stripped off while looking for the inner type. Similarly for
11002 return values from functions.
11004 The argument TYPE is the top of the chain, and BOTTOM is the
11005 new type which we will point to. */
11007 tree
11008 reconstruct_complex_type (tree type, tree bottom)
11010 tree inner, outer;
11012 if (TREE_CODE (type) == POINTER_TYPE)
11014 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11015 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
11016 TYPE_REF_CAN_ALIAS_ALL (type));
11018 else if (TREE_CODE (type) == REFERENCE_TYPE)
11020 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11021 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
11022 TYPE_REF_CAN_ALIAS_ALL (type));
11024 else if (TREE_CODE (type) == ARRAY_TYPE)
11026 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11027 outer = build_array_type (inner, TYPE_DOMAIN (type));
11029 else if (TREE_CODE (type) == FUNCTION_TYPE)
11031 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11032 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
11034 else if (TREE_CODE (type) == METHOD_TYPE)
11036 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11037 /* The build_method_type_directly() routine prepends 'this' to the
11038 argument list, so we must compensate by getting rid of it. */
11039 outer
11040 = build_method_type_directly
11041 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
11042 inner,
11043 TREE_CHAIN (TYPE_ARG_TYPES (type)));
11045 else if (TREE_CODE (type) == OFFSET_TYPE)
11047 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11048 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
11050 else
11051 return bottom;
11053 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
11054 TYPE_QUALS (type));
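/* Worked example: with TYPE being "float **" and BOTTOM a V4SF-like vector
   type, the recursion walks through both POINTER_TYPEs, returns BOTTOM at
   the innermost level, and rebuilds the two pointer layers on the way back
   up, yielding "vector float **"; TYPE's qualifiers and attributes are
   reapplied at every level by build_type_attribute_qual_variant.  */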
11057 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
11058 the inner type. */
11059 tree
11060 build_vector_type_for_mode (tree innertype, machine_mode mode)
11062 poly_int64 nunits;
11063 unsigned int bitsize;
11065 switch (GET_MODE_CLASS (mode))
11067 case MODE_VECTOR_BOOL:
11068 case MODE_VECTOR_INT:
11069 case MODE_VECTOR_FLOAT:
11070 case MODE_VECTOR_FRACT:
11071 case MODE_VECTOR_UFRACT:
11072 case MODE_VECTOR_ACCUM:
11073 case MODE_VECTOR_UACCUM:
11074 nunits = GET_MODE_NUNITS (mode);
11075 break;
11077 case MODE_INT:
11078 /* Check that there are no leftover bits. */
11079 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
11080 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
11081 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
11082 break;
11084 default:
11085 gcc_unreachable ();
11088 return make_vector_type (innertype, nunits, mode);
11091 /* Similarly, but takes the inner type and number of units, which must be
11092 a power of two. */
11094 tree
11095 build_vector_type (tree innertype, poly_int64 nunits)
11097 return make_vector_type (innertype, nunits, VOIDmode);
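/* Illustrative uses (V4SImode is target-specific and exists only where the
   target defines such a mode):

     tree v4si = build_vector_type (intSI_type_node, 4);
     tree also_v4si = build_vector_type_for_mode (intSI_type_node, V4SImode);

   The first form passes VOIDmode down to make_vector_type and lets
   layout_type choose the mode; the second starts from a known vector (or
   integer) mode and derives the number of units from it.  */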
11100 /* Build a boolean mask vector type with NUNITS units for a data vector of VECTOR_SIZE bytes. */
11102 tree
11103 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
11105 machine_mode mask_mode
11106 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
11108 poly_uint64 vsize;
11109 if (mask_mode == BLKmode)
11110 vsize = vector_size * BITS_PER_UNIT;
11111 else
11112 vsize = GET_MODE_BITSIZE (mask_mode);
11114 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
11116 tree bool_type = build_nonstandard_boolean_type (esize);
11118 return make_vector_type (bool_type, nunits, mask_mode);
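/* E.g. with NUNITS == 4 and VECTOR_SIZE == 16 bytes, a target whose
   get_mask_mode hook yields no mask mode gets BLKmode here, so VSIZE is
   128 bits and each boolean element is 128 / 4 = 32 bits wide; the resulting
   mask type is a vector of four 32-bit booleans, mirroring the layout of the
   data vector it will be compared against.  */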
11121 /* Returns a vector type corresponding to a comparison of VECTYPE. */
11123 tree
11124 build_same_sized_truth_vector_type (tree vectype)
11126 if (VECTOR_BOOLEAN_TYPE_P (vectype))
11127 return vectype;
11129 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
11131 if (known_eq (size, 0U))
11132 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
11134 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
11137 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
11139 tree
11140 build_opaque_vector_type (tree innertype, poly_int64 nunits)
11142 tree t = make_vector_type (innertype, nunits, VOIDmode);
11143 tree cand;
11144 /* We always build the non-opaque variant before the opaque one,
11145 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
11146 cand = TYPE_NEXT_VARIANT (t);
11147 if (cand
11148 && TYPE_VECTOR_OPAQUE (cand)
11149 && check_qualified_type (cand, t, TYPE_QUALS (t)))
11150 return cand;
11151 /* Otherwise build a variant type and make sure to queue it after
11152 the non-opaque type. */
11153 cand = build_distinct_type_copy (t);
11154 TYPE_VECTOR_OPAQUE (cand) = true;
11155 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
11156 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
11157 TYPE_NEXT_VARIANT (t) = cand;
11158 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
11159 return cand;
11162 /* Return the value of element I of VECTOR_CST T as a wide_int. */
11164 wide_int
11165 vector_cst_int_elt (const_tree t, unsigned int i)
11167 /* First handle elements that are directly encoded. */
11168 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11169 if (i < encoded_nelts)
11170 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
11172 /* Identify the pattern that contains element I and work out the index of
11173 the last encoded element for that pattern. */
11174 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11175 unsigned int pattern = i % npatterns;
11176 unsigned int count = i / npatterns;
11177 unsigned int final_i = encoded_nelts - npatterns + pattern;
11179 /* If there are no steps, the final encoded value is the right one. */
11180 if (!VECTOR_CST_STEPPED_P (t))
11181 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11183 /* Otherwise work out the value from the last two encoded elements. */
11184 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11185 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11186 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
11187 return wi::to_wide (v2) + (count - 2) * diff;
11190 /* Return the value of element I of VECTOR_CST T. */
11192 tree
11193 vector_cst_elt (const_tree t, unsigned int i)
11195 /* First handle elements that are directly encoded. */
11196 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11197 if (i < encoded_nelts)
11198 return VECTOR_CST_ENCODED_ELT (t, i);
11200 /* If there are no steps, the final encoded value is the right one. */
11201 if (!VECTOR_CST_STEPPED_P (t))
11203 /* Identify the pattern that contains element I and work out the index of
11204 the last encoded element for that pattern. */
11205 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11206 unsigned int pattern = i % npatterns;
11207 unsigned int final_i = encoded_nelts - npatterns + pattern;
11208 return VECTOR_CST_ENCODED_ELT (t, final_i);
11211 /* Otherwise work out the value from the last two encoded elements. */
11212 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11213 vector_cst_int_elt (t, i));
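/* Worked example of the encoding handled above: the eight-element series
   { 1, 2, 3, 4, 5, 6, 7, 8 } is stored as one pattern with three encoded
   elements { 1, 2, 3 }.  For I beyond the encoded part, vector_cst_int_elt
   computes elt[2] + (i - 2) * (elt[2] - elt[1]), so element 5 is
   3 + 3 * 1 = 6.  A duplicated constant such as { 7, 7, 7, 7 } has a single
   encoded element, which every index maps back to.  */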
11216 /* Given an initializer INIT, return TRUE if INIT is zero or some
11217 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
11218 null, set *NONZERO if and only if INIT is known not to be all
11219 zeros. A return value of false combined with *NONZERO also false
11220 means that INIT may, but need not, be all zeros. Other
11221 combinations indicate definitive answers. */
11223 bool
11224 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
11226 bool dummy;
11227 if (!nonzero)
11228 nonzero = &dummy;
11230 /* Conservatively clear NONZERO and set it only if INIT is definitely
11231 not all zero. */
11232 *nonzero = false;
11234 STRIP_NOPS (init);
11236 unsigned HOST_WIDE_INT off = 0;
11238 switch (TREE_CODE (init))
11240 case INTEGER_CST:
11241 if (integer_zerop (init))
11242 return true;
11244 *nonzero = true;
11245 return false;
11247 case REAL_CST:
11248 /* ??? Note that this is not correct for C4X float formats. There,
11249 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11250 negative exponent. */
11251 if (real_zerop (init)
11252 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
11253 return true;
11255 *nonzero = true;
11256 return false;
11258 case FIXED_CST:
11259 if (fixed_zerop (init))
11260 return true;
11262 *nonzero = true;
11263 return false;
11265 case COMPLEX_CST:
11266 if (integer_zerop (init)
11267 || (real_zerop (init)
11268 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11269 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11270 return true;
11272 *nonzero = true;
11273 return false;
11275 case VECTOR_CST:
11276 if (VECTOR_CST_NPATTERNS (init) == 1
11277 && VECTOR_CST_DUPLICATE_P (init)
11278 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11279 return true;
11281 *nonzero = true;
11282 return false;
11284 case CONSTRUCTOR:
11286 if (TREE_CLOBBER_P (init))
11287 return false;
11289 unsigned HOST_WIDE_INT idx;
11290 tree elt;
11292 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11293 if (!initializer_zerop (elt, nonzero))
11294 return false;
11296 return true;
11299 case MEM_REF:
11301 tree arg = TREE_OPERAND (init, 0);
11302 if (TREE_CODE (arg) != ADDR_EXPR)
11303 return false;
11304 tree offset = TREE_OPERAND (init, 1);
11305 if (TREE_CODE (offset) != INTEGER_CST
11306 || !tree_fits_uhwi_p (offset))
11307 return false;
11308 off = tree_to_uhwi (offset);
11309 if (INT_MAX < off)
11310 return false;
11311 arg = TREE_OPERAND (arg, 0);
11312 if (TREE_CODE (arg) != STRING_CST)
11313 return false;
11314 init = arg;
11316 /* Fall through. */
11318 case STRING_CST:
11320 gcc_assert (off <= INT_MAX);
11322 int i = off;
11323 int n = TREE_STRING_LENGTH (init);
11324 if (n <= i)
11325 return false;
11327 /* We need to loop through all elements to handle cases like
11328 "\0" and "\0foobar". */
11329 for (i = 0; i < n; ++i)
11330 if (TREE_STRING_POINTER (init)[i] != '\0')
11332 *nonzero = true;
11333 return false;
11336 return true;
11339 default:
11340 return false;
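/* Some sample verdicts (written as C-style initializers for illustration):

     { 0, 0.0, { 0 } }              true
     a string of NUL bytes          true
     { 0, 1 }                       false, *NONZERO set
     -0.0                           false, *NONZERO set (the sign bit is set)
     { x, 0 } with X not constant   false, *NONZERO left false ("don't know")

   The last line is the "may but need not be all zeros" case described in the
   comment before the function.  */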
11344 /* Return true if EXPR is an initializer expression in which every element
11345 is a constant that is numerically equal to 0 or 1. The elements do not
11346 need to be equal to each other. */
11348 bool
11349 initializer_each_zero_or_onep (const_tree expr)
11351 STRIP_ANY_LOCATION_WRAPPER (expr);
11353 switch (TREE_CODE (expr))
11355 case INTEGER_CST:
11356 return integer_zerop (expr) || integer_onep (expr);
11358 case REAL_CST:
11359 return real_zerop (expr) || real_onep (expr);
11361 case VECTOR_CST:
11363 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11364 if (VECTOR_CST_STEPPED_P (expr)
11365 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11366 return false;
11368 for (unsigned int i = 0; i < nelts; ++i)
11370 tree elt = vector_cst_elt (expr, i);
11371 if (!initializer_each_zero_or_onep (elt))
11372 return false;
11375 return true;
11378 default:
11379 return false;
11383 /* Given an initializer INIT for a TYPE, return true if INIT is zero
11384 so that it can be replaced by value initialization. This function
11385 distinguishes between empty strings as initializers for arrays and
11386 for pointers (which make it return false). */
11388 bool
11389 type_initializer_zero_p (tree type, tree init)
11391 if (type == error_mark_node || init == error_mark_node)
11392 return false;
11394 STRIP_NOPS (init);
11396 if (POINTER_TYPE_P (type))
11397 return TREE_CODE (init) != STRING_CST && initializer_zerop (init);
11399 if (TREE_CODE (init) != CONSTRUCTOR)
11400 return initializer_zerop (init);
11402 if (TREE_CODE (type) == ARRAY_TYPE)
11404 tree elt_type = TREE_TYPE (type);
11405 elt_type = TYPE_MAIN_VARIANT (elt_type);
11406 if (elt_type == char_type_node)
11407 return initializer_zerop (init);
11409 tree elt_init;
11410 unsigned HOST_WIDE_INT i;
11411 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, elt_init)
11412 if (!type_initializer_zero_p (elt_type, elt_init))
11413 return false;
11414 return true;
11417 if (TREE_CODE (type) != RECORD_TYPE)
11418 return initializer_zerop (init);
11420 tree fld = TYPE_FIELDS (type);
11422 tree fld_init;
11423 unsigned HOST_WIDE_INT i;
11424 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, fld_init)
11426 /* Advance to the next member, skipping over everything that
11427 cannot be initialized (including unnamed bit-fields). */
11428 while (TREE_CODE (fld) != FIELD_DECL
11429 || DECL_ARTIFICIAL (fld)
11430 || (DECL_BIT_FIELD (fld) && !DECL_NAME (fld)))
11432 fld = DECL_CHAIN (fld);
11433 if (!fld)
11434 return true;
11435 continue;
11438 tree fldtype = TREE_TYPE (fld);
11439 if (!type_initializer_zero_p (fldtype, fld_init))
11440 return false;
11442 fld = DECL_CHAIN (fld);
11443 if (!fld)
11444 break;
11447 return true;
11450 /* Check if vector VEC consists entirely of equal elements and
11451 that the number of elements corresponds to the type of VEC.
11452 The function returns the first element of the vector
11453 or NULL_TREE if the vector is not uniform. */
11454 tree
11455 uniform_vector_p (const_tree vec)
11457 tree first, t;
11458 unsigned HOST_WIDE_INT i, nelts;
11460 if (vec == NULL_TREE)
11461 return NULL_TREE;
11463 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11465 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11466 return TREE_OPERAND (vec, 0);
11468 else if (TREE_CODE (vec) == VECTOR_CST)
11470 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11471 return VECTOR_CST_ENCODED_ELT (vec, 0);
11472 return NULL_TREE;
11475 else if (TREE_CODE (vec) == CONSTRUCTOR
11476 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11478 first = error_mark_node;
11480 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11482 if (i == 0)
11484 first = t;
11485 continue;
11487 if (!operand_equal_p (first, t, 0))
11488 return NULL_TREE;
11490 if (i != nelts)
11491 return NULL_TREE;
11493 return first;
11496 return NULL_TREE;
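/* For example, a VEC_DUPLICATE_EXPR of X yields X, the VECTOR_CST
   { 3, 3, 3, 3 } yields the INTEGER_CST 3, while { 3, 3, 3, 4 } or a
   CONSTRUCTOR with fewer elements than the vector type has subparts yields
   NULL_TREE.  */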
11499 /* If the argument is INTEGER_CST, return it. If the argument is a vector
11500 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11501 return NULL_TREE.
11502 Look through location wrappers. */
11504 tree
11505 uniform_integer_cst_p (tree t)
11507 STRIP_ANY_LOCATION_WRAPPER (t);
11509 if (TREE_CODE (t) == INTEGER_CST)
11510 return t;
11512 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11514 t = uniform_vector_p (t);
11515 if (t && TREE_CODE (t) == INTEGER_CST)
11516 return t;
11519 return NULL_TREE;
11522 /* If VECTOR_CST T has a single nonzero element, return the index of that
11523 element, otherwise return -1. */
11526 single_nonzero_element (const_tree t)
11528 unsigned HOST_WIDE_INT nelts;
11529 unsigned int repeat_nelts;
11530 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11531 repeat_nelts = nelts;
11532 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11534 nelts = vector_cst_encoded_nelts (t);
11535 repeat_nelts = VECTOR_CST_NPATTERNS (t);
11537 else
11538 return -1;
11540 int res = -1;
11541 for (unsigned int i = 0; i < nelts; ++i)
11543 tree elt = vector_cst_elt (t, i);
11544 if (!integer_zerop (elt) && !real_zerop (elt))
11546 if (res >= 0 || i >= repeat_nelts)
11547 return -1;
11548 res = i;
11551 return res;
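/* For example, the constant { 0, 0, 5, 0 } gives 2 and { 0, 5, 5, 0 } gives
   -1.  For a variable-length constant the single nonzero element must also
   sit in the leading, non-repeated part of the encoding, since anything in
   the repeated part occurs more than once.  */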
11554 /* Build an empty statement at location LOC. */
11556 tree
11557 build_empty_stmt (location_t loc)
11559 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11560 SET_EXPR_LOCATION (t, loc);
11561 return t;
11565 /* Build an OpenMP clause with code CODE. LOC is the location of the
11566 clause. */
11568 tree
11569 build_omp_clause (location_t loc, enum omp_clause_code code)
11571 tree t;
11572 int size, length;
11574 length = omp_clause_num_ops[code];
11575 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11577 record_node_allocation_statistics (OMP_CLAUSE, size);
11579 t = (tree) ggc_internal_alloc (size);
11580 memset (t, 0, size);
11581 TREE_SET_CODE (t, OMP_CLAUSE);
11582 OMP_CLAUSE_SET_CODE (t, code);
11583 OMP_CLAUSE_LOCATION (t) = loc;
11585 return t;
11588 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11589 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11590 Except for the CODE and operand count field, other storage for the
11591 object is initialized to zeros. */
11593 tree
11594 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11596 tree t;
11597 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11599 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11600 gcc_assert (len >= 1);
11602 record_node_allocation_statistics (code, length);
11604 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11606 TREE_SET_CODE (t, code);
11608 /* Can't use TREE_OPERAND to store the length because if checking is
11609 enabled, it will try to check the length before we store it. :-P */
11610 t->exp.operands[0] = build_int_cst (sizetype, len);
11612 return t;
11615 /* Helper function for build_call_* functions; build a CALL_EXPR with
11616 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11617 the argument slots. */
11619 static tree
11620 build_call_1 (tree return_type, tree fn, int nargs)
11622 tree t;
11624 t = build_vl_exp (CALL_EXPR, nargs + 3);
11625 TREE_TYPE (t) = return_type;
11626 CALL_EXPR_FN (t) = fn;
11627 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11629 return t;
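/* The "+ 3" accounts for the fixed slots of a CALL_EXPR: operand 0 holds the
   operand count (stored by build_vl_exp), operand 1 is CALL_EXPR_FN and
   operand 2 is CALL_EXPR_STATIC_CHAIN, so the call arguments start at
   operand 3 and CALL_EXPR_ARG (t, i) addresses operand i + 3.  */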
11632 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11633 FN and a null static chain slot. NARGS is the number of call arguments
11634 which are specified as "..." arguments. */
11636 tree
11637 build_call_nary (tree return_type, tree fn, int nargs, ...)
11639 tree ret;
11640 va_list args;
11641 va_start (args, nargs);
11642 ret = build_call_valist (return_type, fn, nargs, args);
11643 va_end (args);
11644 return ret;
11647 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11648 FN and a null static chain slot. NARGS is the number of call arguments
11649 which are specified as a va_list ARGS. */
11651 tree
11652 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11654 tree t;
11655 int i;
11657 t = build_call_1 (return_type, fn, nargs);
11658 for (i = 0; i < nargs; i++)
11659 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11660 process_call_operands (t);
11661 return t;
11664 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11665 FN and a null static chain slot. NARGS is the number of call arguments
11666 which are specified as a tree array ARGS. */
11668 tree
11669 build_call_array_loc (location_t loc, tree return_type, tree fn,
11670 int nargs, const tree *args)
11672 tree t;
11673 int i;
11675 t = build_call_1 (return_type, fn, nargs);
11676 for (i = 0; i < nargs; i++)
11677 CALL_EXPR_ARG (t, i) = args[i];
11678 process_call_operands (t);
11679 SET_EXPR_LOCATION (t, loc);
11680 return t;
11683 /* Like build_call_array, but takes a vec. */
11685 tree
11686 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11688 tree ret, t;
11689 unsigned int ix;
11691 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11692 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11693 CALL_EXPR_ARG (ret, ix) = t;
11694 process_call_operands (ret);
11695 return ret;
11698 /* Conveniently construct a function call expression. FNDECL names the
11699 function to be called and N arguments are passed in the array
11700 ARGARRAY. */
11702 tree
11703 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11705 tree fntype = TREE_TYPE (fndecl);
11706 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11708 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11711 /* Conveniently construct a function call expression. FNDECL names the
11712 function to be called and the arguments are passed in the vector
11713 VEC. */
11715 tree
11716 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11718 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11719 vec_safe_address (vec));
11723 /* Conveniently construct a function call expression. FNDECL names the
11724 function to be called, N is the number of arguments, and the "..."
11725 parameters are the argument expressions. */
11727 tree
11728 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11730 va_list ap;
11731 tree *argarray = XALLOCAVEC (tree, n);
11732 int i;
11734 va_start (ap, n);
11735 for (i = 0; i < n; i++)
11736 argarray[i] = va_arg (ap, tree);
11737 va_end (ap);
11738 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11741 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11742 varargs macros aren't supported by all bootstrap compilers. */
11744 tree
11745 build_call_expr (tree fndecl, int n, ...)
11747 va_list ap;
11748 tree *argarray = XALLOCAVEC (tree, n);
11749 int i;
11751 va_start (ap, n);
11752 for (i = 0; i < n; i++)
11753 argarray[i] = va_arg (ap, tree);
11754 va_end (ap);
11755 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
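/* A minimal usage sketch for the helpers above; FNDECL, ARG0 and ARG1 are
   hypothetical trees supplied by a front end.  Note that the FUNCTION_DECL
   itself is passed and the helpers wrap it in an ADDR_EXPR.  */

static tree ATTRIBUTE_UNUSED
example_build_two_argument_call (location_t loc, tree fndecl,
                                 tree arg0, tree arg1)
{
  /* Same as build_call_expr (fndecl, 2, arg0, arg1), except that the
     resulting CALL_EXPR carries LOC as its location.  */
  return build_call_expr_loc (loc, fndecl, 2, arg0, arg1);
}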
11758 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11759 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11760 It will get gimplified later into an ordinary internal function. */
11762 tree
11763 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11764 tree type, int n, const tree *args)
11766 tree t = build_call_1 (type, NULL_TREE, n);
11767 for (int i = 0; i < n; ++i)
11768 CALL_EXPR_ARG (t, i) = args[i];
11769 SET_EXPR_LOCATION (t, loc);
11770 CALL_EXPR_IFN (t) = ifn;
11771 return t;
11774 /* Build an internal call expression. This is just like CALL_EXPR, except
11775 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
11776 internal function. */

11778 tree
11779 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11780 tree type, int n, ...)
11782 va_list ap;
11783 tree *argarray = XALLOCAVEC (tree, n);
11784 int i;
11786 va_start (ap, n);
11787 for (i = 0; i < n; i++)
11788 argarray[i] = va_arg (ap, tree);
11789 va_end (ap);
11790 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11793 /* Return a function call to FN, if the target is guaranteed to support it,
11794 or null otherwise.
11796 N is the number of arguments, passed in the "...", and TYPE is the
11797 type of the return value. */
11799 tree
11800 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11801 int n, ...)
11803 va_list ap;
11804 tree *argarray = XALLOCAVEC (tree, n);
11805 int i;
11807 va_start (ap, n);
11808 for (i = 0; i < n; i++)
11809 argarray[i] = va_arg (ap, tree);
11810 va_end (ap);
11811 if (internal_fn_p (fn))
11813 internal_fn ifn = as_internal_fn (fn);
11814 if (direct_internal_fn_p (ifn))
11816 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11817 if (!direct_internal_fn_supported_p (ifn, types,
11818 OPTIMIZE_FOR_BOTH))
11819 return NULL_TREE;
11821 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11823 else
11825 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11826 if (!fndecl)
11827 return NULL_TREE;
11828 return build_call_expr_loc_array (loc, fndecl, n, argarray);
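/* A usage sketch: ask for a square-root call on X, falling back to plain X
   if the target cannot expand it directly.  CFN_SQRT is the combined_fn
   for the sqrt internal function; X is a hypothetical operand.  */

static tree ATTRIBUTE_UNUSED
example_maybe_sqrt (location_t loc, tree x)
{
  tree call = maybe_build_call_expr_loc (loc, CFN_SQRT, TREE_TYPE (x), 1, x);
  return call ? call : x;
}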
11832 /* Return a function call to the appropriate builtin alloca variant.
11834 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11835 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11836 bound for SIZE in case it is not a fixed value. */
11838 tree
11839 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11841 if (max_size >= 0)
11843 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11844 return
11845 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11847 else if (align > 0)
11849 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11850 return build_call_expr (t, 2, size, size_int (align));
11852 else
11854 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11855 return build_call_expr (t, 1, size);
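/* A usage sketch, assuming BYTES is a sizetype tree supplied by the caller:
   no alignment request (ALIGN == 0) and no known upper bound
   (MAX_SIZE == -1) selects the plain __builtin_alloca variant.  */

static tree ATTRIBUTE_UNUSED
example_plain_alloca (tree bytes)
{
  return build_alloca_call_expr (bytes, 0, -1);
}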
11859 /* Create a new constant string literal consisting of elements of type
11860 ELTYPE and return a tree node representing a char* pointer to it as
11861 an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). The STRING_CST value is
11862 the LEN bytes at STR (the representation of the string, which may
11863 be wide). */
11865 tree
11866 build_string_literal (int len, const char *str,
11867 tree eltype /* = char_type_node */)
11869 tree t = build_string (len, str);
11870 tree index = build_index_type (size_int (len - 1));
11871 eltype = build_type_variant (eltype, 1, 0);
11872 tree type = build_array_type (eltype, index);
11873 TREE_TYPE (t) = type;
11874 TREE_CONSTANT (t) = 1;
11875 TREE_READONLY (t) = 1;
11876 TREE_STATIC (t) = 1;
11878 type = build_pointer_type (eltype);
11879 t = build1 (ADDR_EXPR, type,
11880 build4 (ARRAY_REF, eltype,
11881 t, integer_zero_node, NULL_TREE, NULL_TREE));
11882 return t;
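/* A usage sketch: build the address of a constant "%d\n" string, as a front
   end might do when synthesizing a printf call.  */

static tree ATTRIBUTE_UNUSED
example_format_string (void)
{
  /* "%d\n" is three characters plus the terminating NUL, hence LEN == 4;
     the element type defaults to char_type_node.  */
  return build_string_literal (4, "%d\n");
}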
11887 /* Return true if T (assumed to be a DECL) must be assigned a memory
11888 location. */
11890 bool
11891 needs_to_live_in_memory (const_tree t)
11893 return (TREE_ADDRESSABLE (t)
11894 || is_global_var (t)
11895 || (TREE_CODE (t) == RESULT_DECL
11896 && !DECL_BY_REFERENCE (t)
11897 && aggregate_value_p (t, current_function_decl)));
11900 /* Return the value of the constant X, sign-extended. */
11902 HOST_WIDE_INT
11903 int_cst_value (const_tree x)
11905 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11906 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11908 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11909 gcc_assert (cst_and_fits_in_hwi (x));
11911 if (bits < HOST_BITS_PER_WIDE_INT)
11913 bool negative = ((val >> (bits - 1)) & 1) != 0;
11914 if (negative)
11915 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11916 else
11917 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11920 return val;
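/* Note that the sign extension above depends only on the precision of X,
   not on TYPE_UNSIGNED: with BITS == 8, a low value of 0xff has its top
   bit set and is widened to all-ones, so the function returns -1, whereas
   0x7f is returned unchanged as 127.  */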
11923 /* If TYPE is an integral or pointer type, return an integer type with
11924 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11925 if TYPE is already an integer type of signedness UNSIGNEDP.
11926 If TYPE is a floating-point type, return an integer type with the same
11927 bitsize and with the signedness given by UNSIGNEDP; this is useful
11928 when doing bit-level operations on a floating-point value. */
11930 tree
11931 signed_or_unsigned_type_for (int unsignedp, tree type)
11933 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11934 return type;
11936 if (TREE_CODE (type) == VECTOR_TYPE)
11938 tree inner = TREE_TYPE (type);
11939 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11940 if (!inner2)
11941 return NULL_TREE;
11942 if (inner == inner2)
11943 return type;
11944 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11947 if (TREE_CODE (type) == COMPLEX_TYPE)
11949 tree inner = TREE_TYPE (type);
11950 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11951 if (!inner2)
11952 return NULL_TREE;
11953 if (inner == inner2)
11954 return type;
11955 return build_complex_type (inner2);
11958 unsigned int bits;
11959 if (INTEGRAL_TYPE_P (type)
11960 || POINTER_TYPE_P (type)
11961 || TREE_CODE (type) == OFFSET_TYPE)
11962 bits = TYPE_PRECISION (type);
11963 else if (TREE_CODE (type) == REAL_TYPE)
11964 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11965 else
11966 return NULL_TREE;
11968 return build_nonstandard_integer_type (bits, unsignedp);
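/* A small sketch of the kind of queries this answers; the assertion holds
   by construction of the standard type nodes.  */

static void ATTRIBUTE_UNUSED
example_signedness_conversions (void)
{
  /* "int" with its signedness flipped: an unsigned integer type of the
     same precision.  */
  tree u = signed_or_unsigned_type_for (1, integer_type_node);
  /* A float maps to a signed integer type of the same bit size, which is
     convenient for bit-level operations on the representation.  */
  tree f = signed_or_unsigned_type_for (0, float_type_node);
  gcc_assert (TYPE_UNSIGNED (u) && !TYPE_UNSIGNED (f));
}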
11971 /* If TYPE is an integral or pointer type, return an integer type with
11972 the same precision which is unsigned, or itself if TYPE is already an
11973 unsigned integer type. If TYPE is a floating-point type, return an
11974 unsigned integer type with the same bitsize as TYPE. */
11976 tree
11977 unsigned_type_for (tree type)
11979 return signed_or_unsigned_type_for (1, type);
11982 /* If TYPE is an integral or pointer type, return an integer type with
11983 the same precision which is signed, or itself if TYPE is already a
11984 signed integer type. If TYPE is a floating-point type, return a
11985 signed integer type with the same bitsize as TYPE. */
11987 tree
11988 signed_type_for (tree type)
11990 return signed_or_unsigned_type_for (0, type);
11993 /* If TYPE is a vector type, return a signed integer vector type with the
11994 same width and number of subparts. Otherwise return boolean_type_node. */
11996 tree
11997 truth_type_for (tree type)
11999 if (TREE_CODE (type) == VECTOR_TYPE)
12001 if (VECTOR_BOOLEAN_TYPE_P (type))
12002 return type;
12003 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
12004 GET_MODE_SIZE (TYPE_MODE (type)));
12006 else
12007 return boolean_type_node;
12010 /* Returns the largest value obtainable by casting something in INNER type to
12011 OUTER type. */
12013 tree
12014 upper_bound_in_type (tree outer, tree inner)
12016 unsigned int det = 0;
12017 unsigned oprec = TYPE_PRECISION (outer);
12018 unsigned iprec = TYPE_PRECISION (inner);
12019 unsigned prec;
12021 /* Compute a unique number for every combination. */
12022 det |= (oprec > iprec) ? 4 : 0;
12023 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
12024 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
12026 /* Determine the exponent to use. */
12027 switch (det)
12029 case 0:
12030 case 1:
12031 /* oprec <= iprec, outer: signed, inner: don't care. */
12032 prec = oprec - 1;
12033 break;
12034 case 2:
12035 case 3:
12036 /* oprec <= iprec, outer: unsigned, inner: don't care. */
12037 prec = oprec;
12038 break;
12039 case 4:
12040 /* oprec > iprec, outer: signed, inner: signed. */
12041 prec = iprec - 1;
12042 break;
12043 case 5:
12044 /* oprec > iprec, outer: signed, inner: unsigned. */
12045 prec = iprec;
12046 break;
12047 case 6:
12048 /* oprec > iprec, outer: unsigned, inner: signed. */
12049 prec = oprec;
12050 break;
12051 case 7:
12052 /* oprec > iprec, outer: unsigned, inner: unsigned. */
12053 prec = iprec;
12054 break;
12055 default:
12056 gcc_unreachable ();
12059 return wide_int_to_tree (outer,
12060 wi::mask (prec, false, TYPE_PRECISION (outer)));
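/* Worked example: for OUTER = 16-bit signed and INNER = 8-bit unsigned,
   DET = 4 | 0 | 1 = 5, so PREC = IPREC = 8 and the result is the mask
   0xff, i.e. 255, the largest value an unsigned 8-bit quantity can
   produce when widened to a signed 16-bit type.  */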
12063 /* Returns the smallest value obtainable by casting something in INNER type to
12064 OUTER type. */
12066 tree
12067 lower_bound_in_type (tree outer, tree inner)
12069 unsigned oprec = TYPE_PRECISION (outer);
12070 unsigned iprec = TYPE_PRECISION (inner);
12072 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
12073 and obtain 0. */
12074 if (TYPE_UNSIGNED (outer)
12075 /* If we are widening something of an unsigned type, OUTER type
12076 contains all values of INNER type. In particular, both INNER
12077 and OUTER types have zero in common. */
12078 || (oprec > iprec && TYPE_UNSIGNED (inner)))
12079 return build_int_cst (outer, 0);
12080 else
12082 /* If we are widening a signed type to another signed type, we
12083 want to obtain -2^(iprec-1). If we are keeping the
12084 precision or narrowing to a signed type, we want to obtain
12085 -2^(oprec-1). */
12086 unsigned prec = oprec > iprec ? iprec : oprec;
12087 return wide_int_to_tree (outer,
12088 wi::mask (prec - 1, true,
12089 TYPE_PRECISION (outer)));
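/* Worked example: narrowing to OUTER = 8-bit signed from a wider signed
   INNER gives PREC = 8 and thus -128, while widening from 16-bit signed
   to 32-bit signed gives PREC = 16 and thus -32768, the smallest value
   the inner type can supply.  */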
12093 /* Return nonzero if two operands that are suitable for PHI nodes are
12094 necessarily equal. Specifically, both ARG0 and ARG1 must be either
12095 SSA_NAME or invariant. Note that this is strictly an optimization.
12096 That is, callers of this function can directly call operand_equal_p
12097 and get the same result, only slower. */
12100 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
12102 if (arg0 == arg1)
12103 return 1;
12104 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
12105 return 0;
12106 return operand_equal_p (arg0, arg1, 0);
12109 /* Returns the number of zeros at the end of the binary representation of X. */
12111 tree
12112 num_ending_zeros (const_tree x)
12114 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
12118 #define WALK_SUBTREE(NODE) \
12119 do \
12121 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
12122 if (result) \
12123 return result; \
12125 while (0)
12127 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
12128 to be walked whenever a type is seen in the tree. The rest of the operands
12129 and the return value are as for walk_tree. */
12131 static tree
12132 walk_type_fields (tree type, walk_tree_fn func, void *data,
12133 hash_set<tree> *pset, walk_tree_lh lh)
12135 tree result = NULL_TREE;
12137 switch (TREE_CODE (type))
12139 case POINTER_TYPE:
12140 case REFERENCE_TYPE:
12141 case VECTOR_TYPE:
12142 /* We have to worry about mutually recursive pointers. These can't
12143 be written in C. They can in Ada. It's pathological, but
12144 there's an ACATS test (c38102a) that checks it. Deal with this
12145 by checking if we're pointing to another pointer, that one
12146 points to another pointer, that one does too, and we have no htab.
12147 If so, get a hash table. We check three levels deep to avoid
12148 the cost of the hash table if we don't need one. */
12149 if (POINTER_TYPE_P (TREE_TYPE (type))
12150 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
12151 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
12152 && !pset)
12154 result = walk_tree_without_duplicates (&TREE_TYPE (type),
12155 func, data);
12156 if (result)
12157 return result;
12159 break;
12162 /* fall through */
12164 case COMPLEX_TYPE:
12165 WALK_SUBTREE (TREE_TYPE (type));
12166 break;
12168 case METHOD_TYPE:
12169 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
12171 /* Fall through. */
12173 case FUNCTION_TYPE:
12174 WALK_SUBTREE (TREE_TYPE (type));
12176 tree arg;
12178 /* We never want to walk into default arguments. */
12179 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
12180 WALK_SUBTREE (TREE_VALUE (arg));
12182 break;
12184 case ARRAY_TYPE:
12185 /* Don't follow this node's type if it is a pointer, for fear that
12186 we'll have infinite recursion. If we have a PSET, then we
12187 need not fear. */
12188 if (pset
12189 || (!POINTER_TYPE_P (TREE_TYPE (type))
12190 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
12191 WALK_SUBTREE (TREE_TYPE (type));
12192 WALK_SUBTREE (TYPE_DOMAIN (type));
12193 break;
12195 case OFFSET_TYPE:
12196 WALK_SUBTREE (TREE_TYPE (type));
12197 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
12198 break;
12200 default:
12201 break;
12204 return NULL_TREE;
12207 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
12208 called with the DATA and the address of each sub-tree. If FUNC returns a
12209 non-NULL value, the traversal is stopped, and the value returned by FUNC
12210 is returned. If PSET is non-NULL it is used to record the nodes visited,
12211 and to avoid visiting a node more than once. */
12213 tree
12214 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
12215 hash_set<tree> *pset, walk_tree_lh lh)
12217 enum tree_code code;
12218 int walk_subtrees;
12219 tree result;
12221 #define WALK_SUBTREE_TAIL(NODE) \
12222 do \
12224 tp = & (NODE); \
12225 goto tail_recurse; \
12227 while (0)
12229 tail_recurse:
12230 /* Skip empty subtrees. */
12231 if (!*tp)
12232 return NULL_TREE;
12234 /* Don't walk the same tree twice, if the user has requested
12235 that we avoid doing so. */
12236 if (pset && pset->add (*tp))
12237 return NULL_TREE;
12239 /* Call the function. */
12240 walk_subtrees = 1;
12241 result = (*func) (tp, &walk_subtrees, data);
12243 /* If we found something, return it. */
12244 if (result)
12245 return result;
12247 code = TREE_CODE (*tp);
12249 /* Even if we didn't, FUNC may have decided that there was nothing
12250 interesting below this point in the tree. */
12251 if (!walk_subtrees)
12253 /* But we still need to check our siblings. */
12254 if (code == TREE_LIST)
12255 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12256 else if (code == OMP_CLAUSE)
12257 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12258 else
12259 return NULL_TREE;
12262 if (lh)
12264 result = (*lh) (tp, &walk_subtrees, func, data, pset);
12265 if (result || !walk_subtrees)
12266 return result;
12269 switch (code)
12271 case ERROR_MARK:
12272 case IDENTIFIER_NODE:
12273 case INTEGER_CST:
12274 case REAL_CST:
12275 case FIXED_CST:
12276 case VECTOR_CST:
12277 case STRING_CST:
12278 case BLOCK:
12279 case PLACEHOLDER_EXPR:
12280 case SSA_NAME:
12281 case FIELD_DECL:
12282 case RESULT_DECL:
12283 /* None of these have subtrees other than those already walked
12284 above. */
12285 break;
12287 case TREE_LIST:
12288 WALK_SUBTREE (TREE_VALUE (*tp));
12289 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12290 break;
12292 case TREE_VEC:
12294 int len = TREE_VEC_LENGTH (*tp);
12296 if (len == 0)
12297 break;
12299 /* Walk all elements but the first. */
12300 while (--len)
12301 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12303 /* Now walk the first one as a tail call. */
12304 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12307 case COMPLEX_CST:
12308 WALK_SUBTREE (TREE_REALPART (*tp));
12309 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12311 case CONSTRUCTOR:
12313 unsigned HOST_WIDE_INT idx;
12314 constructor_elt *ce;
12316 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12317 idx++)
12318 WALK_SUBTREE (ce->value);
12320 break;
12322 case SAVE_EXPR:
12323 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12325 case BIND_EXPR:
12327 tree decl;
12328 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12330 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
12331 into declarations that are just mentioned, rather than
12332 declared; they don't really belong to this part of the tree.
12333 And, we can see cycles: the initializer for a declaration
12334 can refer to the declaration itself. */
12335 WALK_SUBTREE (DECL_INITIAL (decl));
12336 WALK_SUBTREE (DECL_SIZE (decl));
12337 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12339 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12342 case STATEMENT_LIST:
12344 tree_stmt_iterator i;
12345 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12346 WALK_SUBTREE (*tsi_stmt_ptr (i));
12348 break;
12350 case OMP_CLAUSE:
12351 switch (OMP_CLAUSE_CODE (*tp))
12353 case OMP_CLAUSE_GANG:
12354 case OMP_CLAUSE__GRIDDIM_:
12355 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12356 /* FALLTHRU */
12358 case OMP_CLAUSE_ASYNC:
12359 case OMP_CLAUSE_WAIT:
12360 case OMP_CLAUSE_WORKER:
12361 case OMP_CLAUSE_VECTOR:
12362 case OMP_CLAUSE_NUM_GANGS:
12363 case OMP_CLAUSE_NUM_WORKERS:
12364 case OMP_CLAUSE_VECTOR_LENGTH:
12365 case OMP_CLAUSE_PRIVATE:
12366 case OMP_CLAUSE_SHARED:
12367 case OMP_CLAUSE_FIRSTPRIVATE:
12368 case OMP_CLAUSE_COPYIN:
12369 case OMP_CLAUSE_COPYPRIVATE:
12370 case OMP_CLAUSE_FINAL:
12371 case OMP_CLAUSE_IF:
12372 case OMP_CLAUSE_NUM_THREADS:
12373 case OMP_CLAUSE_SCHEDULE:
12374 case OMP_CLAUSE_UNIFORM:
12375 case OMP_CLAUSE_DEPEND:
12376 case OMP_CLAUSE_NONTEMPORAL:
12377 case OMP_CLAUSE_NUM_TEAMS:
12378 case OMP_CLAUSE_THREAD_LIMIT:
12379 case OMP_CLAUSE_DEVICE:
12380 case OMP_CLAUSE_DIST_SCHEDULE:
12381 case OMP_CLAUSE_SAFELEN:
12382 case OMP_CLAUSE_SIMDLEN:
12383 case OMP_CLAUSE_ORDERED:
12384 case OMP_CLAUSE_PRIORITY:
12385 case OMP_CLAUSE_GRAINSIZE:
12386 case OMP_CLAUSE_NUM_TASKS:
12387 case OMP_CLAUSE_HINT:
12388 case OMP_CLAUSE_TO_DECLARE:
12389 case OMP_CLAUSE_LINK:
12390 case OMP_CLAUSE_USE_DEVICE_PTR:
12391 case OMP_CLAUSE_USE_DEVICE_ADDR:
12392 case OMP_CLAUSE_IS_DEVICE_PTR:
12393 case OMP_CLAUSE_INCLUSIVE:
12394 case OMP_CLAUSE_EXCLUSIVE:
12395 case OMP_CLAUSE__LOOPTEMP_:
12396 case OMP_CLAUSE__REDUCTEMP_:
12397 case OMP_CLAUSE__CONDTEMP_:
12398 case OMP_CLAUSE__SCANTEMP_:
12399 case OMP_CLAUSE__SIMDUID_:
12400 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12401 /* FALLTHRU */
12403 case OMP_CLAUSE_INDEPENDENT:
12404 case OMP_CLAUSE_NOWAIT:
12405 case OMP_CLAUSE_DEFAULT:
12406 case OMP_CLAUSE_UNTIED:
12407 case OMP_CLAUSE_MERGEABLE:
12408 case OMP_CLAUSE_PROC_BIND:
12409 case OMP_CLAUSE_DEVICE_TYPE:
12410 case OMP_CLAUSE_INBRANCH:
12411 case OMP_CLAUSE_NOTINBRANCH:
12412 case OMP_CLAUSE_FOR:
12413 case OMP_CLAUSE_PARALLEL:
12414 case OMP_CLAUSE_SECTIONS:
12415 case OMP_CLAUSE_TASKGROUP:
12416 case OMP_CLAUSE_NOGROUP:
12417 case OMP_CLAUSE_THREADS:
12418 case OMP_CLAUSE_SIMD:
12419 case OMP_CLAUSE_DEFAULTMAP:
12420 case OMP_CLAUSE_ORDER:
12421 case OMP_CLAUSE_BIND:
12422 case OMP_CLAUSE_AUTO:
12423 case OMP_CLAUSE_SEQ:
12424 case OMP_CLAUSE_TILE:
12425 case OMP_CLAUSE__SIMT_:
12426 case OMP_CLAUSE_IF_PRESENT:
12427 case OMP_CLAUSE_FINALIZE:
12428 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12430 case OMP_CLAUSE_LASTPRIVATE:
12431 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12432 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12433 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12435 case OMP_CLAUSE_COLLAPSE:
12437 int i;
12438 for (i = 0; i < 3; i++)
12439 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12440 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12443 case OMP_CLAUSE_LINEAR:
12444 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12445 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12446 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12447 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12449 case OMP_CLAUSE_ALIGNED:
12450 case OMP_CLAUSE_FROM:
12451 case OMP_CLAUSE_TO:
12452 case OMP_CLAUSE_MAP:
12453 case OMP_CLAUSE__CACHE_:
12454 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12455 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12456 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12458 case OMP_CLAUSE_REDUCTION:
12459 case OMP_CLAUSE_TASK_REDUCTION:
12460 case OMP_CLAUSE_IN_REDUCTION:
12462 int i;
12463 for (i = 0; i < 5; i++)
12464 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12465 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12468 default:
12469 gcc_unreachable ();
12471 break;
12473 case TARGET_EXPR:
12475 int i, len;
12477 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12478 But, we only want to walk once. */
12479 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12480 for (i = 0; i < len; ++i)
12481 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12482 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12485 case DECL_EXPR:
12486 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12487 defining. We only want to walk into these fields of a type in this
12488 case and not in the general case of a mere reference to the type.
12490 The criterion is as follows: if the field can be an expression, it
12491 must be walked only here. This should be in keeping with the fields
12492 that are directly gimplified in gimplify_type_sizes in order for the
12493 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12494 variable-sized types.
12496 Note that DECLs get walked as part of processing the BIND_EXPR. */
12497 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12499 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12500 if (TREE_CODE (*type_p) == ERROR_MARK)
12501 return NULL_TREE;
12503 /* Call the function for the type. See if it returns anything or
12504 doesn't want us to continue. If we are to continue, walk both
12505 the normal fields and those for the declaration case. */
12506 result = (*func) (type_p, &walk_subtrees, data);
12507 if (result || !walk_subtrees)
12508 return result;
12510 /* But do not walk a pointed-to type since it may itself need to
12511 be walked in the declaration case if it isn't anonymous. */
12512 if (!POINTER_TYPE_P (*type_p))
12514 result = walk_type_fields (*type_p, func, data, pset, lh);
12515 if (result)
12516 return result;
12519 /* If this is a record type, also walk the fields. */
12520 if (RECORD_OR_UNION_TYPE_P (*type_p))
12522 tree field;
12524 for (field = TYPE_FIELDS (*type_p); field;
12525 field = DECL_CHAIN (field))
12527 /* We'd like to look at the type of the field, but we can
12528 easily get infinite recursion. So assume it's pointed
12529 to elsewhere in the tree. Also, ignore things that
12530 aren't fields. */
12531 if (TREE_CODE (field) != FIELD_DECL)
12532 continue;
12534 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12535 WALK_SUBTREE (DECL_SIZE (field));
12536 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12537 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12538 WALK_SUBTREE (DECL_QUALIFIER (field));
12542 /* Same for scalar types. */
12543 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12544 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12545 || TREE_CODE (*type_p) == INTEGER_TYPE
12546 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12547 || TREE_CODE (*type_p) == REAL_TYPE)
12549 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12550 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12553 WALK_SUBTREE (TYPE_SIZE (*type_p));
12554 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12556 /* FALLTHRU */
12558 default:
12559 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12561 int i, len;
12563 /* Walk over all the sub-trees of this operand. */
12564 len = TREE_OPERAND_LENGTH (*tp);
12566 /* Go through the subtrees. We need to do this in forward order so
12567 that the scope of a FOR_EXPR is handled properly. */
12568 if (len)
12570 for (i = 0; i < len - 1; ++i)
12571 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12572 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12575 /* If this is a type, walk the needed fields in the type. */
12576 else if (TYPE_P (*tp))
12577 return walk_type_fields (*tp, func, data, pset, lh);
12578 break;
12581 /* We didn't find what we were looking for. */
12582 return NULL_TREE;
12584 #undef WALK_SUBTREE_TAIL
12586 #undef WALK_SUBTREE
12588 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12590 tree
12591 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12592 walk_tree_lh lh)
12594 tree result;
12596 hash_set<tree> pset;
12597 result = walk_tree_1 (tp, func, data, &pset, lh);
12598 return result;
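/* A minimal usage sketch for walk_tree: count the CALL_EXPRs reachable
   from EXPR.  The callback returns NULL_TREE to keep walking; returning
   any tree stops the walk and propagates that tree to the caller.  */

static tree
example_count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                       void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(unsigned *) data;
  return NULL_TREE;
}

static unsigned ATTRIBUTE_UNUSED
example_count_calls (tree expr)
{
  unsigned count = 0;
  /* Use the duplicate-free variant so shared subtrees are visited (and
     counted) only once.  */
  walk_tree_without_duplicates (&expr, example_count_calls_r, &count);
  return count;
}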
12602 tree
12603 tree_block (tree t)
12605 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12607 if (IS_EXPR_CODE_CLASS (c))
12608 return LOCATION_BLOCK (t->exp.locus);
12609 gcc_unreachable ();
12610 return NULL;
12613 void
12614 tree_set_block (tree t, tree b)
12616 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12618 if (IS_EXPR_CODE_CLASS (c))
12620 t->exp.locus = set_block (t->exp.locus, b);
12622 else
12623 gcc_unreachable ();
12626 /* Create a nameless artificial label and put it in the current
12627 function context. The label has a location of LOC. Returns the
12628 newly created label. */
12630 tree
12631 create_artificial_label (location_t loc)
12633 tree lab = build_decl (loc,
12634 LABEL_DECL, NULL_TREE, void_type_node);
12636 DECL_ARTIFICIAL (lab) = 1;
12637 DECL_IGNORED_P (lab) = 1;
12638 DECL_CONTEXT (lab) = current_function_decl;
12639 return lab;
12642 /* Given a tree, try to return a useful variable name that we can use
12643 to prefix a temporary that is being assigned the value of the tree.
12644 I.e. given <temp> = &A, return A. */
12646 const char *
12647 get_name (tree t)
12649 tree stripped_decl;
12651 stripped_decl = t;
12652 STRIP_NOPS (stripped_decl);
12653 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12654 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12655 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12657 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12658 if (!name)
12659 return NULL;
12660 return IDENTIFIER_POINTER (name);
12662 else
12664 switch (TREE_CODE (stripped_decl))
12666 case ADDR_EXPR:
12667 return get_name (TREE_OPERAND (stripped_decl, 0));
12668 default:
12669 return NULL;
12674 /* Return true if FNTYPE has a variable argument list. */
12676 bool
12677 stdarg_p (const_tree fntype)
12679 function_args_iterator args_iter;
12680 tree n = NULL_TREE, t;
12682 if (!fntype)
12683 return false;
12685 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12687 n = t;
12690 return n != NULL_TREE && n != void_type_node;
12693 /* Return true if FNTYPE has a prototype. */
12695 bool
12696 prototype_p (const_tree fntype)
12698 tree t;
12700 gcc_assert (fntype != NULL_TREE);
12702 t = TYPE_ARG_TYPES (fntype);
12703 return (t != NULL_TREE);
12706 /* If BLOCK is inlined from an __attribute__((__artificial__))
12707 routine, return a pointer to the location from where it has been
12708 called. */
12709 location_t *
12710 block_nonartificial_location (tree block)
12712 location_t *ret = NULL;
12714 while (block && TREE_CODE (block) == BLOCK
12715 && BLOCK_ABSTRACT_ORIGIN (block))
12717 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12718 if (TREE_CODE (ao) == FUNCTION_DECL)
12720 /* If AO is an artificial inline, point RET to the
12721 call site locus at which it has been inlined and continue
12722 the loop, in case AO's caller is also an artificial
12723 inline. */
12724 if (DECL_DECLARED_INLINE_P (ao)
12725 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12726 ret = &BLOCK_SOURCE_LOCATION (block);
12727 else
12728 break;
12730 else if (TREE_CODE (ao) != BLOCK)
12731 break;
12733 block = BLOCK_SUPERCONTEXT (block);
12735 return ret;
12739 /* If EXP is inlined from an __attribute__((__artificial__))
12740 function, return the location of the original call expression. */
12742 location_t
12743 tree_nonartificial_location (tree exp)
12745 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12747 if (loc)
12748 return *loc;
12749 else
12750 return EXPR_LOCATION (exp);
12754 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12755 nodes. */
12757 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12759 hashval_t
12760 cl_option_hasher::hash (tree x)
12762 const_tree const t = x;
12763 const char *p;
12764 size_t i;
12765 size_t len = 0;
12766 hashval_t hash = 0;
12768 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12770 p = (const char *)TREE_OPTIMIZATION (t);
12771 len = sizeof (struct cl_optimization);
12774 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12775 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12777 else
12778 gcc_unreachable ();
12780 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12781 something else. */
12782 for (i = 0; i < len; i++)
12783 if (p[i])
12784 hash = (hash << 4) ^ ((i << 2) | p[i]);
12786 return hash;
12789 /* Return nonzero if the value represented by X (an OPTIMIZATION or
12790 TARGET_OPTION tree node) is the same as that given by Y, which is
12791 a node of the same kind. */
12793 bool
12794 cl_option_hasher::equal (tree x, tree y)
12796 const_tree const xt = x;
12797 const_tree const yt = y;
12799 if (TREE_CODE (xt) != TREE_CODE (yt))
12800 return 0;
12802 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12803 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12804 TREE_OPTIMIZATION (yt));
12805 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12806 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12807 TREE_TARGET_OPTION (yt));
12808 else
12809 gcc_unreachable ();
12812 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12814 tree
12815 build_optimization_node (struct gcc_options *opts)
12817 tree t;
12819 /* Use the cache of optimization nodes. */
12821 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12822 opts);
12824 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12825 t = *slot;
12826 if (!t)
12828 /* Insert this one into the hash table. */
12829 t = cl_optimization_node;
12830 *slot = t;
12832 /* Make a new node for next time round. */
12833 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12836 return t;
12839 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12841 tree
12842 build_target_option_node (struct gcc_options *opts)
12844 tree t;
12846 /* Use the cache of optimization nodes. */
12848 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12849 opts);
12851 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12852 t = *slot;
12853 if (!t)
12855 /* Insert this one into the hash table. */
12856 t = cl_target_option_node;
12857 *slot = t;
12859 /* Make a new node for next time round. */
12860 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12863 return t;
12866 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12867 so that they aren't saved during PCH writing. */
12869 void
12870 prepare_target_option_nodes_for_pch (void)
12872 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12873 for (; iter != cl_option_hash_table->end (); ++iter)
12874 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12875 TREE_TARGET_GLOBALS (*iter) = NULL;
12878 /* Determine the "ultimate origin" of a block. */
12880 tree
12881 block_ultimate_origin (const_tree block)
12883 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12885 if (origin == NULL_TREE)
12886 return NULL_TREE;
12887 else
12889 gcc_checking_assert ((DECL_P (origin)
12890 && DECL_ORIGIN (origin) == origin)
12891 || BLOCK_ORIGIN (origin) == origin);
12892 return origin;
12896 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12897 no instruction. */
12899 bool
12900 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12902 /* Do not strip casts into or out of differing address spaces. */
12903 if (POINTER_TYPE_P (outer_type)
12904 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12906 if (!POINTER_TYPE_P (inner_type)
12907 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12908 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12909 return false;
12911 else if (POINTER_TYPE_P (inner_type)
12912 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12914 /* We already know that outer_type is not a pointer with
12915 a non-generic address space. */
12916 return false;
12919 /* Use precision rather than machine mode when we can, which gives
12920 the correct answer even for submode (bit-field) types. */
12921 if ((INTEGRAL_TYPE_P (outer_type)
12922 || POINTER_TYPE_P (outer_type)
12923 || TREE_CODE (outer_type) == OFFSET_TYPE)
12924 && (INTEGRAL_TYPE_P (inner_type)
12925 || POINTER_TYPE_P (inner_type)
12926 || TREE_CODE (inner_type) == OFFSET_TYPE))
12927 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12929 /* Otherwise fall back on comparing machine modes (e.g. for
12930 aggregate types, floats). */
12931 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
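/* For instance, "int" and "unsigned int" are built with the same precision,
   so converting between them is always a nop, whereas a conversion that
   changes the precision (e.g. "int" to a 64-bit "long long") is not.  */

static void ATTRIBUTE_UNUSED
example_nop_conversion (void)
{
  gcc_assert (tree_nop_conversion_p (unsigned_type_node, integer_type_node));
}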
12934 /* Return true iff conversion in EXP generates no instruction. Mark
12935 it inline so that we fully inline into the stripping functions even
12936 though we have two uses of this function. */
12938 static inline bool
12939 tree_nop_conversion (const_tree exp)
12941 tree outer_type, inner_type;
12943 if (location_wrapper_p (exp))
12944 return true;
12945 if (!CONVERT_EXPR_P (exp)
12946 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12947 return false;
12949 outer_type = TREE_TYPE (exp);
12950 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12951 if (!inner_type || inner_type == error_mark_node)
12952 return false;
12954 return tree_nop_conversion_p (outer_type, inner_type);
12957 /* Return true iff conversion in EXP generates no instruction. Don't
12958 consider conversions changing the signedness. */
12960 static bool
12961 tree_sign_nop_conversion (const_tree exp)
12963 tree outer_type, inner_type;
12965 if (!tree_nop_conversion (exp))
12966 return false;
12968 outer_type = TREE_TYPE (exp);
12969 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12971 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12972 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12975 /* Strip conversions from EXP according to tree_nop_conversion and
12976 return the resulting expression. */
12978 tree
12979 tree_strip_nop_conversions (tree exp)
12981 while (tree_nop_conversion (exp))
12982 exp = TREE_OPERAND (exp, 0);
12983 return exp;
12986 /* Strip conversions from EXP according to tree_sign_nop_conversion
12987 and return the resulting expression. */
12989 tree
12990 tree_strip_sign_nop_conversions (tree exp)
12992 while (tree_sign_nop_conversion (exp))
12993 exp = TREE_OPERAND (exp, 0);
12994 return exp;
12997 /* Avoid any floating point extensions from EXP. */
12998 tree
12999 strip_float_extensions (tree exp)
13001 tree sub, expt, subt;
13003 /* For a floating point constant, look up the narrowest type that can hold
13004 it properly and handle it like (type)(narrowest_type)constant.
13005 This way we can optimize for instance a=a*2.0 where "a" is float
13006 but 2.0 is a double constant. */
13007 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
13009 REAL_VALUE_TYPE orig;
13010 tree type = NULL;
13012 orig = TREE_REAL_CST (exp);
13013 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
13014 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
13015 type = float_type_node;
13016 else if (TYPE_PRECISION (TREE_TYPE (exp))
13017 > TYPE_PRECISION (double_type_node)
13018 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
13019 type = double_type_node;
13020 if (type)
13021 return build_real_truncate (type, orig);
13024 if (!CONVERT_EXPR_P (exp))
13025 return exp;
13027 sub = TREE_OPERAND (exp, 0);
13028 subt = TREE_TYPE (sub);
13029 expt = TREE_TYPE (exp);
13031 if (!FLOAT_TYPE_P (subt))
13032 return exp;
13034 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
13035 return exp;
13037 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
13038 return exp;
13040 return strip_float_extensions (sub);
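/* A usage sketch, assuming F is a non-constant operand of float type: the
   widening cast built here is peeled off again, so a caller can detect
   that a computation done in double really only needs float precision.  */

static tree ATTRIBUTE_UNUSED
example_strip_widening_cast (tree f)
{
  tree widened = fold_convert (double_type_node, f);
  return strip_float_extensions (widened);
}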
13043 /* Strip out all handled components that produce invariant
13044 offsets. */
13046 const_tree
13047 strip_invariant_refs (const_tree op)
13049 while (handled_component_p (op))
13051 switch (TREE_CODE (op))
13053 case ARRAY_REF:
13054 case ARRAY_RANGE_REF:
13055 if (!is_gimple_constant (TREE_OPERAND (op, 1))
13056 || TREE_OPERAND (op, 2) != NULL_TREE
13057 || TREE_OPERAND (op, 3) != NULL_TREE)
13058 return NULL;
13059 break;
13061 case COMPONENT_REF:
13062 if (TREE_OPERAND (op, 2) != NULL_TREE)
13063 return NULL;
13064 break;
13066 default:;
13068 op = TREE_OPERAND (op, 0);
13071 return op;
13074 static GTY(()) tree gcc_eh_personality_decl;
13076 /* Return the GCC personality function decl. */
13078 tree
13079 lhd_gcc_personality (void)
13081 if (!gcc_eh_personality_decl)
13082 gcc_eh_personality_decl = build_personality_function ("gcc");
13083 return gcc_eh_personality_decl;
13086 /* TARGET is the call target of a GIMPLE call statement
13087 (obtained by gimple_call_fn). Return true if it is an
13088 OBJ_TYPE_REF representing a virtual call to a C++ method.
13089 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
13090 through a cast, where the middle-end devirtualization machinery
13091 can't apply.) */
13093 bool
13094 virtual_method_call_p (const_tree target)
13096 if (TREE_CODE (target) != OBJ_TYPE_REF)
13097 return false;
13098 tree t = TREE_TYPE (target);
13099 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
13100 t = TREE_TYPE (t);
13101 if (TREE_CODE (t) == FUNCTION_TYPE)
13102 return false;
13103 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
13104 /* If we do not have BINFO associated, it means that type was built
13105 without devirtualization enabled. Do not consider this a virtual
13106 call. */
13107 if (!TYPE_BINFO (obj_type_ref_class (target)))
13108 return false;
13109 return true;
13112 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
13114 static tree
13115 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
13117 unsigned int i;
13118 tree base_binfo, b;
13120 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13121 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
13122 && types_same_for_odr (TREE_TYPE (base_binfo), type))
13123 return base_binfo;
13124 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
13125 return b;
13126 return NULL;
13129 /* Try to find a base info of BINFO that would have its field decl at offset
13130 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
13131 found, return it, otherwise return NULL_TREE. */
13133 tree
13134 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
13136 tree type = BINFO_TYPE (binfo);
13138 while (true)
13140 HOST_WIDE_INT pos, size;
13141 tree fld;
13142 int i;
13144 if (types_same_for_odr (type, expected_type))
13145 return binfo;
13146 if (maybe_lt (offset, 0))
13147 return NULL_TREE;
13149 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
13151 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
13152 continue;
13154 pos = int_bit_position (fld);
13155 size = tree_to_uhwi (DECL_SIZE (fld));
13156 if (known_in_range_p (offset, pos, size))
13157 break;
13159 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
13160 return NULL_TREE;
13162 /* Offset 0 indicates the primary base, whose vtable contents are
13163 represented in the binfo for the derived class. */
13164 else if (maybe_ne (offset, 0))
13166 tree found_binfo = NULL, base_binfo;
13167 /* Offsets in BINFO are in bytes relative to the whole structure
13168 while POS is in bits relative to the containing field. */
13169 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
13170 / BITS_PER_UNIT);
13172 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13173 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
13174 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
13176 found_binfo = base_binfo;
13177 break;
13179 if (found_binfo)
13180 binfo = found_binfo;
13181 else
13182 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
13183 binfo_offset);
13186 type = TREE_TYPE (fld);
13187 offset -= pos;
13191 /* Returns true if X is a typedef decl. */
13193 bool
13194 is_typedef_decl (const_tree x)
13196 return (x && TREE_CODE (x) == TYPE_DECL
13197 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
13200 /* Returns true iff TYPE is a type variant created for a typedef. */
13202 bool
13203 typedef_variant_p (const_tree type)
13205 return is_typedef_decl (TYPE_NAME (type));
13208 /* A class to handle converting a string that might contain
13209 control characters (e.g. newline, form-feed, etc.) into one
13210 which contains escape sequences instead. */
13212 class escaped_string
13214 public:
13215 escaped_string () { m_owned = false; m_str = NULL; };
13216 ~escaped_string () { if (m_owned) free (m_str); }
13217 operator const char *() const { return (const char *) m_str; }
13218 void escape (const char *);
13219 private:
13220 char *m_str;
13221 bool m_owned;
13224 /* PR 84195: Replace control characters in "unescaped" with their
13225 escaped equivalents. Allow newlines if -fmessage-length has
13226 been set to a non-zero value. This is done here, rather than
13227 where the attribute is recorded as the message length can
13228 change between these two locations. */
13230 void
13231 escaped_string::escape (const char *unescaped)
13233 char *escaped;
13234 size_t i, new_i, len;
13236 if (m_owned)
13237 free (m_str);
13239 m_str = const_cast<char *> (unescaped);
13240 m_owned = false;
13242 if (unescaped == NULL || *unescaped == 0)
13243 return;
13245 len = strlen (unescaped);
13246 escaped = NULL;
13247 new_i = 0;
13249 for (i = 0; i < len; i++)
13251 char c = unescaped[i];
13253 if (!ISCNTRL (c))
13255 if (escaped)
13256 escaped[new_i++] = c;
13257 continue;
13260 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
13262 if (escaped == NULL)
13264 /* We only allocate space for a new string if we
13265 actually encounter a control character that
13266 needs replacing. */
13267 escaped = (char *) xmalloc (len * 2 + 1);
13268 strncpy (escaped, unescaped, i);
13269 new_i = i;
13272 escaped[new_i++] = '\\';
13274 switch (c)
13276 case '\a': escaped[new_i++] = 'a'; break;
13277 case '\b': escaped[new_i++] = 'b'; break;
13278 case '\f': escaped[new_i++] = 'f'; break;
13279 case '\n': escaped[new_i++] = 'n'; break;
13280 case '\r': escaped[new_i++] = 'r'; break;
13281 case '\t': escaped[new_i++] = 't'; break;
13282 case '\v': escaped[new_i++] = 'v'; break;
13283 default: escaped[new_i++] = '?'; break;
13286 else if (escaped)
13287 escaped[new_i++] = c;
13290 if (escaped)
13292 escaped[new_i] = 0;
13293 m_str = escaped;
13294 m_owned = true;
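/* A usage sketch: escaping a message that contains a tab.  The
   escaped_string owns any buffer it allocates and frees it in its
   destructor.  */

static void ATTRIBUTE_UNUSED
example_escape_message (void)
{
  escaped_string msg;
  msg.escape ("bad\tidea");
  /* The tab is replaced by the two characters '\\' and 't', so the text
     prints as "bad\tidea" instead of containing a real tab.  */
  const char *p = (const char *) msg;
  gcc_assert (p && strchr (p, '\\') != NULL);
}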
13298 /* Warn about a use of an identifier which was marked deprecated. Returns
13299 whether a warning was given. */
13301 bool
13302 warn_deprecated_use (tree node, tree attr)
13304 escaped_string msg;
13306 if (node == 0 || !warn_deprecated_decl)
13307 return false;
13309 if (!attr)
13311 if (DECL_P (node))
13312 attr = DECL_ATTRIBUTES (node);
13313 else if (TYPE_P (node))
13315 tree decl = TYPE_STUB_DECL (node);
13316 if (decl)
13317 attr = lookup_attribute ("deprecated",
13318 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
13322 if (attr)
13323 attr = lookup_attribute ("deprecated", attr);
13325 if (attr)
13326 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
13328 bool w = false;
13329 if (DECL_P (node))
13331 auto_diagnostic_group d;
13332 if (msg)
13333 w = warning (OPT_Wdeprecated_declarations,
13334 "%qD is deprecated: %s", node, (const char *) msg);
13335 else
13336 w = warning (OPT_Wdeprecated_declarations,
13337 "%qD is deprecated", node);
13338 if (w)
13339 inform (DECL_SOURCE_LOCATION (node), "declared here");
13341 else if (TYPE_P (node))
13343 tree what = NULL_TREE;
13344 tree decl = TYPE_STUB_DECL (node);
13346 if (TYPE_NAME (node))
13348 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
13349 what = TYPE_NAME (node);
13350 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
13351 && DECL_NAME (TYPE_NAME (node)))
13352 what = DECL_NAME (TYPE_NAME (node));
13355 auto_diagnostic_group d;
13356 if (what)
13358 if (msg)
13359 w = warning (OPT_Wdeprecated_declarations,
13360 "%qE is deprecated: %s", what, (const char *) msg);
13361 else
13362 w = warning (OPT_Wdeprecated_declarations,
13363 "%qE is deprecated", what);
13365 else
13367 if (msg)
13368 w = warning (OPT_Wdeprecated_declarations,
13369 "type is deprecated: %s", (const char *) msg);
13370 else
13371 w = warning (OPT_Wdeprecated_declarations,
13372 "type is deprecated");
13375 if (w && decl)
13376 inform (DECL_SOURCE_LOCATION (decl), "declared here");
13379 return w;
13382 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13383 somewhere in it. */
13385 bool
13386 contains_bitfld_component_ref_p (const_tree ref)
13388 while (handled_component_p (ref))
13390 if (TREE_CODE (ref) == COMPONENT_REF
13391 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13392 return true;
13393 ref = TREE_OPERAND (ref, 0);
13396 return false;
13399 /* Try to determine whether a TRY_CATCH expression can fall through.
13400 This is a subroutine of block_may_fallthru. */
13402 static bool
13403 try_catch_may_fallthru (const_tree stmt)
13405 tree_stmt_iterator i;
13407 /* If the TRY block can fall through, the whole TRY_CATCH can
13408 fall through. */
13409 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13410 return true;
13412 i = tsi_start (TREE_OPERAND (stmt, 1));
13413 switch (TREE_CODE (tsi_stmt (i)))
13415 case CATCH_EXPR:
13416 /* We expect to see a sequence of CATCH_EXPR trees, each with a
13417 catch expression and a body. The whole TRY_CATCH may fall
13418 through iff any of the catch bodies falls through. */
13419 for (; !tsi_end_p (i); tsi_next (&i))
13421 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13422 return true;
13424 return false;
13426 case EH_FILTER_EXPR:
13427 /* The exception filter expression only matters if there is an
13428 exception. If the exception does not match EH_FILTER_TYPES,
13429 we will execute EH_FILTER_FAILURE, and we will fall through
13430 if that falls through. If the exception does match
13431 EH_FILTER_TYPES, the stack unwinder will continue up the
13432 stack, so we will not fall through. We don't know whether we
13433 will throw an exception which matches EH_FILTER_TYPES or not,
13434 so we just ignore EH_FILTER_TYPES and assume that we might
13435 throw an exception which doesn't match. */
13436 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13438 default:
13439 /* This case represents statements to be executed when an
13440 exception occurs. Those statements are implicitly followed
13441 by a RESX statement to resume execution after the exception.
13442 So in this case the TRY_CATCH never falls through. */
13443 return false;
13447 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13448 need not be 100% accurate; simply be conservative and return true if we
13449 don't know. This is used only to avoid stupidly generating extra code.
13450 If we're wrong, we'll just delete the extra code later. */
13452 bool
13453 block_may_fallthru (const_tree block)
13455 /* This CONST_CAST is okay because expr_last returns its argument
13456 unmodified and we assign it to a const_tree. */
13457 const_tree stmt = expr_last (CONST_CAST_TREE (block));
13459 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13461 case GOTO_EXPR:
13462 case RETURN_EXPR:
13463 /* Easy cases. If the last statement of the block implies
13464 control transfer, then we can't fall through. */
13465 return false;
13467 case SWITCH_EXPR:
13468 /* If there is a default: label or the case labels cover all possible
13469 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13470 to some case label in all cases and all we care about is whether the
13471 SWITCH_BODY falls through. */
13472 if (SWITCH_ALL_CASES_P (stmt))
13473 return block_may_fallthru (SWITCH_BODY (stmt));
13474 return true;
13476 case COND_EXPR:
13477 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13478 return true;
13479 return block_may_fallthru (COND_EXPR_ELSE (stmt));
13481 case BIND_EXPR:
13482 return block_may_fallthru (BIND_EXPR_BODY (stmt));
13484 case TRY_CATCH_EXPR:
13485 return try_catch_may_fallthru (stmt);
13487 case TRY_FINALLY_EXPR:
13488 /* The finally clause is always executed after the try clause,
13489 so if it does not fall through, then the try-finally will not
13490 fall through. Otherwise, if the try clause does not fall
13491 through, then when the finally clause falls through it will
13492 resume execution wherever the try clause was going. So the
13493 whole try-finally will only fall through if both the try
13494 clause and the finally clause fall through. */
13495 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13496 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13498 case EH_ELSE_EXPR:
13499 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13501 case MODIFY_EXPR:
13502 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13503 stmt = TREE_OPERAND (stmt, 1);
13504 else
13505 return true;
13506 /* FALLTHRU */
13508 case CALL_EXPR:
13509 /* Functions that do not return do not fall through. */
13510 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13512 case CLEANUP_POINT_EXPR:
13513 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13515 case TARGET_EXPR:
13516 return block_may_fallthru (TREE_OPERAND (stmt, 1));
13518 case ERROR_MARK:
13519 return true;
13521 default:
13522 return lang_hooks.block_may_fallthru (stmt);
13526 /* True if we are using EH to handle cleanups. */
13527 static bool using_eh_for_cleanups_flag = false;
13529 /* This routine is called from front ends to indicate that EH should be
13530 used for cleanups. */
13531 void
13532 using_eh_for_cleanups (void)
13534 using_eh_for_cleanups_flag = true;
13537 /* Query whether EH is used for cleanups. */
13538 bool
13539 using_eh_for_cleanups_p (void)
13541 return using_eh_for_cleanups_flag;
13544 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
13545 const char *
13546 get_tree_code_name (enum tree_code code)
13548 const char *invalid = "<invalid tree code>";
13550 if (code >= MAX_TREE_CODES)
13552 if (code == 0xa5a5)
13553 return "ggc_freed";
13554 return invalid;
13557 return tree_code_name[code];
13560 /* Drops the TREE_OVERFLOW flag from T. */
13562 tree
13563 drop_tree_overflow (tree t)
13565 gcc_checking_assert (TREE_OVERFLOW (t));
13567 /* For tree codes with a sharing machinery re-build the result. */
13568 if (poly_int_tree_p (t))
13569 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13571 /* For VECTOR_CST, remove the overflow bits from the encoded elements
13572 and canonicalize the result. */
13573 if (TREE_CODE (t) == VECTOR_CST)
13575 tree_vector_builder builder;
13576 builder.new_unary_operation (TREE_TYPE (t), t, true);
13577 unsigned int count = builder.encoded_nelts ();
13578 for (unsigned int i = 0; i < count; ++i)
13580 tree elt = VECTOR_CST_ELT (t, i);
13581 if (TREE_OVERFLOW (elt))
13582 elt = drop_tree_overflow (elt);
13583 builder.quick_push (elt);
13585 return builder.build ();
13588 /* Otherwise, as all tcc_constants are possibly shared, copy the node
13589 and drop the flag. */
13590 t = copy_node (t);
13591 TREE_OVERFLOW (t) = 0;
13593 /* For constants that contain nested constants, drop the flag
13594 from those as well. */
13595 if (TREE_CODE (t) == COMPLEX_CST)
13597 if (TREE_OVERFLOW (TREE_REALPART (t)))
13598 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13599 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13600 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13603 return t;
13606 /* Given a memory reference expression T, return its base address.
13607 The base address of a memory reference expression is the main
13608 object being referenced. For instance, the base address for
13609 'array[i].fld[j]' is 'array'. You can think of this as stripping
13610 away the offset part from a memory address.
13612 This function calls handled_component_p to strip away all the inner
13613 parts of the memory reference until it reaches the base object. */
13615 tree
13616 get_base_address (tree t)
13618 while (handled_component_p (t))
13619 t = TREE_OPERAND (t, 0);
13621 if ((TREE_CODE (t) == MEM_REF
13622 || TREE_CODE (t) == TARGET_MEM_REF)
13623 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13624 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13626 /* ??? Either the alias oracle or all callers need to properly deal
13627 with WITH_SIZE_EXPRs before we can look through those. */
13628 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13629 return NULL_TREE;
13631 return t;
13634 /* Return a tree of sizetype representing the size, in bytes, of the element
13635 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13637 tree
13638 array_ref_element_size (tree exp)
13640 tree aligned_size = TREE_OPERAND (exp, 3);
13641 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13642 location_t loc = EXPR_LOCATION (exp);
13644 /* If a size was specified in the ARRAY_REF, it's the size measured
13645 in alignment units of the element type. So multiply by that value. */
13646 if (aligned_size)
13648 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13649 sizetype from another type of the same width and signedness. */
13650 if (TREE_TYPE (aligned_size) != sizetype)
13651 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13652 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13653 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13656 /* Otherwise, take the size from that of the element type. Substitute
13657 any PLACEHOLDER_EXPR that we have. */
13658 else
13659 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13662 /* Return a tree representing the lower bound of the array mentioned in
13663 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13665 tree
13666 array_ref_low_bound (tree exp)
13668 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13670 /* If a lower bound is specified in EXP, use it. */
13671 if (TREE_OPERAND (exp, 2))
13672 return TREE_OPERAND (exp, 2);
13674 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13675 substituting for a PLACEHOLDER_EXPR as needed. */
13676 if (domain_type && TYPE_MIN_VALUE (domain_type))
13677 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13679 /* Otherwise, return a zero of the appropriate type. */
13680 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13683 /* Return a tree representing the upper bound of the array mentioned in
13684 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13686 tree
13687 array_ref_up_bound (tree exp)
13689 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13691 /* If there is a domain type and it has an upper bound, use it, substituting
13692 for a PLACEHOLDER_EXPR as needed. */
13693 if (domain_type && TYPE_MAX_VALUE (domain_type))
13694 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13696 /* Otherwise fail. */
13697 return NULL_TREE;
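/* Illustrative sketch, not part of the original tree.c: combining the
   ARRAY_REF accessors above to compute a byte offset relative to the start
   of the array.  The helper name is hypothetical.  */

static tree
example_array_ref_byte_offset (tree exp)
{
  gcc_assert (TREE_CODE (exp) == ARRAY_REF
              || TREE_CODE (exp) == ARRAY_RANGE_REF);
  tree index = TREE_OPERAND (exp, 1);
  tree low = array_ref_low_bound (exp);
  tree size = array_ref_element_size (exp);
  /* (index - low_bound) * element_size, all computed in sizetype.  */
  tree rel = size_binop (MINUS_EXPR, fold_convert (sizetype, index),
                         fold_convert (sizetype, low));
  return size_binop (MULT_EXPR, rel, size);
}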
13700 /* Returns true if REF is an array reference or a component reference
13701 to an array at the end of a structure.
13702 If this is the case, the array may be allocated larger
13703 than its upper bound implies. */
13705 bool
13706 array_at_struct_end_p (tree ref)
13708 tree atype;
13710 if (TREE_CODE (ref) == ARRAY_REF
13711 || TREE_CODE (ref) == ARRAY_RANGE_REF)
13713 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13714 ref = TREE_OPERAND (ref, 0);
13716 else if (TREE_CODE (ref) == COMPONENT_REF
13717 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13718 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13719 else
13720 return false;
13722 if (TREE_CODE (ref) == STRING_CST)
13723 return false;
13725 tree ref_to_array = ref;
13726 while (handled_component_p (ref))
13728 /* If the reference chain contains a component reference to a
13729 non-union type and another field follows, the reference
13730 is not at the end of a structure. */
13731 if (TREE_CODE (ref) == COMPONENT_REF)
13733 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13735 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13736 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13737 nextf = DECL_CHAIN (nextf);
13738 if (nextf)
13739 return false;
13742 /* If we have a multi-dimensional array we do not consider
13743 a non-innermost dimension as flex array if the whole
13744 multi-dimensional array is at struct end.
13745 Same for an array of aggregates with a trailing array
13746 member. */
13747 else if (TREE_CODE (ref) == ARRAY_REF)
13748 return false;
13749 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13751 /* If we view an underlying object as something else, then what we
13752 gathered up to now is what we have to rely on. */
13753 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13754 break;
13755 else
13756 gcc_unreachable ();
13758 ref = TREE_OPERAND (ref, 0);
13761 /* The array is now at struct end.  Treat flexible arrays as
13762 always subject to extension, even into just the padding constrained
13763 by an underlying decl. */
13764 if (! TYPE_SIZE (atype)
13765 || ! TYPE_DOMAIN (atype)
13766 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13767 return true;
13769 if (TREE_CODE (ref) == MEM_REF
13770 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13771 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13773 /* If the reference is based on a declared entity, the size of the array
13774 is constrained by its given domain. (Do not trust commons PR/69368). */
13775 if (DECL_P (ref)
13776 && !(flag_unconstrained_commons
13777 && VAR_P (ref) && DECL_COMMON (ref))
13778 && DECL_SIZE_UNIT (ref)
13779 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13781 /* Check whether the array domain covers all of the available
13782 padding. */
13783 poly_int64 offset;
13784 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13785 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13786 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13787 return true;
13788 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13789 return true;
13791 /* If at least one extra element fits it is a flexarray. */
13792 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13793 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13794 + 2)
13795 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13796 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13797 return true;
13799 return false;
13802 return true;
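/* Illustrative sketch, not part of the original tree.c: the classic case
   this predicate covers is the pre-C99 "struct S { int n; char data[1]; }"
   idiom, where a reference like s->data[i] may legitimately run past the
   declared bound.  The helper name below is hypothetical.  */

static bool
example_may_be_flexarray_ref (tree ref)
{
  /* When this returns true, callers must not derive an upper bound for
     the index from the declared array domain.  */
  return array_at_struct_end_p (ref);
}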
13805 /* Return a tree representing the offset, in bytes, of the field referenced
13806 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13808 tree
13809 component_ref_field_offset (tree exp)
13811 tree aligned_offset = TREE_OPERAND (exp, 2);
13812 tree field = TREE_OPERAND (exp, 1);
13813 location_t loc = EXPR_LOCATION (exp);
13815 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13816 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13817 value. */
13818 if (aligned_offset)
13820 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13821 sizetype from another type of the same width and signedness. */
13822 if (TREE_TYPE (aligned_offset) != sizetype)
13823 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13824 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13825 size_int (DECL_OFFSET_ALIGN (field)
13826 / BITS_PER_UNIT));
13829 /* Otherwise, take the offset from that of the field. Substitute
13830 any PLACEHOLDER_EXPR that we have. */
13831 else
13832 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
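/* Illustrative sketch, not part of the original tree.c: combining
   component_ref_field_offset with DECL_FIELD_BIT_OFFSET to get the full bit
   position of a COMPONENT_REF's field.  The helper name is hypothetical.  */

static tree
example_component_ref_bit_offset (tree exp)
{
  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
  tree field = TREE_OPERAND (exp, 1);
  tree byte_off = component_ref_field_offset (exp);
  /* Scale the byte offset to bits and add the residual bit offset
     stored in the FIELD_DECL.  */
  return size_binop (PLUS_EXPR,
                     size_binop (MULT_EXPR,
                                 fold_convert (bitsizetype, byte_off),
                                 bitsize_int (BITS_PER_UNIT)),
                     DECL_FIELD_BIT_OFFSET (field));
}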
13835 /* Return the machine mode of T. For vectors, returns the mode of the
13836 inner type. The main use case is to feed the result to HONOR_NANS,
13837 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13839 machine_mode
13840 element_mode (const_tree t)
13842 if (!TYPE_P (t))
13843 t = TREE_TYPE (t);
13844 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13845 t = TREE_TYPE (t);
13846 return TYPE_MODE (t);
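/* Illustrative sketch, not part of the original tree.c: the intended use of
   element_mode, feeding HONOR_NANS and friends so that vector and complex
   types are handled like their element type instead of yielding BLKmode.
   The helper name is hypothetical.  */

static bool
example_honor_nans_p (const_tree type_or_expr)
{
  return HONOR_NANS (element_mode (type_or_expr));
}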
13849 /* Vector types need to re-check the target flags each time we report
13850 the machine mode. We need to do this because attribute target can
13851 change the result of vector_mode_supported_p and have_regs_of_mode
13852 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13853 change on a per-function basis. */
13854 /* ??? Possibly a better solution is to run through all the types
13855 referenced by a function and re-compute the TYPE_MODE once, rather
13856 than make the TYPE_MODE macro call a function. */
13858 machine_mode
13859 vector_type_mode (const_tree t)
13861 machine_mode mode;
13863 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13865 mode = t->type_common.mode;
13866 if (VECTOR_MODE_P (mode)
13867 && (!targetm.vector_mode_supported_p (mode)
13868 || !have_regs_of_mode[mode]))
13870 scalar_int_mode innermode;
13872 /* For integers, try mapping it to a same-sized scalar mode. */
13873 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13875 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13876 * GET_MODE_BITSIZE (innermode));
13877 scalar_int_mode mode;
13878 if (int_mode_for_size (size, 0).exists (&mode)
13879 && have_regs_of_mode[mode])
13880 return mode;
13883 return BLKmode;
13886 return mode;
13889 /* Verify that basic properties of T match TV and thus T can be a variant of
13890 TV. TV should be the more specified variant (i.e. the main variant). */
13892 static bool
13893 verify_type_variant (const_tree t, tree tv)
13895 /* Type variant can differ by:
13897 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13898 ENCODE_QUAL_ADDR_SPACE.
13899 - the main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13900 in this case some values may not be set in the variant types
13901 (see TYPE_COMPLETE_P checks).
13902 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13903 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13904 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13905 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13906 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13907 this is necessary to make it possible to merge types from different TUs
13908 - arrays, pointers and references may have TREE_TYPE that is a variant
13909 of TREE_TYPE of their main variants.
13910 - aggregates may have a new TYPE_FIELDS list that lists variants of
13911 the main variant TYPE_FIELDS.
13912 - vector types may differ by TYPE_VECTOR_OPAQUE
13915 /* Convenience macro for matching individual fields. */
13916 #define verify_variant_match(flag) \
13917 do { \
13918 if (flag (tv) != flag (t)) \
13920 error ("type variant differs by %s", #flag); \
13921 debug_tree (tv); \
13922 return false; \
13924 } while (false)
13926 /* tree_base checks. */
13928 verify_variant_match (TREE_CODE);
13929 /* FIXME: Ada builds non-artificial variants of artificial types. */
13930 if (TYPE_ARTIFICIAL (tv) && 0)
13931 verify_variant_match (TYPE_ARTIFICIAL);
13932 if (POINTER_TYPE_P (tv))
13933 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13934 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13935 verify_variant_match (TYPE_UNSIGNED);
13936 verify_variant_match (TYPE_PACKED);
13937 if (TREE_CODE (t) == REFERENCE_TYPE)
13938 verify_variant_match (TYPE_REF_IS_RVALUE);
13939 if (AGGREGATE_TYPE_P (t))
13940 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13941 else
13942 verify_variant_match (TYPE_SATURATING);
13943 /* FIXME: This check triggers during the libstdc++ build. */
13944 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13945 verify_variant_match (TYPE_FINAL_P);
13947 /* tree_type_common checks. */
13949 if (COMPLETE_TYPE_P (t))
13951 verify_variant_match (TYPE_MODE);
13952 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13953 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13954 verify_variant_match (TYPE_SIZE);
13955 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13956 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13957 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13959 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13960 TYPE_SIZE_UNIT (tv), 0));
13961 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13962 debug_tree (tv);
13963 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13964 debug_tree (TYPE_SIZE_UNIT (tv));
13965 error ("type%'s %<TYPE_SIZE_UNIT%>");
13966 debug_tree (TYPE_SIZE_UNIT (t));
13967 return false;
13970 verify_variant_match (TYPE_PRECISION);
13971 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13972 if (RECORD_OR_UNION_TYPE_P (t))
13973 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13974 else if (TREE_CODE (t) == ARRAY_TYPE)
13975 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13976 /* During LTO we merge variant lists from different translation units
13977 that may differ by TYPE_CONTEXT, which in turn may point
13978 to TRANSLATION_UNIT_DECL.
13979 Ada also builds variants of types with different TYPE_CONTEXT. */
13980 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13981 verify_variant_match (TYPE_CONTEXT);
13982 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13983 verify_variant_match (TYPE_STRING_FLAG);
13984 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13985 verify_variant_match (TYPE_CXX_ODR_P);
13986 if (TYPE_ALIAS_SET_KNOWN_P (t))
13988 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13989 debug_tree (tv);
13990 return false;
13993 /* tree_type_non_common checks. */
13995 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13996 and dangles the pointer from time to time. */
13997 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13998 && (in_lto_p || !TYPE_VFIELD (tv)
13999 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
14001 error ("type variant has different %<TYPE_VFIELD%>");
14002 debug_tree (tv);
14003 return false;
14005 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
14006 || TREE_CODE (t) == INTEGER_TYPE
14007 || TREE_CODE (t) == BOOLEAN_TYPE
14008 || TREE_CODE (t) == REAL_TYPE
14009 || TREE_CODE (t) == FIXED_POINT_TYPE)
14011 verify_variant_match (TYPE_MAX_VALUE);
14012 verify_variant_match (TYPE_MIN_VALUE);
14014 if (TREE_CODE (t) == METHOD_TYPE)
14015 verify_variant_match (TYPE_METHOD_BASETYPE);
14016 if (TREE_CODE (t) == OFFSET_TYPE)
14017 verify_variant_match (TYPE_OFFSET_BASETYPE);
14018 if (TREE_CODE (t) == ARRAY_TYPE)
14019 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
14020 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
14021 or even the type's main variant. This is needed to make bootstrap pass
14022 and the bug seems new in GCC 5.
14023 The C++ FE should be updated to make this consistent and we should check
14024 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
14025 is a match with the main variant.
14027 Also disable the check for Java for now because of a parser hack that
14028 builds first a dummy BINFO and then sometimes replaces it by a real BINFO
14029 in some of the copies. */
14030 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
14031 && TYPE_BINFO (t) != TYPE_BINFO (tv)
14032 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
14033 Since there is no cheap way to tell a C++ type from a Java type without
14034 LTO, do the checking at LTO time only. */
14035 && (in_lto_p && odr_type_p (t)))
14037 error ("type variant has different %<TYPE_BINFO%>");
14038 debug_tree (tv);
14039 error ("type variant%'s %<TYPE_BINFO%>");
14040 debug_tree (TYPE_BINFO (tv));
14041 error ("type%'s %<TYPE_BINFO%>");
14042 debug_tree (TYPE_BINFO (t));
14043 return false;
14046 /* Check various uses of TYPE_VALUES_RAW. */
14047 if (TREE_CODE (t) == ENUMERAL_TYPE
14048 && TYPE_VALUES (t))
14049 verify_variant_match (TYPE_VALUES);
14050 else if (TREE_CODE (t) == ARRAY_TYPE)
14051 verify_variant_match (TYPE_DOMAIN);
14052 /* Permit incomplete variants of complete type. While FEs may complete
14053 all variants, this does not happen for C++ templates in all cases. */
14054 else if (RECORD_OR_UNION_TYPE_P (t)
14055 && COMPLETE_TYPE_P (t)
14056 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
14058 tree f1, f2;
14060 /* Fortran builds qualified variants as new records with items of
14061 qualified type. Verify that they look the same. */
14062 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
14063 f1 && f2;
14064 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14065 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
14066 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
14067 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
14068 /* FIXME: gfc_nonrestricted_type builds all types as variants
14069 with exception of pointer types. It deeply copies the type
14070 which means that we may end up with a variant type
14071 referring to a non-variant pointer. We may change it to
14072 produce types as variants, too, like
14073 objc_get_protocol_qualified_type does. */
14074 && !POINTER_TYPE_P (TREE_TYPE (f1)))
14075 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
14076 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
14077 break;
14078 if (f1 || f2)
14080 error ("type variant has different %<TYPE_FIELDS%>");
14081 debug_tree (tv);
14082 error ("first mismatch is field");
14083 debug_tree (f1);
14084 error ("and field");
14085 debug_tree (f2);
14086 return false;
14089 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
14090 verify_variant_match (TYPE_ARG_TYPES);
14091 /* For C++ the qualified variant of an array type is really an array type
14092 of the qualified TREE_TYPE.
14093 ObjC builds variants of pointer types where the pointed-to type is a
14094 variant, too, in objc_get_protocol_qualified_type. */
14095 if (TREE_TYPE (t) != TREE_TYPE (tv)
14096 && ((TREE_CODE (t) != ARRAY_TYPE
14097 && !POINTER_TYPE_P (t))
14098 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
14099 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
14101 error ("type variant has different %<TREE_TYPE%>");
14102 debug_tree (tv);
14103 error ("type variant%'s %<TREE_TYPE%>");
14104 debug_tree (TREE_TYPE (tv));
14105 error ("type%'s %<TREE_TYPE%>");
14106 debug_tree (TREE_TYPE (t));
14107 return false;
14109 if (type_with_alias_set_p (t)
14110 && !gimple_canonical_types_compatible_p (t, tv, false))
14112 error ("type is not compatible with its variant");
14113 debug_tree (tv);
14114 error ("type variant%'s %<TREE_TYPE%>");
14115 debug_tree (TREE_TYPE (tv));
14116 error ("type%'s %<TREE_TYPE%>");
14117 debug_tree (TREE_TYPE (t));
14118 return false;
14120 return true;
14121 #undef verify_variant_match
14125 /* The TYPE_CANONICAL merging machinery. It should closely resemble
14126 the middle-end types_compatible_p function. It needs to avoid
14127 claiming types are different for types that should be treated
14128 the same with respect to TBAA. Canonical types are also used
14129 for IL consistency checks via the useless_type_conversion_p
14130 predicate which does not handle all type kinds itself but falls
14131 back to pointer-comparison of TYPE_CANONICAL for aggregates
14132 for example. */
14134 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14135 type calculation because we need to allow inter-operability between signed
14136 and unsigned variants. */
14138 bool
14139 type_with_interoperable_signedness (const_tree type)
14141 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
14142 both signed char and unsigned char. Similarly, the Fortran FE builds
14143 C_SIZE_T as a signed type, while C defines it as unsigned. */
14145 return tree_code_for_canonical_type_merging (TREE_CODE (type))
14146 == INTEGER_TYPE
14147 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14148 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
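/* Illustrative sketch, not part of the original tree.c: how the
   canonical-type code below uses this predicate; differing signedness only
   separates canonical types when interoperability is not required.  The
   helper name is hypothetical.  */

static bool
example_signedness_separates_p (const_tree t1, const_tree t2)
{
  return (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
          && !type_with_interoperable_signedness (t1));
}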
14151 /* Return true iff T1 and T2 are structurally identical as far as
14152 TBAA is concerned.
14153 This function is used both by lto.c canonical type merging and by the
14154 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
14155 types that have TYPE_CANONICAL defined and assume them equivalent. This
14156 is useful only for LTO because only in these cases does TYPE_CANONICAL
14157 equivalence correspond to the one defined by gimple_canonical_types_compatible_p. */
14159 bool
14160 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14161 bool trust_type_canonical)
14163 /* Type variants should be same as the main variant. When not doing sanity
14164 checking to verify this fact, go to main variants and save some work. */
14165 if (trust_type_canonical)
14167 t1 = TYPE_MAIN_VARIANT (t1);
14168 t2 = TYPE_MAIN_VARIANT (t2);
14171 /* Check first for the obvious case of pointer identity. */
14172 if (t1 == t2)
14173 return true;
14175 /* Check that we have two types to compare. */
14176 if (t1 == NULL_TREE || t2 == NULL_TREE)
14177 return false;
14179 /* We consider complete types always compatible with incomplete type.
14180 This does not make sense for canonical type calculation and thus we
14181 need to ensure that we are never called on it.
14183 FIXME: For more correctness the function probably should have three modes
14184 1) mode assuming that types are complete, matching their structure
14185 2) mode allowing incomplete types but producing equivalence classes
14186 and thus ignoring all info from complete types
14187 3) mode allowing incomplete types to match complete but checking
14188 compatibility between complete types.
14190 1 and 2 can be used for canonical type calculation. 3 is the real
14191 definition of type compatibility that can be used e.g. for warnings during
14192 declaration merging. */
14194 gcc_assert (!trust_type_canonical
14195 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14197 /* If the types have been previously registered and found equal
14198 they still are. */
14200 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14201 && trust_type_canonical)
14203 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
14204 they are always NULL, but they are set to non-NULL for types
14205 constructed by build_pointer_type and variants. In this case the
14206 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
14207 all pointers are considered equal). Be sure to not return false
14208 negatives. */
14209 gcc_checking_assert (canonical_type_used_p (t1)
14210 && canonical_type_used_p (t2));
14211 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14214 /* For types where we do ODR based TBAA the canonical type is always
14215 set correctly, so we know that types are different if their
14216 canonical types do not match. */
14217 if (trust_type_canonical
14218 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14219 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14220 return false;
14222 /* Can't be the same type if the types don't have the same code. */
14223 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14224 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14225 return false;
14227 /* Qualifiers do not matter for canonical type comparison purposes. */
14229 /* Void types and nullptr types are always the same. */
14230 if (TREE_CODE (t1) == VOID_TYPE
14231 || TREE_CODE (t1) == NULLPTR_TYPE)
14232 return true;
14234 /* Can't be the same type if they have different mode. */
14235 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14236 return false;
14238 /* Non-aggregate types can be handled cheaply. */
14239 if (INTEGRAL_TYPE_P (t1)
14240 || SCALAR_FLOAT_TYPE_P (t1)
14241 || FIXED_POINT_TYPE_P (t1)
14242 || TREE_CODE (t1) == VECTOR_TYPE
14243 || TREE_CODE (t1) == COMPLEX_TYPE
14244 || TREE_CODE (t1) == OFFSET_TYPE
14245 || POINTER_TYPE_P (t1))
14247 /* Can't be the same type if they have different precision. */
14248 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14249 return false;
14251 /* In some cases the signed and unsigned types are required to be
14252 inter-operable. */
14253 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14254 && !type_with_interoperable_signedness (t1))
14255 return false;
14257 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14258 interoperable with "signed char". Unless all frontends are revisited
14259 to agree on these types, we must ignore the flag completely. */
14261 /* The Fortran standard defines the C_PTR type to be compatible with
14262 every C pointer. For this reason we need to glob all pointers into one.
14263 Still, pointers in different address spaces are not compatible. */
14264 if (POINTER_TYPE_P (t1))
14266 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14267 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14268 return false;
14271 /* Tail-recurse to components. */
14272 if (TREE_CODE (t1) == VECTOR_TYPE
14273 || TREE_CODE (t1) == COMPLEX_TYPE)
14274 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14275 TREE_TYPE (t2),
14276 trust_type_canonical);
14278 return true;
14281 /* Do type-specific comparisons. */
14282 switch (TREE_CODE (t1))
14284 case ARRAY_TYPE:
14285 /* Array types are the same if the element types are the same and
14286 the number of elements are the same. */
14287 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14288 trust_type_canonical)
14289 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14290 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14291 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14292 return false;
14293 else
14295 tree i1 = TYPE_DOMAIN (t1);
14296 tree i2 = TYPE_DOMAIN (t2);
14298 /* For an incomplete external array, the type domain can be
14299 NULL_TREE. Check this condition also. */
14300 if (i1 == NULL_TREE && i2 == NULL_TREE)
14301 return true;
14302 else if (i1 == NULL_TREE || i2 == NULL_TREE)
14303 return false;
14304 else
14306 tree min1 = TYPE_MIN_VALUE (i1);
14307 tree min2 = TYPE_MIN_VALUE (i2);
14308 tree max1 = TYPE_MAX_VALUE (i1);
14309 tree max2 = TYPE_MAX_VALUE (i2);
14311 /* The minimum/maximum values have to be the same. */
14312 if ((min1 == min2
14313 || (min1 && min2
14314 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14315 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14316 || operand_equal_p (min1, min2, 0))))
14317 && (max1 == max2
14318 || (max1 && max2
14319 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14320 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14321 || operand_equal_p (max1, max2, 0)))))
14322 return true;
14323 else
14324 return false;
14328 case METHOD_TYPE:
14329 case FUNCTION_TYPE:
14330 /* Function types are the same if the return type and arguments types
14331 are the same. */
14332 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14333 trust_type_canonical))
14334 return false;
14336 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14337 return true;
14338 else
14340 tree parms1, parms2;
14342 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14343 parms1 && parms2;
14344 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14346 if (!gimple_canonical_types_compatible_p
14347 (TREE_VALUE (parms1), TREE_VALUE (parms2),
14348 trust_type_canonical))
14349 return false;
14352 if (parms1 || parms2)
14353 return false;
14355 return true;
14358 case RECORD_TYPE:
14359 case UNION_TYPE:
14360 case QUAL_UNION_TYPE:
14362 tree f1, f2;
14364 /* Don't try to compare variants of an incomplete type, before
14365 TYPE_FIELDS has been copied around. */
14366 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14367 return true;
14370 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14371 return false;
14373 /* For aggregate types, all the fields must be the same. */
14374 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14375 f1 || f2;
14376 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14378 /* Skip non-fields and zero-sized fields. */
14379 while (f1 && (TREE_CODE (f1) != FIELD_DECL
14380 || (DECL_SIZE (f1)
14381 && integer_zerop (DECL_SIZE (f1)))))
14382 f1 = TREE_CHAIN (f1);
14383 while (f2 && (TREE_CODE (f2) != FIELD_DECL
14384 || (DECL_SIZE (f2)
14385 && integer_zerop (DECL_SIZE (f2)))))
14386 f2 = TREE_CHAIN (f2);
14387 if (!f1 || !f2)
14388 break;
14389 /* The fields must have the same name, offset and type. */
14390 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14391 || !gimple_compare_field_offset (f1, f2)
14392 || !gimple_canonical_types_compatible_p
14393 (TREE_TYPE (f1), TREE_TYPE (f2),
14394 trust_type_canonical))
14395 return false;
14398 /* If one aggregate has more fields than the other, they
14399 are not the same. */
14400 if (f1 || f2)
14401 return false;
14403 return true;
14406 default:
14407 /* Consider all types with language specific trees in them mutually
14408 compatible. This is executed only from verify_type and false
14409 positives can be tolerated. */
14410 gcc_assert (!in_lto_p);
14411 return true;
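/* Illustrative sketch, not part of the original tree.c: the verifier below
   calls this function with trust_type_canonical == false, i.e. a purely
   structural comparison.  A minimal standalone use could look like this
   (the helper name is hypothetical).  */

static bool
example_tbaa_equivalent_p (tree t1, tree t2)
{
  /* Structural equality for TBAA purposes; do not short-circuit
     through TYPE_CANONICAL.  */
  return gimple_canonical_types_compatible_p (t1, t2, false);
}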
14415 /* Verify type T. */
14417 void
14418 verify_type (const_tree t)
14420 bool error_found = false;
14421 tree mv = TYPE_MAIN_VARIANT (t);
14422 if (!mv)
14424 error ("main variant is not defined");
14425 error_found = true;
14427 else if (mv != TYPE_MAIN_VARIANT (mv))
14429 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14430 debug_tree (mv);
14431 error_found = true;
14433 else if (t != mv && !verify_type_variant (t, mv))
14434 error_found = true;
14436 tree ct = TYPE_CANONICAL (t);
14437 if (!ct)
14439 else if (TYPE_CANONICAL (t) != ct)
14441 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14442 debug_tree (ct);
14443 error_found = true;
14445 /* Method and function types cannot be used to address memory and thus
14446 TYPE_CANONICAL really matters only for determining useless conversions.
14448 FIXME: The C++ FE produces declarations of builtin functions that are not
14449 compatible with main variants. */
14450 else if (TREE_CODE (t) == FUNCTION_TYPE)
14452 else if (t != ct
14453 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14454 with variably sized arrays because their sizes are possibly
14455 gimplified to different variables. */
14456 && !variably_modified_type_p (ct, NULL)
14457 && !gimple_canonical_types_compatible_p (t, ct, false)
14458 && COMPLETE_TYPE_P (t))
14460 error ("%<TYPE_CANONICAL%> is not compatible");
14461 debug_tree (ct);
14462 error_found = true;
14465 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14466 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14468 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14469 debug_tree (ct);
14470 error_found = true;
14472 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14474 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14475 debug_tree (ct);
14476 debug_tree (TYPE_MAIN_VARIANT (ct));
14477 error_found = true;
14481 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14482 if (RECORD_OR_UNION_TYPE_P (t))
14484 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14485 and dangles the pointer from time to time. */
14486 if (TYPE_VFIELD (t)
14487 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14488 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14490 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14491 debug_tree (TYPE_VFIELD (t));
14492 error_found = true;
14495 else if (TREE_CODE (t) == POINTER_TYPE)
14497 if (TYPE_NEXT_PTR_TO (t)
14498 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14500 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14501 debug_tree (TYPE_NEXT_PTR_TO (t));
14502 error_found = true;
14505 else if (TREE_CODE (t) == REFERENCE_TYPE)
14507 if (TYPE_NEXT_REF_TO (t)
14508 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14510 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14511 debug_tree (TYPE_NEXT_REF_TO (t));
14512 error_found = true;
14515 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14516 || TREE_CODE (t) == FIXED_POINT_TYPE)
14518 /* FIXME: The following check should pass:
14519 useless_type_conversion_p (const_cast <tree> (t),
14520 TREE_TYPE (TYPE_MIN_VALUE (t)))
14521 but does not for C sizetypes in LTO. */
14524 /* Check various uses of TYPE_MAXVAL_RAW. */
14525 if (RECORD_OR_UNION_TYPE_P (t))
14527 if (!TYPE_BINFO (t))
14529 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14531 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14532 debug_tree (TYPE_BINFO (t));
14533 error_found = true;
14535 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14537 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14538 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14539 error_found = true;
14542 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14544 if (TYPE_METHOD_BASETYPE (t)
14545 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14546 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14548 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14549 debug_tree (TYPE_METHOD_BASETYPE (t));
14550 error_found = true;
14553 else if (TREE_CODE (t) == OFFSET_TYPE)
14555 if (TYPE_OFFSET_BASETYPE (t)
14556 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14557 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14559 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14560 debug_tree (TYPE_OFFSET_BASETYPE (t));
14561 error_found = true;
14564 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14565 || TREE_CODE (t) == FIXED_POINT_TYPE)
14567 /* FIXME: The following check should pass:
14568 useless_type_conversion_p (const_cast <tree> (t),
14569 TREE_TYPE (TYPE_MAX_VALUE (t)))
14570 but does not for C sizetypes in LTO. */
14572 else if (TREE_CODE (t) == ARRAY_TYPE)
14574 if (TYPE_ARRAY_MAX_SIZE (t)
14575 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14577 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14578 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14579 error_found = true;
14582 else if (TYPE_MAX_VALUE_RAW (t))
14584 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14585 debug_tree (TYPE_MAX_VALUE_RAW (t));
14586 error_found = true;
14589 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14591 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14592 debug_tree (TYPE_LANG_SLOT_1 (t));
14593 error_found = true;
14596 /* Check various uses of TYPE_VALUES_RAW. */
14597 if (TREE_CODE (t) == ENUMERAL_TYPE)
14598 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14600 tree value = TREE_VALUE (l);
14601 tree name = TREE_PURPOSE (l);
14603 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE
14604 uses a CONST_DECL of ENUMERAL_TYPE. */
14605 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14607 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14608 debug_tree (value);
14609 debug_tree (name);
14610 error_found = true;
14612 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14613 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14615 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14616 "to the enum");
14617 debug_tree (value);
14618 debug_tree (name);
14619 error_found = true;
14621 if (TREE_CODE (name) != IDENTIFIER_NODE)
14623 error ("enum value name is not %<IDENTIFIER_NODE%>");
14624 debug_tree (value);
14625 debug_tree (name);
14626 error_found = true;
14629 else if (TREE_CODE (t) == ARRAY_TYPE)
14631 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14633 error ("array %<TYPE_DOMAIN%> is not integer type");
14634 debug_tree (TYPE_DOMAIN (t));
14635 error_found = true;
14638 else if (RECORD_OR_UNION_TYPE_P (t))
14640 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14642 error ("%<TYPE_FIELDS%> defined in incomplete type");
14643 error_found = true;
14645 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14647 /* TODO: verify properties of decls. */
14648 if (TREE_CODE (fld) == FIELD_DECL)
14650 else if (TREE_CODE (fld) == TYPE_DECL)
14652 else if (TREE_CODE (fld) == CONST_DECL)
14654 else if (VAR_P (fld))
14656 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14658 else if (TREE_CODE (fld) == USING_DECL)
14660 else if (TREE_CODE (fld) == FUNCTION_DECL)
14662 else
14664 error ("wrong tree in %<TYPE_FIELDS%> list");
14665 debug_tree (fld);
14666 error_found = true;
14670 else if (TREE_CODE (t) == INTEGER_TYPE
14671 || TREE_CODE (t) == BOOLEAN_TYPE
14672 || TREE_CODE (t) == OFFSET_TYPE
14673 || TREE_CODE (t) == REFERENCE_TYPE
14674 || TREE_CODE (t) == NULLPTR_TYPE
14675 || TREE_CODE (t) == POINTER_TYPE)
14677 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14679 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14680 "is %p",
14681 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14682 error_found = true;
14684 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14686 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14687 debug_tree (TYPE_CACHED_VALUES (t));
14688 error_found = true;
14690 /* Verify just enough of the cache to ensure that no one copied it to a
14691 new type. All copying should go through copy_node, which should clear it. */
14692 else if (TYPE_CACHED_VALUES_P (t))
14694 int i;
14695 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14696 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14697 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14699 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14700 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14701 error_found = true;
14702 break;
14706 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14707 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14709 /* C++ FE uses TREE_PURPOSE to store initial values. */
14710 if (TREE_PURPOSE (l) && in_lto_p)
14712 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14713 debug_tree (l);
14714 error_found = true;
14716 if (!TYPE_P (TREE_VALUE (l)))
14718 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14719 debug_tree (l);
14720 error_found = true;
14723 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14725 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14726 debug_tree (TYPE_VALUES_RAW (t));
14727 error_found = true;
14729 if (TREE_CODE (t) != INTEGER_TYPE
14730 && TREE_CODE (t) != BOOLEAN_TYPE
14731 && TREE_CODE (t) != OFFSET_TYPE
14732 && TREE_CODE (t) != REFERENCE_TYPE
14733 && TREE_CODE (t) != NULLPTR_TYPE
14734 && TREE_CODE (t) != POINTER_TYPE
14735 && TYPE_CACHED_VALUES_P (t))
14737 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14738 error_found = true;
14741 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14742 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14743 of a type. */
14744 if (TREE_CODE (t) == METHOD_TYPE
14745 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14747 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14748 error_found = true;
14751 if (error_found)
14753 debug_tree (const_cast <tree> (t));
14754 internal_error ("%qs failed", __func__);
14759 /* Return 1 if ARG interpreted as signed in its precision is known to be
14760 always positive or 2 if ARG is known to be always negative, or 3 if
14761 ARG may be positive or negative. */
14764 get_range_pos_neg (tree arg)
14766 if (arg == error_mark_node)
14767 return 3;
14769 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14770 int cnt = 0;
14771 if (TREE_CODE (arg) == INTEGER_CST)
14773 wide_int w = wi::sext (wi::to_wide (arg), prec);
14774 if (wi::neg_p (w))
14775 return 2;
14776 else
14777 return 1;
14779 while (CONVERT_EXPR_P (arg)
14780 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14781 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14783 arg = TREE_OPERAND (arg, 0);
14784 /* Narrower value zero extended into wider type
14785 will always result in positive values. */
14786 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14787 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14788 return 1;
14789 prec = TYPE_PRECISION (TREE_TYPE (arg));
14790 if (++cnt > 30)
14791 return 3;
14794 if (TREE_CODE (arg) != SSA_NAME)
14795 return 3;
14796 wide_int arg_min, arg_max;
14797 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14799 gimple *g = SSA_NAME_DEF_STMT (arg);
14800 if (is_gimple_assign (g)
14801 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14803 tree t = gimple_assign_rhs1 (g);
14804 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14805 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14807 if (TYPE_UNSIGNED (TREE_TYPE (t))
14808 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14809 return 1;
14810 prec = TYPE_PRECISION (TREE_TYPE (t));
14811 arg = t;
14812 if (++cnt > 30)
14813 return 3;
14814 continue;
14817 return 3;
14819 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14821 /* For unsigned values, the "positive" range comes
14822 below the "negative" range. */
14823 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14824 return 1;
14825 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14826 return 2;
14828 else
14830 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14831 return 1;
14832 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14833 return 2;
14835 return 3;
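/* Illustrative sketch, not part of the original tree.c: decoding the 1/2/3
   result of get_range_pos_neg.  The helper name is hypothetical.  */

static const char *
example_describe_range_sign (tree arg)
{
  switch (get_range_pos_neg (arg))
    {
    case 1:
      return "always positive";
    case 2:
      return "always negative";
    default:
      return "may be positive or negative";
    }
}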
14841 /* Return true if ARG is marked with the nonnull attribute in the
14842 current function signature. */
14844 bool
14845 nonnull_arg_p (const_tree arg)
14847 tree t, attrs, fntype;
14848 unsigned HOST_WIDE_INT arg_num;
14850 gcc_assert (TREE_CODE (arg) == PARM_DECL
14851 && (POINTER_TYPE_P (TREE_TYPE (arg))
14852 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14854 /* The static chain decl is always non null. */
14855 if (arg == cfun->static_chain_decl)
14856 return true;
14858 /* THIS argument of method is always non-NULL. */
14859 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14860 && arg == DECL_ARGUMENTS (cfun->decl)
14861 && flag_delete_null_pointer_checks)
14862 return true;
14864 /* Values passed by reference are always non-NULL. */
14865 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14866 && flag_delete_null_pointer_checks)
14867 return true;
14869 fntype = TREE_TYPE (cfun->decl);
14870 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14872 attrs = lookup_attribute ("nonnull", attrs);
14874 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14875 if (attrs == NULL_TREE)
14876 return false;
14878 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14879 if (TREE_VALUE (attrs) == NULL_TREE)
14880 return true;
14882 /* Get the position number for ARG in the function signature. */
14883 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14885 t = DECL_CHAIN (t), arg_num++)
14887 if (t == arg)
14888 break;
14891 gcc_assert (t == arg);
14893 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14894 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14896 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14897 return true;
14901 return false;
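/* Illustrative sketch, not part of the original tree.c: a pass that wants to
   delete a NULL check against a PARM_DECL could guard it like this (the
   helper name is hypothetical).  */

static bool
example_parm_known_nonnull_p (tree parm)
{
  /* nonnull_arg_p asserts its argument is a pointer-ish PARM_DECL, so
     check that first.  */
  return (TREE_CODE (parm) == PARM_DECL
          && POINTER_TYPE_P (TREE_TYPE (parm))
          && nonnull_arg_p (parm));
}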
14904 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14905 information. */
14907 location_t
14908 set_block (location_t loc, tree block)
14910 location_t pure_loc = get_pure_location (loc);
14911 source_range src_range = get_range_from_loc (line_table, loc);
14912 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14915 location_t
14916 set_source_range (tree expr, location_t start, location_t finish)
14918 source_range src_range;
14919 src_range.m_start = start;
14920 src_range.m_finish = finish;
14921 return set_source_range (expr, src_range);
14924 location_t
14925 set_source_range (tree expr, source_range src_range)
14927 if (!EXPR_P (expr))
14928 return UNKNOWN_LOCATION;
14930 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14931 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14932 pure_loc,
14933 src_range,
14934 NULL);
14935 SET_EXPR_LOCATION (expr, adhoc);
14936 return adhoc;
14939 /* Return EXPR, potentially wrapped with a node expression LOC,
14940 if !CAN_HAVE_LOCATION_P (expr).
14942 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14943 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14945 Wrapper nodes can be identified using location_wrapper_p. */
14947 tree
14948 maybe_wrap_with_location (tree expr, location_t loc)
14950 if (expr == NULL)
14951 return NULL;
14952 if (loc == UNKNOWN_LOCATION)
14953 return expr;
14954 if (CAN_HAVE_LOCATION_P (expr))
14955 return expr;
14956 /* We should only be adding wrappers for constants and for decls,
14957 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14958 gcc_assert (CONSTANT_CLASS_P (expr)
14959 || DECL_P (expr)
14960 || EXCEPTIONAL_CLASS_P (expr));
14962 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14963 any impact of the wrapper nodes. */
14964 if (EXCEPTIONAL_CLASS_P (expr))
14965 return expr;
14967 /* If any auto_suppress_location_wrappers are active, don't create
14968 wrappers. */
14969 if (suppress_location_wrappers > 0)
14970 return expr;
14972 tree_code code
14973 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14974 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14975 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14976 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14977 /* Mark this node as being a wrapper. */
14978 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14979 return wrapper;
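/* Illustrative sketch, not part of the original tree.c: a front end wrapping
   a constant argument so diagnostics can point at the use site, then
   stripping the wrapper again before folding (names hypothetical).  */

static tree
example_wrap_and_strip (tree cst, location_t loc)
{
  tree wrapped = maybe_wrap_with_location (cst, loc);
  /* location_wrapper_p identifies the wrapper nodes built above;
     tree_strip_any_location_wrapper recovers the original node.  */
  gcc_checking_assert (wrapped == cst || location_wrapper_p (wrapped));
  return tree_strip_any_location_wrapper (wrapped);
}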
14982 int suppress_location_wrappers;
14984 /* Return the name of combined function FN, for debugging purposes. */
14986 const char *
14987 combined_fn_name (combined_fn fn)
14989 if (builtin_fn_p (fn))
14991 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14992 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14994 else
14995 return internal_fn_name (as_internal_fn (fn));
14998 /* Return a bitmap with a bit set corresponding to each argument in
14999 a function call type FNTYPE declared with attribute nonnull,
15000 or null if none of the function's arguments are nonnull. The caller
15001 must free the bitmap. */
15003 bitmap
15004 get_nonnull_args (const_tree fntype)
15006 if (fntype == NULL_TREE)
15007 return NULL;
15009 tree attrs = TYPE_ATTRIBUTES (fntype);
15010 if (!attrs)
15011 return NULL;
15013 bitmap argmap = NULL;
15015 /* A function declaration can specify multiple attribute nonnull,
15016 each with zero or more arguments. The loop below creates a bitmap
15017 representing a union of all the arguments. An empty (but non-null)
15018 bitmap means that all arguments have been declared nonnull. */
15019 for ( ; attrs; attrs = TREE_CHAIN (attrs))
15021 attrs = lookup_attribute ("nonnull", attrs);
15022 if (!attrs)
15023 break;
15025 if (!argmap)
15026 argmap = BITMAP_ALLOC (NULL);
15028 if (!TREE_VALUE (attrs))
15030 /* Clear the bitmap in case a previous attribute nonnull
15031 set it and this one overrides it for all arguments. */
15032 bitmap_clear (argmap);
15033 return argmap;
15036 /* Iterate over the indices of the format arguments declared nonnull
15037 and set a bit for each. */
15038 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15040 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15041 bitmap_set_bit (argmap, val);
15045 return argmap;
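/* Illustrative sketch, not part of the original tree.c: querying the bitmap
   returned by get_nonnull_args for a zero-based argument index, matching the
   encoding used above (the helper name is hypothetical).  */

static bool
example_arg_declared_nonnull_p (const_tree fntype, unsigned argno)
{
  bitmap nonnull = get_nonnull_args (fntype);
  if (!nonnull)
    return false;
  /* An empty bitmap means attribute nonnull without arguments,
     i.e. every pointer argument is nonnull.  */
  bool result = bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, argno);
  BITMAP_FREE (nonnull);
  return result;
}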
15048 /* Returns true if TYPE is a type where it and all of its subobjects
15049 (recursively) are of structure, union, or array type. */
15051 static bool
15052 default_is_empty_type (tree type)
15054 if (RECORD_OR_UNION_TYPE_P (type))
15056 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15057 if (TREE_CODE (field) == FIELD_DECL
15058 && !DECL_PADDING_P (field)
15059 && !default_is_empty_type (TREE_TYPE (field)))
15060 return false;
15061 return true;
15063 else if (TREE_CODE (type) == ARRAY_TYPE)
15064 return (integer_minus_onep (array_type_nelts (type))
15065 || TYPE_DOMAIN (type) == NULL_TREE
15066 || default_is_empty_type (TREE_TYPE (type)));
15067 return false;
15070 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
15071 that shouldn't be passed via stack. */
15073 bool
15074 default_is_empty_record (const_tree type)
15076 if (!abi_version_at_least (12))
15077 return false;
15079 if (type == error_mark_node)
15080 return false;
15082 if (TREE_ADDRESSABLE (type))
15083 return false;
15085 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
15088 /* Like int_size_in_bytes, but handle empty records specially. */
15090 HOST_WIDE_INT
15091 arg_int_size_in_bytes (const_tree type)
15093 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15096 /* Like size_in_bytes, but handle empty records specially. */
15098 tree
15099 arg_size_in_bytes (const_tree type)
15101 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15104 /* Return true if an expression with CODE has to have the same result type as
15105 its first operand. */
15107 bool
15108 expr_type_first_operand_type_p (tree_code code)
15110 switch (code)
15112 case NEGATE_EXPR:
15113 case ABS_EXPR:
15114 case BIT_NOT_EXPR:
15115 case PAREN_EXPR:
15116 case CONJ_EXPR:
15118 case PLUS_EXPR:
15119 case MINUS_EXPR:
15120 case MULT_EXPR:
15121 case TRUNC_DIV_EXPR:
15122 case CEIL_DIV_EXPR:
15123 case FLOOR_DIV_EXPR:
15124 case ROUND_DIV_EXPR:
15125 case TRUNC_MOD_EXPR:
15126 case CEIL_MOD_EXPR:
15127 case FLOOR_MOD_EXPR:
15128 case ROUND_MOD_EXPR:
15129 case RDIV_EXPR:
15130 case EXACT_DIV_EXPR:
15131 case MIN_EXPR:
15132 case MAX_EXPR:
15133 case BIT_IOR_EXPR:
15134 case BIT_XOR_EXPR:
15135 case BIT_AND_EXPR:
15137 case LSHIFT_EXPR:
15138 case RSHIFT_EXPR:
15139 case LROTATE_EXPR:
15140 case RROTATE_EXPR:
15141 return true;
15143 default:
15144 return false;
15148 /* Return a typenode for the "standard" C type with a given name. */
15149 tree
15150 get_typenode_from_name (const char *name)
15152 if (name == NULL || *name == '\0')
15153 return NULL_TREE;
15155 if (strcmp (name, "char") == 0)
15156 return char_type_node;
15157 if (strcmp (name, "unsigned char") == 0)
15158 return unsigned_char_type_node;
15159 if (strcmp (name, "signed char") == 0)
15160 return signed_char_type_node;
15162 if (strcmp (name, "short int") == 0)
15163 return short_integer_type_node;
15164 if (strcmp (name, "short unsigned int") == 0)
15165 return short_unsigned_type_node;
15167 if (strcmp (name, "int") == 0)
15168 return integer_type_node;
15169 if (strcmp (name, "unsigned int") == 0)
15170 return unsigned_type_node;
15172 if (strcmp (name, "long int") == 0)
15173 return long_integer_type_node;
15174 if (strcmp (name, "long unsigned int") == 0)
15175 return long_unsigned_type_node;
15177 if (strcmp (name, "long long int") == 0)
15178 return long_long_integer_type_node;
15179 if (strcmp (name, "long long unsigned int") == 0)
15180 return long_long_unsigned_type_node;
15182 gcc_unreachable ();
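/* Illustrative sketch, not part of the original tree.c: get_typenode_from_name
   is handy when a builtin's signature is described textually, e.g. in target
   code.  The helper name is hypothetical.  */

static tree
example_int_to_int_fntype (void)
{
  tree int_type = get_typenode_from_name ("int");
  /* Unknown names hit the gcc_unreachable above, so pass only the
     standard C type names handled by the function.  */
  return build_function_type_list (int_type, int_type, NULL_TREE);
}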
15185 /* List of pointer types used to declare builtins before we have seen their
15186 real declaration.
15188 Keep the size up to date in tree.h ! */
15189 const builtin_structptr_type builtin_structptr_types[6] =
15191 { fileptr_type_node, ptr_type_node, "FILE" },
15192 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
15193 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
15194 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
15195 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
15196 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
15199 /* Return the maximum object size. */
15201 tree
15202 max_object_size (void)
15204 /* To do: Make this a configurable parameter. */
15205 return TYPE_MAX_VALUE (ptrdiff_type_node);
15208 #if CHECKING_P
15210 namespace selftest {
15212 /* Selftests for tree. */
15214 /* Verify that integer constants are sane. */
15216 static void
15217 test_integer_constants ()
15219 ASSERT_TRUE (integer_type_node != NULL);
15220 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15222 tree type = integer_type_node;
15224 tree zero = build_zero_cst (type);
15225 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15226 ASSERT_EQ (type, TREE_TYPE (zero));
15228 tree one = build_int_cst (type, 1);
15229 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15230 ASSERT_EQ (type, TREE_TYPE (zero));
15233 /* Verify identifiers. */
15235 static void
15236 test_identifiers ()
15238 tree identifier = get_identifier ("foo");
15239 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15240 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15243 /* Verify LABEL_DECL. */
15245 static void
15246 test_labels ()
15248 tree identifier = get_identifier ("err");
15249 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15250 identifier, void_type_node);
15251 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15252 ASSERT_FALSE (FORCED_LABEL (label_decl));
15255 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15256 are given by VALS. */
15258 static tree
15259 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15261 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15262 tree_vector_builder builder (type, vals.length (), 1);
15263 builder.splice (vals);
15264 return builder.build ();
15267 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15269 static void
15270 check_vector_cst (vec<tree> expected, tree actual)
15272 ASSERT_KNOWN_EQ (expected.length (),
15273 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15274 for (unsigned int i = 0; i < expected.length (); ++i)
15275 ASSERT_EQ (wi::to_wide (expected[i]),
15276 wi::to_wide (vector_cst_elt (actual, i)));
15279 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15280 and that its elements match EXPECTED. */
15282 static void
15283 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15284 unsigned int npatterns)
15286 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15287 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15288 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15289 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15290 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15291 check_vector_cst (expected, actual);
15294 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15295 and NPATTERNS background elements, and that its elements match
15296 EXPECTED. */
15298 static void
15299 check_vector_cst_fill (vec<tree> expected, tree actual,
15300 unsigned int npatterns)
15302 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15303 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15304 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15305 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15306 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15307 check_vector_cst (expected, actual);
15310 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15311 and that its elements match EXPECTED. */
15313 static void
15314 check_vector_cst_stepped (vec<tree> expected, tree actual,
15315 unsigned int npatterns)
15317 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15318 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15319 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15320 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15321 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15322 check_vector_cst (expected, actual);
15325 /* Test the creation of VECTOR_CSTs. */
15327 static void
15328 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15330 auto_vec<tree, 8> elements (8);
15331 elements.quick_grow (8);
15332 tree element_type = build_nonstandard_integer_type (16, true);
15333 tree vector_type = build_vector_type (element_type, 8);
15335 /* Test a simple linear series with a base of 0 and a step of 1:
15336 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15337 for (unsigned int i = 0; i < 8; ++i)
15338 elements[i] = build_int_cst (element_type, i);
15339 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15340 check_vector_cst_stepped (elements, vector, 1);
15342 /* Try the same with the first element replaced by 100:
15343 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15344 elements[0] = build_int_cst (element_type, 100);
15345 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15346 check_vector_cst_stepped (elements, vector, 1);
15348 /* Try a series that wraps around.
15349 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15350 for (unsigned int i = 1; i < 8; ++i)
15351 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15352 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15353 check_vector_cst_stepped (elements, vector, 1);
15355 /* Try a downward series:
15356 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15357 for (unsigned int i = 1; i < 8; ++i)
15358 elements[i] = build_int_cst (element_type, 80 - i);
15359 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15360 check_vector_cst_stepped (elements, vector, 1);
15362 /* Try two interleaved series with different bases and steps:
15363 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15364 elements[1] = build_int_cst (element_type, 53);
15365 for (unsigned int i = 2; i < 8; i += 2)
15367 elements[i] = build_int_cst (element_type, 70 - i * 2);
15368 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15370 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15371 check_vector_cst_stepped (elements, vector, 2);
15373 /* Try a duplicated value:
15374 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15375 for (unsigned int i = 1; i < 8; ++i)
15376 elements[i] = elements[0];
15377 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15378 check_vector_cst_duplicate (elements, vector, 1);
15380 /* Try an interleaved duplicated value:
15381 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15382 elements[1] = build_int_cst (element_type, 55);
15383 for (unsigned int i = 2; i < 8; ++i)
15384 elements[i] = elements[i - 2];
15385 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15386 check_vector_cst_duplicate (elements, vector, 2);
15388 /* Try a duplicated value with 2 exceptions
15389 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15390 elements[0] = build_int_cst (element_type, 41);
15391 elements[1] = build_int_cst (element_type, 97);
15392 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15393 check_vector_cst_fill (elements, vector, 2);
15395 /* Try patterns with and without a step:
15396 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
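/* The even positions still repeat 100 after the leading 41, while the
   odd positions (97, 21, 35, 49) step by 14 after the leading 97; the
   encoding as a whole is still reported as stepped because
   NELTS_PER_PATTERN is the maximum needed by any pattern.  */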
15397 for (unsigned int i = 3; i < 8; i += 2)
15398 elements[i] = build_int_cst (element_type, i * 7);
15399 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15400 check_vector_cst_stepped (elements, vector, 2);
15402 /* Try a fully-general constant:
15403 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15404 elements[5] = build_int_cst (element_type, 9990);
15405 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15406 check_vector_cst_fill (elements, vector, 4);
15407 }
15409 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15410 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15411 modifying its argument in-place. */
15413 static void
15414 check_strip_nops (tree node, tree expected)
15415 {
15416 STRIP_NOPS (node);
15417 ASSERT_EQ (expected, node);
15418 }
15420 /* Verify location wrappers. */
15422 static void
15423 test_location_wrappers ()
15424 {
15425 location_t loc = BUILTINS_LOCATION;
15427 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15429 /* Wrapping a constant. */
15430 tree int_cst = build_int_cst (integer_type_node, 42);
15431 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15432 ASSERT_FALSE (location_wrapper_p (int_cst));
15434 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15435 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15436 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15437 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15439 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15440 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15442 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15443 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15444 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15445 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15447 /* Wrapping a STRING_CST. */
15448 tree string_cst = build_string (4, "foo");
15449 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15450 ASSERT_FALSE (location_wrapper_p (string_cst));
15452 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15453 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15454 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15455 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15456 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15459 /* Wrapping a variable. */
15460 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15461 get_identifier ("some_int_var"),
15462 integer_type_node);
15463 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15464 ASSERT_FALSE (location_wrapper_p (int_var));
15466 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15467 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15468 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15469 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15471 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15472 wrapper. */
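/* Location wrappers are NON_LVALUE_EXPR or VIEW_CONVERT_EXPR nodes with a
   dedicated flag set, so a NON_LVALUE_EXPR built directly (without that
   flag) must not be treated as a wrapper.  */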
15473 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15474 ASSERT_FALSE (location_wrapper_p (r_cast));
15475 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15477 /* Verify that STRIP_NOPS removes wrappers. */
15478 check_strip_nops (wrapped_int_cst, int_cst);
15479 check_strip_nops (wrapped_string_cst, string_cst);
15480 check_strip_nops (wrapped_int_var, int_var);
15481 }
15483 /* Test various tree predicates. Verify that location wrappers don't
15484 affect the results. */
15486 static void
15487 test_predicates ()
15488 {
15489 /* Build various constants and wrappers around them. */
15491 location_t loc = BUILTINS_LOCATION;
15493 tree i_0 = build_int_cst (integer_type_node, 0);
15494 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15496 tree i_1 = build_int_cst (integer_type_node, 1);
15497 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15499 tree i_m1 = build_int_cst (integer_type_node, -1);
15500 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15502 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15503 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15504 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15505 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15506 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15507 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15509 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15510 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15511 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15513 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15514 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15515 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15517 /* TODO: vector constants. */
15519 /* Test integer_onep. */
15520 ASSERT_FALSE (integer_onep (i_0));
15521 ASSERT_FALSE (integer_onep (wr_i_0));
15522 ASSERT_TRUE (integer_onep (i_1));
15523 ASSERT_TRUE (integer_onep (wr_i_1));
15524 ASSERT_FALSE (integer_onep (i_m1));
15525 ASSERT_FALSE (integer_onep (wr_i_m1));
15526 ASSERT_FALSE (integer_onep (f_0));
15527 ASSERT_FALSE (integer_onep (wr_f_0));
15528 ASSERT_FALSE (integer_onep (f_1));
15529 ASSERT_FALSE (integer_onep (wr_f_1));
15530 ASSERT_FALSE (integer_onep (f_m1));
15531 ASSERT_FALSE (integer_onep (wr_f_m1));
15532 ASSERT_FALSE (integer_onep (c_i_0));
15533 ASSERT_TRUE (integer_onep (c_i_1));
15534 ASSERT_FALSE (integer_onep (c_i_m1));
15535 ASSERT_FALSE (integer_onep (c_f_0));
15536 ASSERT_FALSE (integer_onep (c_f_1));
15537 ASSERT_FALSE (integer_onep (c_f_m1));
15539 /* Test integer_zerop. */
15540 ASSERT_TRUE (integer_zerop (i_0));
15541 ASSERT_TRUE (integer_zerop (wr_i_0));
15542 ASSERT_FALSE (integer_zerop (i_1));
15543 ASSERT_FALSE (integer_zerop (wr_i_1));
15544 ASSERT_FALSE (integer_zerop (i_m1));
15545 ASSERT_FALSE (integer_zerop (wr_i_m1));
15546 ASSERT_FALSE (integer_zerop (f_0));
15547 ASSERT_FALSE (integer_zerop (wr_f_0));
15548 ASSERT_FALSE (integer_zerop (f_1));
15549 ASSERT_FALSE (integer_zerop (wr_f_1));
15550 ASSERT_FALSE (integer_zerop (f_m1));
15551 ASSERT_FALSE (integer_zerop (wr_f_m1));
15552 ASSERT_TRUE (integer_zerop (c_i_0));
15553 ASSERT_FALSE (integer_zerop (c_i_1));
15554 ASSERT_FALSE (integer_zerop (c_i_m1));
15555 ASSERT_FALSE (integer_zerop (c_f_0));
15556 ASSERT_FALSE (integer_zerop (c_f_1));
15557 ASSERT_FALSE (integer_zerop (c_f_m1));
15559 /* Test integer_all_onesp. */
15560 ASSERT_FALSE (integer_all_onesp (i_0));
15561 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15562 ASSERT_FALSE (integer_all_onesp (i_1));
15563 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15564 ASSERT_TRUE (integer_all_onesp (i_m1));
15565 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15566 ASSERT_FALSE (integer_all_onesp (f_0));
15567 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15568 ASSERT_FALSE (integer_all_onesp (f_1));
15569 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15570 ASSERT_FALSE (integer_all_onesp (f_m1));
15571 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15572 ASSERT_FALSE (integer_all_onesp (c_i_0));
15573 ASSERT_FALSE (integer_all_onesp (c_i_1));
15574 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15575 ASSERT_FALSE (integer_all_onesp (c_f_0));
15576 ASSERT_FALSE (integer_all_onesp (c_f_1));
15577 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15579 /* Test integer_minus_onep. */
15580 ASSERT_FALSE (integer_minus_onep (i_0));
15581 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15582 ASSERT_FALSE (integer_minus_onep (i_1));
15583 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15584 ASSERT_TRUE (integer_minus_onep (i_m1));
15585 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15586 ASSERT_FALSE (integer_minus_onep (f_0));
15587 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15588 ASSERT_FALSE (integer_minus_onep (f_1));
15589 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15590 ASSERT_FALSE (integer_minus_onep (f_m1));
15591 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15592 ASSERT_FALSE (integer_minus_onep (c_i_0));
15593 ASSERT_FALSE (integer_minus_onep (c_i_1));
15594 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15595 ASSERT_FALSE (integer_minus_onep (c_f_0));
15596 ASSERT_FALSE (integer_minus_onep (c_f_1));
15597 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15599 /* Test integer_each_onep. */
15600 ASSERT_FALSE (integer_each_onep (i_0));
15601 ASSERT_FALSE (integer_each_onep (wr_i_0));
15602 ASSERT_TRUE (integer_each_onep (i_1));
15603 ASSERT_TRUE (integer_each_onep (wr_i_1));
15604 ASSERT_FALSE (integer_each_onep (i_m1));
15605 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15606 ASSERT_FALSE (integer_each_onep (f_0));
15607 ASSERT_FALSE (integer_each_onep (wr_f_0));
15608 ASSERT_FALSE (integer_each_onep (f_1));
15609 ASSERT_FALSE (integer_each_onep (wr_f_1));
15610 ASSERT_FALSE (integer_each_onep (f_m1));
15611 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15612 ASSERT_FALSE (integer_each_onep (c_i_0));
15613 ASSERT_FALSE (integer_each_onep (c_i_1));
15614 ASSERT_FALSE (integer_each_onep (c_i_m1));
15615 ASSERT_FALSE (integer_each_onep (c_f_0));
15616 ASSERT_FALSE (integer_each_onep (c_f_1));
15617 ASSERT_FALSE (integer_each_onep (c_f_m1));
15619 /* Test integer_truep. */
15620 ASSERT_FALSE (integer_truep (i_0));
15621 ASSERT_FALSE (integer_truep (wr_i_0));
15622 ASSERT_TRUE (integer_truep (i_1));
15623 ASSERT_TRUE (integer_truep (wr_i_1));
15624 ASSERT_FALSE (integer_truep (i_m1));
15625 ASSERT_FALSE (integer_truep (wr_i_m1));
15626 ASSERT_FALSE (integer_truep (f_0));
15627 ASSERT_FALSE (integer_truep (wr_f_0));
15628 ASSERT_FALSE (integer_truep (f_1));
15629 ASSERT_FALSE (integer_truep (wr_f_1));
15630 ASSERT_FALSE (integer_truep (f_m1));
15631 ASSERT_FALSE (integer_truep (wr_f_m1));
15632 ASSERT_FALSE (integer_truep (c_i_0));
15633 ASSERT_TRUE (integer_truep (c_i_1));
15634 ASSERT_FALSE (integer_truep (c_i_m1));
15635 ASSERT_FALSE (integer_truep (c_f_0));
15636 ASSERT_FALSE (integer_truep (c_f_1));
15637 ASSERT_FALSE (integer_truep (c_f_m1));
15639 /* Test integer_nonzerop. */
15640 ASSERT_FALSE (integer_nonzerop (i_0));
15641 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15642 ASSERT_TRUE (integer_nonzerop (i_1));
15643 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15644 ASSERT_TRUE (integer_nonzerop (i_m1));
15645 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15646 ASSERT_FALSE (integer_nonzerop (f_0));
15647 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15648 ASSERT_FALSE (integer_nonzerop (f_1));
15649 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15650 ASSERT_FALSE (integer_nonzerop (f_m1));
15651 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15652 ASSERT_FALSE (integer_nonzerop (c_i_0));
15653 ASSERT_TRUE (integer_nonzerop (c_i_1));
15654 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15655 ASSERT_FALSE (integer_nonzerop (c_f_0));
15656 ASSERT_FALSE (integer_nonzerop (c_f_1));
15657 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15659 /* Test real_zerop. */
15660 ASSERT_FALSE (real_zerop (i_0));
15661 ASSERT_FALSE (real_zerop (wr_i_0));
15662 ASSERT_FALSE (real_zerop (i_1));
15663 ASSERT_FALSE (real_zerop (wr_i_1));
15664 ASSERT_FALSE (real_zerop (i_m1));
15665 ASSERT_FALSE (real_zerop (wr_i_m1));
15666 ASSERT_TRUE (real_zerop (f_0));
15667 ASSERT_TRUE (real_zerop (wr_f_0));
15668 ASSERT_FALSE (real_zerop (f_1));
15669 ASSERT_FALSE (real_zerop (wr_f_1));
15670 ASSERT_FALSE (real_zerop (f_m1));
15671 ASSERT_FALSE (real_zerop (wr_f_m1));
15672 ASSERT_FALSE (real_zerop (c_i_0));
15673 ASSERT_FALSE (real_zerop (c_i_1));
15674 ASSERT_FALSE (real_zerop (c_i_m1));
15675 ASSERT_TRUE (real_zerop (c_f_0));
15676 ASSERT_FALSE (real_zerop (c_f_1));
15677 ASSERT_FALSE (real_zerop (c_f_m1));
15679 /* Test real_onep. */
15680 ASSERT_FALSE (real_onep (i_0));
15681 ASSERT_FALSE (real_onep (wr_i_0));
15682 ASSERT_FALSE (real_onep (i_1));
15683 ASSERT_FALSE (real_onep (wr_i_1));
15684 ASSERT_FALSE (real_onep (i_m1));
15685 ASSERT_FALSE (real_onep (wr_i_m1));
15686 ASSERT_FALSE (real_onep (f_0));
15687 ASSERT_FALSE (real_onep (wr_f_0));
15688 ASSERT_TRUE (real_onep (f_1));
15689 ASSERT_TRUE (real_onep (wr_f_1));
15690 ASSERT_FALSE (real_onep (f_m1));
15691 ASSERT_FALSE (real_onep (wr_f_m1));
15692 ASSERT_FALSE (real_onep (c_i_0));
15693 ASSERT_FALSE (real_onep (c_i_1));
15694 ASSERT_FALSE (real_onep (c_i_m1));
15695 ASSERT_FALSE (real_onep (c_f_0));
15696 ASSERT_TRUE (real_onep (c_f_1));
15697 ASSERT_FALSE (real_onep (c_f_m1));
15699 /* Test real_minus_onep. */
15700 ASSERT_FALSE (real_minus_onep (i_0));
15701 ASSERT_FALSE (real_minus_onep (wr_i_0));
15702 ASSERT_FALSE (real_minus_onep (i_1));
15703 ASSERT_FALSE (real_minus_onep (wr_i_1));
15704 ASSERT_FALSE (real_minus_onep (i_m1));
15705 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15706 ASSERT_FALSE (real_minus_onep (f_0));
15707 ASSERT_FALSE (real_minus_onep (wr_f_0));
15708 ASSERT_FALSE (real_minus_onep (f_1));
15709 ASSERT_FALSE (real_minus_onep (wr_f_1));
15710 ASSERT_TRUE (real_minus_onep (f_m1));
15711 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15712 ASSERT_FALSE (real_minus_onep (c_i_0));
15713 ASSERT_FALSE (real_minus_onep (c_i_1));
15714 ASSERT_FALSE (real_minus_onep (c_i_m1));
15715 ASSERT_FALSE (real_minus_onep (c_f_0));
15716 ASSERT_FALSE (real_minus_onep (c_f_1));
15717 ASSERT_TRUE (real_minus_onep (c_f_m1));
15719 /* Test zerop. */
15720 ASSERT_TRUE (zerop (i_0));
15721 ASSERT_TRUE (zerop (wr_i_0));
15722 ASSERT_FALSE (zerop (i_1));
15723 ASSERT_FALSE (zerop (wr_i_1));
15724 ASSERT_FALSE (zerop (i_m1));
15725 ASSERT_FALSE (zerop (wr_i_m1));
15726 ASSERT_TRUE (zerop (f_0));
15727 ASSERT_TRUE (zerop (wr_f_0));
15728 ASSERT_FALSE (zerop (f_1));
15729 ASSERT_FALSE (zerop (wr_f_1));
15730 ASSERT_FALSE (zerop (f_m1));
15731 ASSERT_FALSE (zerop (wr_f_m1));
15732 ASSERT_TRUE (zerop (c_i_0));
15733 ASSERT_FALSE (zerop (c_i_1));
15734 ASSERT_FALSE (zerop (c_i_m1));
15735 ASSERT_TRUE (zerop (c_f_0));
15736 ASSERT_FALSE (zerop (c_f_1));
15737 ASSERT_FALSE (zerop (c_f_m1));
15739 /* Test tree_expr_nonnegative_p. */
15740 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15741 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15742 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15743 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15744 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15745 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15746 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15747 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15748 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15749 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15750 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15751 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15752 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15753 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15754 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15755 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15756 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15757 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
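/* Complex constants are never treated as nonnegative, presumably because
   there is no ordering on complex values.  */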
15759 /* Test tree_expr_nonzero_p. */
15760 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15761 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15762 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15763 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15764 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15765 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15767 /* Test integer_valued_real_p. */
15768 ASSERT_FALSE (integer_valued_real_p (i_0));
15769 ASSERT_TRUE (integer_valued_real_p (f_0));
15770 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15771 ASSERT_TRUE (integer_valued_real_p (f_1));
15772 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15774 /* Test integer_pow2p. */
15775 ASSERT_FALSE (integer_pow2p (i_0));
15776 ASSERT_TRUE (integer_pow2p (i_1));
15777 ASSERT_TRUE (integer_pow2p (wr_i_1));
15779 /* Test uniform_integer_cst_p. */
15780 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15781 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15782 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15783 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15784 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15785 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15786 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15787 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15788 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15789 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15790 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15791 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15792 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15793 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15794 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15795 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15796 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15797 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15798 }
15800 /* Check that string escaping works correctly. */
15802 static void
15803 test_escaped_strings (void)
15804 {
15805 int saved_cutoff;
15806 escaped_string msg;
15808 msg.escape (NULL);
15809 /* ASSERT_STREQ does not accept NULL as a valid test
15810 result, so we have to use ASSERT_EQ instead. */
15811 ASSERT_EQ (NULL, (const char *) msg);
15813 msg.escape ("");
15814 ASSERT_STREQ ("", (const char *) msg);
15816 msg.escape ("foobar");
15817 ASSERT_STREQ ("foobar", (const char *) msg);
15819 /* Ensure that we have -fmessage-length set to 0. */
15820 saved_cutoff = pp_line_cutoff (global_dc->printer);
15821 pp_line_cutoff (global_dc->printer) = 0;
15823 msg.escape ("foo\nbar");
15824 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15826 msg.escape ("\a\b\f\n\r\t\v");
15827 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15829 /* Now repeat the tests with -fmessage-length set to 5. */
15830 pp_line_cutoff (global_dc->printer) = 5;
15832 /* Note that the newline is not translated into an escape. */
15833 msg.escape ("foo\nbar");
15834 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15836 msg.escape ("\a\b\f\n\r\t\v");
15837 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
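/* With a nonzero -fmessage-length only the newline escape is suppressed;
   the other control characters are still escaped, as the result above
   shows.  */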
15839 /* Restore the original message length setting. */
15840 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15841 }
15843 /* Run all of the selftests within this file. */
15845 void
15846 tree_c_tests ()
15847 {
15848 test_integer_constants ();
15849 test_identifiers ();
15850 test_labels ();
15851 test_vector_cst_patterns ();
15852 test_location_wrappers ();
15853 test_predicates ();
15854 test_escaped_strings ();
15855 }
15857 } // namespace selftest
15859 #endif /* CHECKING_P */
15861 #include "gt-tree.h"